path = src/jemalloc
url = https://github.com/rust-lang/jemalloc.git
[submodule "src/rust-installer"]
- path = src/rust-installer
+ path = src/tools/rust-installer
url = https://github.com/rust-lang/rust-installer.git
[submodule "src/liblibc"]
path = src/liblibc
url = https://github.com/rust-lang-nursery/nomicon.git
[submodule "src/tools/cargo"]
path = src/tools/cargo
- url = https://github.com/rust-lang/cargo
+ url = https://github.com/rust-lang/cargo.git
[submodule "reference"]
path = src/doc/reference
url = https://github.com/rust-lang-nursery/reference.git
url = https://github.com/rust-lang/book.git
[submodule "src/tools/rls"]
path = src/tools/rls
- url = https://github.com/rust-lang-nursery/rls
+ url = https://github.com/rust-lang-nursery/rls.git
python x.py build src/libcore --stage 0
```
-You can explore the build system throught the various `--help` pages for each
+You can explore the build system through the various `--help` pages for each
subcommand. For example to learn more about a command you can run:
```
- set PATH="C:\Program Files (x86)\Inno Setup 5";%PATH%
# Help debug some handle issues on AppVeyor
- - ps: Invoke-WebRequest -Uri https://download.sysinternals.com/files/Handle.zip -OutFile handle.zip
+ - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-15-Handle.zip
- mkdir handle
- - ps: Expand-Archive handle.zip -dest handle
+ - 7z x -ohandle 2017-05-15-Handle.zip
- set PATH=%PATH%;%CD%\handle
- handle.exe -accepteula -help
valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples"
valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH"
valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH"
+valopt_nosave bindir "${CFG_PREFIX}/bin" "install binaries"
# On Windows this determines root of the subtree for target libraries.
# Host runtime libs always go to 'bin'.
CFG_PREFIX=${CFG_PREFIX%/}
CFG_MANDIR=${CFG_MANDIR%/}
CFG_DOCDIR=${CFG_DOCDIR%/}
+CFG_BINDIR=${CFG_BINDIR%/}
CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
putvar CFG_NACL_CROSS_PATH
putvar CFG_MANDIR
putvar CFG_DOCDIR
+putvar CFG_BINDIR
putvar CFG_USING_LIBCPP
msg
"libc 0.0.0",
]
+[[package]]
+name = "advapi32-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "aho-corasick"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "aho-corasick"
version = "0.6.3"
version = "0.0.0"
[[package]]
-name = "atty"
-version = "0.2.2"
+name = "backtrace"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
+ "backtrace-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "backtrace-sys"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "bitflags"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "bitflags"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "bitflags"
version = "0.8.2"
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "bufstream"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "build-manifest"
version = "0.1.0"
dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "cargo"
+version = "0.20.0"
+source = "git+https://github.com/rust-lang/cargo#2b32084293d8da63b48de56363a0f2e986ec3367"
+replace = "cargo 0.20.0"
+
+[[package]]
+name = "cargo"
+version = "0.20.0"
+dependencies = [
+ "advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargotest 0.1.0",
+ "chrono 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "crates-io 0.9.0",
+ "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "cargotest"
+version = "0.1.0"
+dependencies = [
+ "bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cargo 0.20.0",
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "cargotest2"
version = "0.1.0"
+[[package]]
+name = "cfg-if"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "chrono"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "clap"
-version = "2.22.1"
+version = "2.19.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strsim 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-segmentation 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "core"
version = "0.0.0"
+[[package]]
+name = "crates-io"
+version = "0.9.0"
+dependencies = [
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "crossbeam"
+version = "0.2.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "curl"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "curl-sys"
+version = "0.3.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "dbghelp-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "derive-new"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "diff"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "docopt"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "dtoa"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "either"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "enum_primitive"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "env_logger"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "env_logger"
version = "0.4.2"
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "error-chain"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "backtrace 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "error_index_generator"
version = "0.0.0"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "flate2"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "fmt_macros"
version = "0.0.0"
+[[package]]
+name = "foreign-types"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "fs2"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "gcc"
version = "0.3.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "gdi32-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "getopts"
version = "0.0.0"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "git2"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "git2-curl"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "glob"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "graphviz"
version = "0.0.0"
+[[package]]
+name = "hamcrest"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "handlebars"
-version = "0.25.2"
+version = "0.25.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "idna"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "installer"
+version = "0.0.0"
+dependencies = [
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "itertools"
+version = "0.5.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "languageserver-types"
+version = "0.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "lazy_static"
-version = "0.2.5"
+version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
[[package]]
name = "libc"
-version = "0.2.21"
+version = "0.2.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "libgit2-sys"
+version = "0.6.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "libssh2-sys"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "libz-sys"
+version = "1.0.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "linkchecker"
version = "0.1.0"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "lzma-sys"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "matches"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "mdbook"
version = "0.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "handlebars 0.25.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "handlebars 0.25.3 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "memchr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "miniz-sys"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "miow"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "multimap"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "net2"
+version = "0.2.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-complex 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-rational 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-bigint"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-complex"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-iter"
+version = "0.1.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "num-rational"
+version = "0.1.36"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "openssl"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "owning_ref"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "panic_abort"
version = "0.0.0"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "pkg-config"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "proc_macro"
version = "0.0.0"
"syntax_pos 0.0.0",
]
+[[package]]
+name = "psapi-sys"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "pulldown-cmark"
version = "0.0.8"
[[package]]
name = "quick-error"
-version = "1.1.0"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "quote"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "quote"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "racer"
+version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "typed-arena 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "rand"
"core 0.0.0",
]
+[[package]]
+name = "rand"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.1.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "regex"
+version = "0.1.80"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "regex"
version = "0.2.1"
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "regex-syntax"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "regex-syntax"
version = "0.4.0"
name = "remote-test-server"
version = "0.1.0"
+[[package]]
+name = "rls"
+version = "0.1.0"
+dependencies = [
+ "cargo 0.20.0 (git+https://github.com/rust-lang/cargo)",
+ "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-analysis 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-vfs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rls-analysis"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "rls-data"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rls-vfs"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustbook"
version = "0.1.0"
dependencies = [
- "clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)",
"mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
"fmt_macros 0.0.0",
"graphviz 0.0.0",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
- "rustc_llvm 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
"syntax_pos 0.0.0",
]
+[[package]]
+name = "rustc-demangle"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "rustc-main"
version = "0.0.0"
[[package]]
name = "rustc-serialize"
-version = "0.3.23"
+version = "0.3.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
"rustc_errors 0.0.0",
"rustc_incremental 0.0.0",
"rustc_lint 0.0.0",
- "rustc_llvm 0.0.0",
"rustc_metadata 0.0.0",
"rustc_mir 0.0.0",
"rustc_passes 0.0.0",
dependencies = [
"flate 0.0.0",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"proc_macro 0.0.0",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
- "rustc_llvm 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
"syntax_ext 0.0.0",
"rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_typeck 0.0.0",
"syntax 0.0.0",
"syntax_pos 0.0.0",
dependencies = [
"flate 0.0.0",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
"syntax_pos 0.0.0",
]
+[[package]]
+name = "rustfmt"
+version = "0.8.4"
+source = "git+https://github.com/rust-lang-nursery/rustfmt#bf9b3fa1d7cab2f7bd541539d397a92b4954ec96"
+dependencies = [
+ "diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "multimap 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "strings 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "same-file"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "semver"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "semver-parser"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "serde"
+version = "0.9.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "serde"
-version = "0.9.11"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "serde_derive"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive_internals 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_derive_internals"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_ignored"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "serde_json"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "serde_json"
-version = "0.9.9"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "serialize"
version = "0.0.0"
+[[package]]
+name = "shell-escape"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "std"
version = "0.0.0"
"core 0.0.0",
]
+[[package]]
+name = "strings"
+version = "0.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "strsim"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "strsim"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "syn"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syn"
+version = "0.11.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "synom"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "syntax"
version = "0.0.0"
"serialize 0.0.0",
]
+[[package]]
+name = "syntex_errors"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_errors"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_pos"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_pos"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_syntax"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "syntex_syntax"
+version = "0.58.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tar"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "tempdir"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "term"
version = "0.0.0"
+[[package]]
+name = "term"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "term_size"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "thread-id"
-version = "3.0.0"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread-id"
+version = "3.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "thread_local"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
name = "tidy"
version = "0.1.0"
+[[package]]
+name = "time"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+ "redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "toml"
version = "0.1.30"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "toml"
-version = "0.3.1"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "toml"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "toml"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "typed-arena"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-bidi"
+version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "unicode-normalization"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "unicode-segmentation"
-version = "1.1.0"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "unicode-xid"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "unicode-xid"
+version = "0.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "unreachable"
version = "0.1.1"
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "url"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "idna 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "url_serde"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "user32-sys"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "utf8-ranges"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "utf8-ranges"
version = "1.0.0"
[[package]]
name = "vec_map"
-version = "0.7.0"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "walkdir"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "winapi"
version = "0.2.8"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "ws2_32-sys"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "xattr"
+version = "0.1.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "xz2"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "yaml-rust"
+version = "0.3.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[metadata]
+"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a"
+"checksum aho-corasick 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ca972c2ea5f742bfce5687b9aef75506a764f61d37f8f649047846a9686ddb66"
"checksum aho-corasick 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "500909c4f87a9e52355b26626d890833e9e1d53ac566db76c36faa984b889699"
"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
-"checksum atty 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d912da0db7fa85514874458ca3651fe2cddace8d0b0505571dbdcd41ab490159"
+"checksum backtrace 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f551bc2ddd53aea015d453ef0b635af89444afa5ed2405dd0b2062ad5d600d80"
+"checksum backtrace-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "d192fd129132fbc97497c1f2ec2c2c5174e376b95f535199ef4fe0a293d33842"
"checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23"
+"checksum bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aad18937a628ec6abcd26d1489012cc0e18c21798210f491af69ded9b881106d"
"checksum bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1370e9fc2a6ae53aea8b7a5110edbd08836ed87c88736dfabccade1c2b44bff4"
-"checksum clap 2.22.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e17a4a72ffea176f77d6e2db609c6c919ef221f23862c9915e687fb54d833485"
+"checksum bufstream 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f2f382711e76b9de6c744cc00d0497baba02fb00a787f088c879f01d09468e32"
+"checksum cargo 0.20.0 (git+https://github.com/rust-lang/cargo)" = "<none>"
+"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
+"checksum chrono 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d9123be86fd2a8f627836c235ecdf331fdd067ecf7ac05aa1a68fbcf2429f056"
+"checksum clap 2.19.3 (registry+https://github.com/rust-lang/crates.io-index)" = "95b78f3fe0fc94c13c731714363260e04b557a637166f33a4570d3189d642374"
"checksum cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)" = "92278eb79412c8f75cfc89e707a1bb3a6490b68f7f2e78d15c774f30fe701122"
+"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
+"checksum curl 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c90e1240ef340dd4027ade439e5c7c2064dd9dc652682117bd50d1486a3add7b"
+"checksum curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "23e7e544dc5e1ba42c4a4a678bd47985e84b9c3f4d3404c29700622a029db9c3"
+"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850"
+"checksum derive-new 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "41be6ca3b99e0c0483fb2389685448f650459c3ecbe4e18d7705d8010ec4ab8e"
"checksum diff 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0a515461b6c8c08419850ced27bc29e86166dcdcde8fbe76f8b1f0589bb49472"
+"checksum docopt 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ab32ea6e284d87987066f21a9e809a73c14720571ef34516f0890b3d355ccfd8"
"checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
+"checksum either 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18785c1ba806c258137c937e44ada9ee7e69a37e3c72077542cd2f069d78562a"
+"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
+"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83"
+"checksum error-chain 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9435d864e017c3c6afeac1654189b06cdb491cf2ff73dbf0d73b0f292f42ff8"
"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
+"checksum flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)" = "36df0166e856739905cd3d7e0b210fe818592211a008862599845e012d8d304c"
+"checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d"
+"checksum fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34edaee07555859dc13ca387e6ae05686bb4d0364c95d649b6dab959511f4baf"
"checksum gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)" = "181e3cebba1d663bd92eb90e2da787e10597e027eb00de8d742b260a7850948f"
+"checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
-"checksum handlebars 0.25.2 (registry+https://github.com/rust-lang/crates.io-index)" = "663e1728d8037fb0d4e13bcd1b1909fb5d913690a9929eb385922df157c2ff8f"
+"checksum git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "9de9df4358c17e448a778d90cd0272e1dab5eae30244502333fa2001c4e24357"
+"checksum git2-curl 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "68676bc784bf0bef83278898929bf64a251e87c0340723d0b93fa096c9c5bf8e"
+"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
+"checksum hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bf088f042a467089e9baa4972f57f9247e42a0cc549ba264c7a04fbb8ecb89d4"
+"checksum handlebars 0.25.3 (registry+https://github.com/rust-lang/crates.io-index)" = "15bdf598fc3c2de40c6b340213028301c0d225eea55a2294e6cc148074e557a1"
+"checksum idna 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6ac85ec3f80c8e4e99d9325521337e14ec7555c458a14e377d189659a427f375"
+"checksum itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "4833d6978da405305126af4ac88569b5d71ff758581ce5a987dbfa3755f694fc"
"checksum itoa 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eb2f404fbc66fd9aac13e998248505e7ecb2ad8e44ab6388684c5fb11c6c251c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
-"checksum lazy_static 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4732c563b9a21a406565c4747daa7b46742f082911ae4753f390dc9ec7ee1a97"
-"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
+"checksum languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97c2985bfcbbcb0189cfa25e1c10c1ac7111df2b6214b652c690127aefdf4e5b"
+"checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf"
+"checksum libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)" = "babb8281da88cba992fa1f4ddec7d63ed96280a1a53ec9b919fd37b53d71e502"
+"checksum libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "dd89dd7196d5fa35b659c3eaf3c1b14b9bd961bfd1a07dfca49adeb8a6aa3763"
+"checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75"
+"checksum libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e5ee912a45d686d393d5ac87fac15ba0ba18daae14e8e7543c63ebf7fb7e970c"
"checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
+"checksum lzma-sys 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "fedff6a5cbb24494ec6ee4784e9ac5c187161fede04c7767d49bf87544013afa"
+"checksum matches 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "efd7622e3022e1a6eaa602c4cea8912254e5582c9c692e9167714182244801b1"
"checksum mdbook 0.0.21 (registry+https://github.com/rust-lang/crates.io-index)" = "f1e2e9d848514dcfad4195788d0d42ae5153a477c191d75d5b84fab10f222fbd"
+"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
"checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
+"checksum miniz-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "28eaee17666671fa872e567547e8428e83308ebe5808cdf6a0e28397dbe2c726"
+"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
+"checksum multimap 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9223f4774d08e06185e44e555b9a7561243d387bac49c78a6205c42d6975fbf2"
+"checksum net2 0.2.29 (registry+https://github.com/rust-lang/crates.io-index)" = "bc01404e7568680f1259aa5729539f221cb1e6d047a0d9053cab4be8a73b5d67"
+"checksum num 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "98b15ba84e910ea7a1973bccd3df7b31ae282bf9d8bd2897779950c9b8303d40"
+"checksum num-bigint 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ba6d838b16e56da1b6c383d065ff1ec3c7d7797f65a3e8f6ba7092fd87820bac"
+"checksum num-complex 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "148eb324ca772230853418731ffdf13531738b50f89b30692a01fcdcb0a64677"
+"checksum num-integer 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)" = "ef1a4bf6f9174aa5783a9b4cc892cacd11aebad6c69ad027a0b65c6ca5f8aa37"
+"checksum num-iter 0.1.33 (registry+https://github.com/rust-lang/crates.io-index)" = "f7d1891bd7b936f12349b7d1403761c8a0b85a18b148e9da4429d5d102c1a41e"
+"checksum num-rational 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "c2dc5ea04020a8f18318ae485c751f8cfa1c0e69dcf465c29ddaaa64a313cc44"
"checksum num-traits 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "e1cbfa3781f3fe73dc05321bed52a06d2d491eaa764c52335cf4399f046ece99"
"checksum num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca313f1862c7ec3e0dfe8ace9fa91b1d9cb5c84ace3d00f5ec4216238e93c167"
"checksum open 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3478ed1686bd1300c8a981a940abc92b06fac9cbef747f4c668d4e032ff7b842"
+"checksum openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)" = "bb5d1663b73d10c6a3eda53e2e9d0346f822394e7b858d7257718f65f61dfbe2"
+"checksum openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d98df0270d404ccd3c050a41d579c52d1db15375168bb3471e04ec0f5f378daf"
+"checksum openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)" = "3a5886d87d3e2a0d890bf62dc8944f5e3769a405f7e1e9ef6e517e47fd7a0897"
+"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
"checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
+"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
+"checksum psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "abcd5d1a07d360e29727f757a9decb3ce8bc6e0efa8969cfaad669a8317a2478"
"checksum pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9ab1e588ef8efd702c7ed9d2bd774db5e6f4d878bb5a1a9f371828fbdff6973"
"checksum pulldown-cmark 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1058d7bb927ca067656537eec4e02c2b4b70eaaa129664c5b90c111e20326f41"
-"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
+"checksum quick-error 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c36987d4978eb1be2e422b1e0423a557923a5c3e7e6f31d5699e9aafaefa469"
+"checksum quote 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4c5cf478fe1006dbcc72567121d23dbdae5f1632386068c5c86ff4f645628504"
+"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
+"checksum racer 2.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "b0d72b3afd67882adfca61d609fafb8d7aa5f9e814f12c32fcc6e171995920e8"
+"checksum rand 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "022e0636ec2519ddae48154b028864bdce4eaf7d35226ab8e65c611be97b189d"
+"checksum redox_syscall 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "29dbdfd4b9df8ab31dec47c6087b7b13cbf4a776f335e4de8efba8288dda075b"
+"checksum regex 0.1.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4fd4ace6a8cf7860714a2c2280d6c1f7e6a413486c13298bbc86fd3da019402f"
"checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01"
+"checksum regex-syntax 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f9ec002c35e86791825ed294b50008eea9ddfc8def4420124fbc6b08db834957"
"checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457"
+"checksum rls-analysis 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a62d88c341375c6f3f8b2e18b9b364896e7d3e7aa916907de717d0267e116506"
"checksum rls-data 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fc4277ce3c57f456b11fe3145b181a844a25201bab5cbaa1978457e6e2f27d47"
"checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a"
-"checksum rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)" = "684ce48436d6465300c9ea783b6b14c4361d6b8dcbb1375b486a69cc19e2dfb0"
-"checksum serde 0.9.11 (registry+https://github.com/rust-lang/crates.io-index)" = "a702319c807c016e51f672e5c77d6f0b46afddd744b5e437d6b8436b888b458f"
-"checksum serde_json 0.9.9 (registry+https://github.com/rust-lang/crates.io-index)" = "dbc45439552eb8fb86907a2c41c1fd0ef97458efb87ff7f878db466eb581824e"
+"checksum rls-vfs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "986eada111517bcb5a7a75205b3f2b70c82e7766653cca61a23f5afce79bdb94"
+"checksum rustc-demangle 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3058a43ada2c2d0b92b3ae38007a2d0fa5e9db971be260e0171408a4ff471c95"
+"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
+"checksum rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)" = "<none>"
+"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
+"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
+"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
+"checksum serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34b623917345a631dc9608d5194cc206b3fe6c3554cd1c75b937e55e285254af"
+"checksum serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "38a3db3a5757f68069aba764b793823ea9fb9717c42c016f8903f8add50f508a"
+"checksum serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e46ef71ee001a4279a4513e79a6ebbb59da3a4987bf77a6df2e5534cd6f21d82"
+"checksum serde_derive_internals 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)" = "021c338d22c7e30f957a6ab7e388cb6098499dda9fd4ba1661ee074ca7a180d1"
+"checksum serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c10e798e4405d7dcec3658989e35ee6706f730a9ed7c1184d5ebd84317e82f46"
+"checksum serde_json 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)" = "ad8bcf487be7d2e15d3d543f04312de991d631cfe1b43ea0ade69e6a8a5b16a1"
+"checksum serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "48b04779552e92037212c3615370f6bd57a40ebba7f20e554ff9f55e41a69a7b"
+"checksum shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "dd5cc96481d54583947bfe88bf30c23d53f883c6cd0145368b69989d97b84ef8"
+"checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
+"checksum strings 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "54f86446ab480b4f60782188f4f78886465c5793aee248cbb48b7fdc0d022420"
+"checksum strsim 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "67f84c44fbb2f91db7fef94554e6b2ac05909c9c0b0bc23bb98d3a1aebfe7f7c"
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
+"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
+"checksum syn 0.8.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6ae6fb0dcc9bd85f89a1a4adc0df2fd90c90c98849d61433983dd7a9df6363f7"
+"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
+"checksum syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9e52bffe6202cfb67587784cf23e0ec5bf26d331eef4922a16d5c42e12aa1e9b"
+"checksum syntex_errors 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "867cc5c2d7140ae7eaad2ae9e8bf39cb18a67ca651b7834f88d46ca98faadb9c"
+"checksum syntex_pos 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "955ef4b16af4c468e4680d1497f873ff288f557d338180649e18f915af5e15ac"
+"checksum syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047"
+"checksum syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "76a302e717e348aa372ff577791c3832395650073b8d8432f8b3cb170b34afde"
+"checksum syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791"
+"checksum tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ab0ef9ead2fe0aa9e18475a96a207bfd5143f4124779ef7429503a8665416ce8"
+"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
+"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
-"checksum thread-id 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4437c97558c70d129e40629a5b385b3fb1ffac301e63941335e4d354081ec14a"
+"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
+"checksum thread-id 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8df7875b676fddfadffd96deea3b1124e5ede707d4884248931077518cf1f773"
+"checksum thread_local 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8576dbbfcaef9641452d5cf0df9b0e7eeab7694956dd33bb61515fb8f18cfdd5"
"checksum thread_local 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c85048c6260d17cf486ceae3282d9fb6b90be220bf5b28c400f5485ffc29f0c7"
+"checksum time 0.1.37 (registry+https://github.com/rust-lang/crates.io-index)" = "ffd7ccbf969a892bf83f1e441126968a07a3941c24ff522a26af9f9f4585d1a3"
"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796"
-"checksum toml 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3474f3c6eaf32eedb4f4a66a26214f020f828a6d96c37e38a35e3a379bbcfd11"
-"checksum unicode-segmentation 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18127285758f0e2c6cf325bb3f3d138a12fee27de4f23e146cd6a179f26c2cf3"
+"checksum toml 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "736b60249cb25337bc196faa43ee12c705e426f3d55c214d73a4e7be06f92cb4"
+"checksum toml 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bd86ad9ebee246fdedd610e0f6d0587b754a3d81438db930a244d0480ed7878f"
+"checksum toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc5dbfb20a481e64b99eb7ae280859ec76730c7191570ba5edaa962394edb0a"
+"checksum typed-arena 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8e2f9dc90da4f9d66ffc9ad3ead2c7d57582a26f4a3292d2ce7011bd29965100"
+"checksum unicode-bidi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d3a078ebdd62c0e71a709c3d53d2af693fe09fe93fbff8344aebe289b78f9032"
+"checksum unicode-normalization 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e28fa37426fceeb5cf8f41ee273faa7c82c47dc8fba5853402841e665fcd86ff"
+"checksum unicode-segmentation 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c3bc443ded17b11305ffffe6b37e2076f328a5a8cb6aa877b1b98f77699e98b5"
+"checksum unicode-segmentation 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a8083c594e02b8ae1654ae26f0ade5158b119bd88ad0e8227a5d8fcd72407946"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
+"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
+"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
+"checksum url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5ba8a749fb4479b043733416c244fa9d1d3af3d7c23804944651c8a448cb87e"
+"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
+"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47"
+"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
-"checksum vec_map 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8cdc8b93bd0198ed872357fb2e667f7125646b1762f16d60b2c96350d361897"
+"checksum vec_map 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cac5efe5cb0fa14ec2f84f83c701c562ee63f6dcc680861b21d65c682adfb05f"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
+"checksum walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "bb08f9e670fab86099470b97cd2b252d6527f0b3cc1401acdb595ffc9dd288ff"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
+"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
+"checksum xattr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "5f04de8a1346489a2f9e9bd8526b73d135ec554227b17568456e86aa35b6f3fc"
+"checksum xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e9510bdf100731599107c61f77daf46713a69a568f75458999c1f9dbf6ba25b0"
+"checksum yaml-rust 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e66366e18dc58b46801afbf2ca7661a9f59cc8c5962c29892b6039b4f86fa992"
"tools/build-manifest",
"tools/remote-test-client",
"tools/remote-test-server",
-]
-
-# These projects have their own Cargo.lock
-exclude = [
+ "tools/rust-installer",
"tools/cargo",
"tools/rls",
]
[profile.test]
debug = false
debug-assertions = false
+
+[replace]
+"https://github.com/rust-lang/cargo#0.20.0" = { path = "tools/cargo" }
use std::process::{Command, ExitStatus};
fn main() {
- let args = env::args_os().skip(1).collect::<Vec<_>>();
+ let mut args = env::args_os().skip(1).collect::<Vec<_>>();
+
+ // Append metadata suffix for internal crates. See the corresponding entry
+ // in bootstrap/lib.rs for details.
+ if let Ok(s) = env::var("RUSTC_METADATA_SUFFIX") {
+ for i in 1..args.len() {
+            // Stage the replacement in `new` to avoid borrowing `args`
+            // both immutably (to inspect it) and mutably (to update it).
+ let mut new = None;
+ if let Some(current_as_str) = args[i].to_str() {
+ if (&*args[i - 1] == "-C" && current_as_str.starts_with("metadata")) ||
+ current_as_str.starts_with("-Cmetadata") {
+ new = Some(format!("{}-{}", current_as_str, s));
+ }
+ }
+ if let Some(new) = new { args[i] = new.into(); }
+ }
+ }
+
// Detect whether or not we're a build script depending on whether --target
// is passed (a bit janky...)
let target = args.windows(2)
import datetime
import hashlib
import os
+import re
import shutil
import subprocess
import sys
shutil.move(tp, fp)
shutil.rmtree(os.path.join(dst, fname))
-def run(args, verbose=False, exception=False):
+def run(args, verbose=False, exception=False, cwd=None):
if verbose:
print("running: " + ' '.join(args))
sys.stdout.flush()
# Use Popen here instead of call() as it apparently allows powershell on
# Windows to not lock up waiting for input presumably.
- ret = subprocess.Popen(args)
+ ret = subprocess.Popen(args, cwd=cwd)
code = ret.wait()
if code != 0:
err = "failed to run: " + ' '.join(args)
def get_toml(self, key):
for line in self.config_toml.splitlines():
- if line.startswith(key + ' ='):
- return self.get_string(line)
+ match = re.match(r'^{}\s*=(.*)$'.format(key), line)
+ if match is not None:
+ value = match.group(1)
+ return self.get_string(value) or value.strip()
return None
def get_mk(self, key):
def get_string(self, line):
start = line.find('"')
+ if start == -1:
+ return None
end = start + 1 + line[start + 1:].find('"')
return line[start + 1:end]
args.append("--frozen")
self.run(args, env)
- def run(self, args, env):
- proc = subprocess.Popen(args, env=env)
+ def run(self, args, env=None, cwd=None):
+ proc = subprocess.Popen(args, env=env, cwd=cwd)
ret = proc.wait()
if ret != 0:
sys.exit(ret)
+ def output(self, args, env=None, cwd=None):
+ proc = subprocess.Popen(args, stdout=subprocess.PIPE, env=env, cwd=cwd)
+ (out, err) = proc.communicate()
+ ret = proc.wait()
+ if ret != 0:
+ print(out)
+ sys.exit(ret)
+ return out
+
def build_triple(self):
default_encoding = sys.getdefaultencoding()
config = self.get_toml('build')
return "{}-{}".format(cputype, ostype)
+ def update_submodules(self):
+ if (not os.path.exists(os.path.join(self.rust_root, ".git"))) or \
+ self.get_toml('submodules') == "false" or \
+ self.get_mk('CFG_DISABLE_MANAGE_SUBMODULES') == "1":
+ return
+
+ print('Updating submodules')
+ output = self.output(["git", "submodule", "status"], cwd=self.rust_root)
+ submodules = []
+ for line in output.splitlines():
+ # NOTE `git submodule status` output looks like this:
+ #
+ # -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
+ # +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
+ # e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
+ #
+            # The first character can be '-', '+' or ' ' and denotes the
+            # state of the submodule. Right next to this character is the
+            # SHA-1 of the submodule's HEAD, and after that comes the path
+            # to the submodule.
+ path = line[1:].split(' ')[1]
+ submodules.append([path, line[0]])
+
+ self.run(["git", "submodule", "sync"], cwd=self.rust_root)
+
+ for submod in submodules:
+ path, status = submod
+ if path.endswith(b"llvm") and \
+ (self.get_toml('llvm-config') or self.get_mk('CFG_LLVM_ROOT')):
+ continue
+ if path.endswith(b"jemalloc") and \
+ (self.get_toml('jemalloc') or self.get_mk('CFG_JEMALLOC_ROOT')):
+ continue
+ submod_path = os.path.join(self.rust_root, path)
+
+ if status == ' ':
+ self.run(["git", "reset", "--hard"], cwd=submod_path)
+ self.run(["git", "clean", "-fdx"], cwd=submod_path)
+ elif status == '+':
+ self.run(["git", "submodule", "update", path], cwd=self.rust_root)
+ self.run(["git", "reset", "--hard"], cwd=submod_path)
+ self.run(["git", "clean", "-fdx"], cwd=submod_path)
+ elif status == '-':
+ self.run(["git", "submodule", "init", path], cwd=self.rust_root)
+ self.run(["git", "submodule", "update", path], cwd=self.rust_root)
+ else:
+ raise ValueError('unknown submodule status: ' + status)
+
def bootstrap():
parser = argparse.ArgumentParser(description='Build rust')
parser.add_argument('--config')
else:
rb._download_url = 'https://static.rust-lang.org'
+ rb.update_submodules()
+
# Fetch/build the bootstrap
rb.build = rb.build_triple()
rb.download_stage0()
// Fallback musl-root for all targets
pub musl_root: Option<PathBuf>,
pub prefix: Option<PathBuf>,
+ pub sysconfdir: Option<PathBuf>,
pub docdir: Option<PathBuf>,
+ pub bindir: Option<PathBuf>,
pub libdir: Option<PathBuf>,
pub libdir_relative: Option<PathBuf>,
pub mandir: Option<PathBuf>,
#[derive(RustcDecodable, Default, Clone)]
struct Install {
prefix: Option<String>,
- mandir: Option<String>,
+ sysconfdir: Option<String>,
docdir: Option<String>,
+ bindir: Option<String>,
libdir: Option<String>,
+ mandir: Option<String>,
}
/// TOML representation of how the LLVM build is configured.
if let Some(ref install) = toml.install {
config.prefix = install.prefix.clone().map(PathBuf::from);
- config.mandir = install.mandir.clone().map(PathBuf::from);
+ config.sysconfdir = install.sysconfdir.clone().map(PathBuf::from);
config.docdir = install.docdir.clone().map(PathBuf::from);
+ config.bindir = install.bindir.clone().map(PathBuf::from);
config.libdir = install.libdir.clone().map(PathBuf::from);
+ config.mandir = install.mandir.clone().map(PathBuf::from);
}
if let Some(ref llvm) = toml.llvm {
"CFG_PREFIX" => {
self.prefix = Some(PathBuf::from(value));
}
+ "CFG_SYSCONFDIR" => {
+ self.sysconfdir = Some(PathBuf::from(value));
+ }
"CFG_DOCDIR" => {
self.docdir = Some(PathBuf::from(value));
}
+ "CFG_BINDIR" => {
+ self.bindir = Some(PathBuf::from(value));
+ }
"CFG_LIBDIR" => {
self.libdir = Some(PathBuf::from(value));
}
# Instead of installing to /usr/local, install to this path instead.
#prefix = "/usr/local"
+# Where to install system configuration files
+# If this is a relative path, it will get installed in `prefix` above
+#sysconfdir = "/etc"
+
+# Where to install documentation in `prefix` above
+#docdir = "share/doc/rust"
+
+# Where to install binaries in `prefix` above
+#bindir = "bin"
+
# Where to install libraries in `prefix` above
#libdir = "lib"
# Where to install man pages in `prefix` above
#mandir = "share/man"
-# Where to install documentation in `prefix` above
-#docdir = "share/doc/rust"
-
# =============================================================================
# Options for compiling Rust code itself
# =============================================================================
use build_helper::output;
-#[cfg(not(target_os = "solaris"))]
-const SH_CMD: &'static str = "sh";
-// On Solaris, sh is the historical bourne shell, not a POSIX shell, or bash.
-#[cfg(target_os = "solaris")]
-const SH_CMD: &'static str = "bash";
-
use {Build, Compiler, Mode};
use channel;
use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};
build.out.join("tmp/dist")
}
+fn rust_installer(build: &Build) -> Command {
+ build.tool_cmd(&Compiler::new(0, &build.config.build), "rust-installer")
+}
+
/// Builds the `rust-docs` installer component.
///
/// Slurps up documentation from the `stage`'s `host`.
let src = build.out.join(host).join("doc");
cp_r(&src, &dst);
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust-Documentation")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-documentation-is-installed.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rust-docs")
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg(host);
build.run(&mut cmd);
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust-MinGW")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-MinGW-is-installed.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rust-mingw")
.arg("--legacy-manifest-dirs=rustlib,cargo");
}
// Finally, wrap everything up in a nice tarball!
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
- .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
+ .arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, host))
.arg("--component-name=rustc")
.arg("--legacy-manifest-dirs=rustlib,cargo");
let src = build.sysroot(compiler).join("lib/rustlib");
cp_r(&src.join(target), &dst);
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=std-is-standing-at-the-ready.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-std-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
println!("image_src: {:?}, dst: {:?}", image_src, dst);
cp_r(&image_src, &dst);
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=save-analysis-saved.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", name, target))
.arg(format!("--component-name=rust-analysis-{}", target))
.arg("--legacy-manifest-dirs=rustlib,cargo");
write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
// Create plain source tarball
- let tarball = rust_src_location(build);
+ let mut tarball = rust_src_location(build);
+ tarball.set_extension(""); // strip .gz
+ tarball.set_extension(""); // strip .tar
if let Some(dir) = tarball.parent() {
t!(fs::create_dir_all(dir));
}
- let mut cmd = Command::new("tar");
- cmd.arg("-czf").arg(sanitize_sh(&tarball))
- .arg(&plain_name)
+ let mut cmd = rust_installer(build);
+ cmd.arg("tarball")
+ .arg("--input").arg(&plain_name)
+ .arg("--output").arg(&tarball)
+ .arg("--work-dir=.")
.current_dir(tmpdir(build));
build.run(&mut cmd);
}
// Create source tarball in rust-installer format
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Awesome-Source.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}", name))
.arg("--component-name=rust-src")
.arg("--legacy-manifest-dirs=rustlib,cargo");
// Prepare the image directory
t!(fs::create_dir_all(image.join("share/zsh/site-functions")));
- t!(fs::create_dir_all(image.join("etc/bash_completions.d")));
+ t!(fs::create_dir_all(image.join("etc/bash_completion.d")));
let cargo = build.cargo_out(&compiler, Mode::Tool, target)
.join(exe("cargo", target));
install(&cargo, &image.join("bin"), 0o755);
}
install(&etc.join("_cargo"), &image.join("share/zsh/site-functions"), 0o644);
copy(&etc.join("cargo.bashcomp.sh"),
- &image.join("etc/bash_completions.d/cargo"));
+ &image.join("etc/bash_completion.d/cargo"));
let doc = image.join("share/doc/cargo");
install(&src.join("README.md"), &doc, 0o644);
install(&src.join("LICENSE-MIT"), &doc, 0o644);
t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
// Generate the installer tarball
- let mut cmd = Command::new("sh");
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
- .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
+ .arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--component-name=cargo")
.arg("--legacy-manifest-dirs=rustlib,cargo");
t!(t!(File::create(overlay.join("version"))).write_all(version.as_bytes()));
// Generate the installer tarball
- let mut cmd = Command::new("sh");
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("generate")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=RLS-ready-to-serve.")
- .arg(format!("--image-dir={}", sanitize_sh(&image)))
- .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build))))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
- .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)))
+ .arg("--image-dir").arg(&image)
+ .arg("--work-dir").arg(&tmpdir(build))
+ .arg("--output-dir").arg(&distdir(build))
+ .arg("--non-installed-overlay").arg(&overlay)
.arg(format!("--package-name={}-{}", name, target))
.arg("--component-name=rls")
.arg("--legacy-manifest-dirs=rustlib,cargo");
// upgrades rustc was upgraded before rust-std. To avoid rustc clobbering
// the std files during uninstall. To do this ensure that rustc comes
// before rust-std in the list below.
- let mut input_tarballs = format!("{},{},{},{},{},{}",
- sanitize_sh(&rustc_installer),
- sanitize_sh(&cargo_installer),
- sanitize_sh(&rls_installer),
- sanitize_sh(&analysis_installer),
- sanitize_sh(&docs_installer),
- sanitize_sh(&std_installer));
+ let mut tarballs = vec![rustc_installer, cargo_installer, rls_installer,
+ analysis_installer, docs_installer, std_installer];
if target.contains("pc-windows-gnu") {
- input_tarballs.push_str(",");
- input_tarballs.push_str(&sanitize_sh(&mingw_installer));
+ tarballs.push(mingw_installer);
+ }
+ let mut input_tarballs = tarballs[0].as_os_str().to_owned();
+ for tarball in &tarballs[1..] {
+ input_tarballs.push(",");
+ input_tarballs.push(tarball);
}
- let mut cmd = Command::new(SH_CMD);
- cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/combine-installers.sh")))
+ let mut cmd = rust_installer(build);
+ cmd.arg("combine")
.arg("--product-name=Rust")
.arg("--rel-manifest-dir=rustlib")
.arg("--success-message=Rust-is-ready-to-roll.")
- .arg(format!("--work-dir={}", sanitize_sh(&work)))
- .arg(format!("--output-dir={}", sanitize_sh(&distdir(build))))
+ .arg("--work-dir").arg(&work)
+ .arg("--output-dir").arg(&distdir(build))
.arg(format!("--package-name={}-{}", pkgname(build, "rust"), target))
.arg("--legacy-manifest-dirs=rustlib,cargo")
- .arg(format!("--input-tarballs={}", input_tarballs))
- .arg(format!("--non-installed-overlay={}", sanitize_sh(&overlay)));
+ .arg("--input-tarballs").arg(input_tarballs)
+ .arg("--non-installed-overlay").arg(&overlay);
build.run(&mut cmd);
let mut license = String::new();
/// Installs everything.
pub fn install(build: &Build, stage: u32, host: &str) {
let prefix_default = PathBuf::from("/usr/local");
+ let sysconfdir_default = PathBuf::from("/etc");
let docdir_default = PathBuf::from("share/doc/rust");
- let mandir_default = PathBuf::from("share/man");
+ let bindir_default = PathBuf::from("bin");
let libdir_default = PathBuf::from("lib");
+ let mandir_default = PathBuf::from("share/man");
let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
+ let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
+ let bindir = build.config.bindir.as_ref().unwrap_or(&bindir_default);
let libdir = build.config.libdir.as_ref().unwrap_or(&libdir_default);
let mandir = build.config.mandir.as_ref().unwrap_or(&mandir_default);
+ let sysconfdir = prefix.join(sysconfdir);
let docdir = prefix.join(docdir);
+ let bindir = prefix.join(bindir);
let libdir = prefix.join(libdir);
let mandir = prefix.join(mandir);
let destdir = env::var_os("DESTDIR").map(PathBuf::from);
let prefix = add_destdir(&prefix, &destdir);
+ let sysconfdir = add_destdir(&sysconfdir, &destdir);
let docdir = add_destdir(&docdir, &destdir);
+ let bindir = add_destdir(&bindir, &destdir);
let libdir = add_destdir(&libdir, &destdir);
let mandir = add_destdir(&mandir, &destdir);
t!(fs::create_dir_all(&empty_dir));
if build.config.docs {
install_sh(&build, "docs", "rust-docs", &build.rust_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
for target in build.config.target.iter() {
install_sh(&build, "std", "rust-std", &build.rust_package_vers(),
- stage, target, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, target, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
if build.config.extended {
install_sh(&build, "cargo", "cargo", &build.cargo_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
install_sh(&build, "rls", "rls", &build.rls_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
}
install_sh(&build, "rustc", "rustc", &build.rust_package_vers(),
- stage, host, &prefix, &docdir, &libdir, &mandir, &empty_dir);
+ stage, host, &prefix, &sysconfdir, &docdir, &bindir, &libdir,
+ &mandir, &empty_dir);
t!(fs::remove_dir_all(&empty_dir));
}
fn install_sh(build: &Build, package: &str, name: &str, version: &str, stage: u32, host: &str,
- prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) {
+ prefix: &Path, sysconfdir: &Path, docdir: &Path, bindir: &Path, libdir: &Path,
+ mandir: &Path, empty_dir: &Path) {
println!("Install {} stage{} ({})", package, stage, host);
let package_name = format!("{}-{}-{}", name, version, host);
cmd.current_dir(empty_dir)
.arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh")))
.arg(format!("--prefix={}", sanitize_sh(prefix)))
+ .arg(format!("--sysconfdir={}", sanitize_sh(sysconfdir)))
.arg(format!("--docdir={}", sanitize_sh(docdir)))
+ .arg(format!("--bindir={}", sanitize_sh(bindir)))
.arg(format!("--libdir={}", sanitize_sh(libdir)))
.arg(format!("--mandir={}", sanitize_sh(mandir)))
.arg("--disable-ldconfig");
use std::ffi::OsString;
use std::fs::{self, File};
use std::io::Read;
-use std::path::{Component, PathBuf, Path};
+use std::path::{PathBuf, Path};
use std::process::Command;
use build_helper::{run_silent, run_suppressed, output, mtime};
self.verbose(&format!("auto-detected local-rebuild {}", local_release));
self.local_rebuild = true;
}
- self.verbose("updating submodules");
- self.update_submodules();
self.verbose("learning about cargo");
metadata::build(self);
step::run(self);
}
- /// Updates all git submodules that we have.
- ///
- /// This will detect if any submodules are out of date an run the necessary
- /// commands to sync them all with upstream.
- fn update_submodules(&self) {
- struct Submodule<'a> {
- path: &'a Path,
- state: State,
- }
-
- enum State {
- // The submodule may have staged/unstaged changes
- MaybeDirty,
- // Or could be initialized but never updated
- NotInitialized,
- // The submodule, itself, has extra commits but those changes haven't been commited to
- // the (outer) git repository
- OutOfSync,
- }
-
- if !self.src_is_git || !self.config.submodules {
- return
- }
- let git = || {
- let mut cmd = Command::new("git");
- cmd.current_dir(&self.src);
- return cmd
- };
- let git_submodule = || {
- let mut cmd = Command::new("git");
- cmd.current_dir(&self.src).arg("submodule");
- return cmd
- };
-
- // FIXME: this takes a seriously long time to execute on Windows and a
- // nontrivial amount of time on Unix, we should have a better way
- // of detecting whether we need to run all the submodule commands
- // below.
- let out = output(git_submodule().arg("status"));
- let mut submodules = vec![];
- for line in out.lines() {
- // NOTE `git submodule status` output looks like this:
- //
- // -5066b7dcab7e700844b0e2ba71b8af9dc627a59b src/liblibc
- // +b37ef24aa82d2be3a3cc0fe89bf82292f4ca181c src/compiler-rt (remotes/origin/..)
- // e058ca661692a8d01f8cf9d35939dfe3105ce968 src/jemalloc (3.6.0-533-ge058ca6)
- //
- // The first character can be '-', '+' or ' ' and denotes the `State` of the submodule
- // Right next to this character is the SHA-1 of the submodule HEAD
- // And after that comes the path to the submodule
- let path = Path::new(line[1..].split(' ').skip(1).next().unwrap());
- let state = if line.starts_with('-') {
- State::NotInitialized
- } else if line.starts_with('+') {
- State::OutOfSync
- } else if line.starts_with(' ') {
- State::MaybeDirty
- } else {
- panic!("unexpected git submodule state: {:?}", line.chars().next());
- };
-
- submodules.push(Submodule { path: path, state: state })
- }
-
- self.run(git_submodule().arg("sync"));
-
- for submodule in submodules {
- // If using llvm-root then don't touch the llvm submodule.
- if submodule.path.components().any(|c| c == Component::Normal("llvm".as_ref())) &&
- self.config.target_config.get(&self.config.build)
- .and_then(|c| c.llvm_config.as_ref()).is_some()
- {
- continue
- }
-
- if submodule.path.components().any(|c| c == Component::Normal("jemalloc".as_ref())) &&
- !self.config.use_jemalloc
- {
- continue
- }
-
- // `submodule.path` is the relative path to a submodule (from the repository root)
- // `submodule_path` is the path to a submodule from the cwd
-
- // use `submodule.path` when e.g. executing a submodule specific command from the
- // repository root
- // use `submodule_path` when e.g. executing a normal git command for the submodule
- // (set via `current_dir`)
- let submodule_path = self.src.join(submodule.path);
-
- match submodule.state {
- State::MaybeDirty => {
- // drop staged changes
- self.run(git().current_dir(&submodule_path)
- .args(&["reset", "--hard"]));
- // drops unstaged changes
- self.run(git().current_dir(&submodule_path)
- .args(&["clean", "-fdx"]));
- },
- State::NotInitialized => {
- self.run(git_submodule().arg("init").arg(submodule.path));
- self.run(git_submodule().arg("update").arg(submodule.path));
- },
- State::OutOfSync => {
- // drops submodule commits that weren't reported to the (outer) git repository
- self.run(git_submodule().arg("update").arg(submodule.path));
- self.run(git().current_dir(&submodule_path)
- .args(&["reset", "--hard"]));
- self.run(git().current_dir(&submodule_path)
- .args(&["clean", "-fdx"]));
- },
- }
- }
- }
-
/// Clear out `dir` if `input` is newer.
///
/// After this executes, it will also ensure that `dir` exists.
.env("RUSTDOC_REAL", self.rustdoc(compiler))
.env("RUSTC_FLAGS", self.rustc_flags(target).join(" "));
- // Tools don't get debuginfo right now, e.g. cargo and rls don't get
- // compiled with debuginfo.
if mode != Mode::Tool {
- cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
- .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
- .env("RUSTC_FORCE_UNSTABLE", "1");
+ // Tools don't get debuginfo right now, e.g. cargo and rls don't
+ // get compiled with debuginfo.
+ cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string())
+ .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string())
+ .env("RUSTC_FORCE_UNSTABLE", "1");
+
+ // Currently the compiler depends on crates from crates.io, and
+ // then other crates can depend on the compiler (e.g. proc-macro
+ // crates). Let's say, for example that rustc itself depends on the
+ // bitflags crate. If an external crate then depends on the
+ // bitflags crate as well, we need to make sure they don't
+ // conflict, even if they pick the same verison of bitflags. We'll
+ // want to make sure that e.g. a plugin and rustc each get their
+ // own copy of bitflags.
+
+ // Cargo ensures that this works in general through the -C metadata
+ // flag. This flag will frob the symbols in the binary to make sure
+ // they're different, even though the source code is the exact
+ // same. To solve this problem for the compiler we extend Cargo's
+ // already-passed -C metadata flag with our own. Our rustc.rs
+ // wrapper around the actual rustc will detect -C metadata being
+ // passed and frob it with this extra string we're passing in.
+ cargo.env("RUSTC_METADATA_SUFFIX", "rustc");
}
// Enable usage of unstable features
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
+ rules.build("tool-rust-installer", "src/tools/rust-installer")
+ .dep(|s| s.name("maybe-clean-tools"))
+ .dep(|s| s.name("libstd-tool"))
+ .run(move |s| compile::tool(build, s.stage, s.target, "rust-installer"));
rules.build("tool-cargo", "src/tools/cargo")
.host(true)
.default(build.config.extended)
.host(true)
.only_host_build(true)
.default(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::rustc(build, s.stage, s.target));
rules.dist("dist-std", "src/libstd")
.dep(move |s| {
})
.default(true)
.only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::std(build, &s.compiler(), s.target));
rules.dist("dist-mingw", "path/to/nowhere")
.default(true)
.only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| {
if s.target.contains("pc-windows-gnu") {
dist::mingw(build, s.target)
.host(true)
.only_build(true)
.only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |_| dist::rust_src(build));
rules.dist("dist-docs", "src/doc")
.default(true)
.only_host_build(true)
.dep(|s| s.name("default:doc"))
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::docs(build, s.stage, s.target));
rules.dist("dist-analysis", "analysis")
.default(build.config.extended)
.dep(|s| s.name("dist-std"))
.only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::analysis(build, &s.compiler(), s.target));
rules.dist("dist-rls", "rls")
.host(true)
.only_host_build(true)
.dep(|s| s.name("tool-rls"))
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::rls(build, s.stage, s.target));
rules.dist("install", "path/to/nowhere")
.dep(|s| s.name("default:dist"))
.host(true)
.only_host_build(true)
.dep(|s| s.name("tool-cargo"))
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::cargo(build, s.stage, s.target));
rules.dist("dist-extended", "extended")
.default(build.config.extended)
.dep(|d| d.name("dist-cargo"))
.dep(|d| d.name("dist-rls"))
.dep(|d| d.name("dist-analysis"))
+ .dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::extended(build, s.stage, s.target));
rules.dist("dist-sign", "hash-and-sign")
rules.verify();
return rules;
+
+ /// Helper to depend on a stage0 build-only rust-installer tool.
+ fn tool_rust_installer<'a>(build: &'a Build, step: &Step<'a>) -> Step<'a> {
+ step.name("tool-rust-installer")
+ .host(&build.config.build)
+ .target(&build.config.build)
+ .stage(0)
+ }
}
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
Images will output artifacts in an `obj` dir at the root of a repository.
+## Filesystem layout
+
+- Each directory, excluding `scripts` and `disabled`, corresponds to a docker image
+- `scripts` contains files shared by docker images
+- `disabled` contains images that are not built on travis
+
## Cross toolchains
A number of these images take quite a long time to compile as they're building
+++ /dev/null
-#!/bin/sh
-# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-URL=https://dl.google.com/android/repository
-
-download_ndk() {
- mkdir -p /android/ndk
- cd /android/ndk
- curl -O $URL/$1
- unzip -q $1
- rm $1
- mv android-ndk-* ndk
-}
-
-make_standalone_toolchain() {
- # See https://developer.android.com/ndk/guides/standalone_toolchain.htm
- python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
- --install-dir /android/ndk/$1-$2 \
- --arch $1 \
- --api $2
-}
-
-remove_ndk() {
- rm -rf /android/ndk/ndk
-}
RUN apt-get update && \
apt-get install -y --no-install-recommends \
+ ca-certificates \
+ cmake \
+ curl \
+ file \
g++ \
+ git \
+ libssl-dev \
make \
- file \
- curl \
- ca-certificates \
+ pkg-config \
python2.7 \
- git \
- cmake \
- unzip \
sudo \
- xz-utils \
- libssl-dev \
- pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- dpkg -i dumb-init_*.deb && \
- rm dumb-init_*.deb
+ unzip \
+ xz-utils
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
-# Install NDK
-COPY install-ndk.sh /tmp
-RUN . /tmp/install-ndk.sh && \
- download_ndk android-ndk-r13b-linux-x86_64.zip && \
- make_standalone_toolchain arm 9 && \
- remove_ndk
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
+ download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm 9
-# Install SDK
+# sdk
RUN dpkg --add-architecture i386 && \
apt-get update && \
apt-get install -y --no-install-recommends \
- openjdk-9-jre-headless \
- tzdata \
- libstdc++6:i386 \
libgl1-mesa-glx \
- libpulse0
+ libpulse0 \
+ libstdc++6:i386 \
+ openjdk-9-jre-headless \
+ tzdata
-COPY install-sdk.sh /tmp
-RUN . /tmp/install-sdk.sh && \
- download_sdk tools_r25.2.5-linux.zip && \
- download_sysimage armeabi-v7a 18 && \
- create_avd armeabi-v7a 18
+COPY scripts/android-sdk.sh /scripts/
+RUN . /scripts/android-sdk.sh && \
+ download_and_create_avd tools_r25.2.5-linux.zip armeabi-v7a 18
-# Setup env
+# env
ENV PATH=$PATH:/android/sdk/tools
ENV PATH=$PATH:/android/sdk/platform-tools
--target=$TARGETS \
--arm-linux-androideabi-ndk=/android/ndk/arm-9
-ENV SCRIPT python2.7 ../x.py test --target $TARGETS --verbose
+ENV SCRIPT python2.7 ../x.py test --target $TARGETS
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
-# Entrypoint
-COPY start-emulator.sh /android/
-ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
+# init
+COPY scripts/android-start-emulator.sh /scripts/
+ENTRYPOINT ["/usr/bin/dumb-init", "--", "/scripts/android-start-emulator.sh"]
+++ /dev/null
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-URL=https://dl.google.com/android/repository
-
-download_ndk() {
- mkdir -p /android/ndk
- cd /android/ndk
- curl -O $URL/$1
- unzip -q $1
- rm $1
- mv android-ndk-* ndk
-}
-
-make_standalone_toolchain() {
- # See https://developer.android.com/ndk/guides/standalone_toolchain.html
- python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
- --install-dir /android/ndk/$1-$2 \
- --arch $1 \
- --api $2
-}
-
-remove_ndk() {
- rm -rf /android/ndk/ndk
-}
+++ /dev/null
-#!/bin/sh
-# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-URL=https://dl.google.com/android/repository
-
-download_sdk() {
- mkdir -p /android/sdk
- cd /android/sdk
- curl -O $URL/$1
- unzip -q $1
- rm -rf $1
-}
-
-download_sysimage() {
- # See https://developer.android.com/studio/tools/help/android.html
- abi=$1
- api=$2
-
- filter="platform-tools,android-$api"
- filter="$filter,sys-img-$abi-android-$api"
-
- # Keep printing yes to accept the licenses
- while true; do echo yes; sleep 10; done | \
- /android/sdk/tools/android update sdk -a --no-ui \
- --filter "$filter"
-}
-
-create_avd() {
- # See https://developer.android.com/studio/tools/help/android.html
- abi=$1
- api=$2
-
- echo no | \
- /android/sdk/tools/android create avd \
- --name $abi-$api \
- --target android-$api \
- --abi $abi
-}
-
+++ /dev/null
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-# Setting SHELL to a file instead on a symlink helps android
-# emulator identify the system
-export SHELL=/bin/bash
-
-# Using the default qemu2 engine makes time::tests::since_epoch fails because
-# the emulator date is set to unix epoch (in armeabi-v7a-18 image). Using
-# classic engine the emulator starts with the current date and the tests run
-# fine. If another image is used, this need to be evaluated again.
-nohup nohup emulator @armeabi-v7a-18 \
- -engine classic -no-window -partition-size 2047 0<&- &>/dev/null &
-
-exec "$@"
# The `vexpress_config` config file was a previously generated config file for
# the kernel. This file was generated by running `make vexpress_defconfig`
# followed by `make menuconfig` and then enabling the IPv6 protocol page.
-COPY vexpress_config /build/.config
+COPY armhf-gnu/vexpress_config /build/.config
RUN curl https://cdn.kernel.org/pub/linux/kernel/v4.x/linux-4.4.42.tar.xz | \
tar xJf - && \
cd /build/linux-4.4.42 && \
# Copy over our init script, which starts up our test server and also a few
# other misc tasks.
-COPY rcS rootfs/etc/init.d/rcS
+COPY armhf-gnu/rcS rootfs/etc/init.d/rcS
RUN chmod +x rootfs/etc/init.d/rcS
# Helper to quickly fill the entropy pool in the kernel.
-COPY addentropy.c /tmp/
+COPY armhf-gnu/addentropy.c /tmp/
RUN arm-linux-gnueabihf-gcc addentropy.c -o rootfs/addentropy -static
# TODO: What is this?!
WORKDIR /tmp
-COPY build-rumprun.sh /tmp/
+COPY cross/build-rumprun.sh /tmp/
RUN ./build-rumprun.sh
-COPY build-arm-musl.sh /tmp/
+COPY cross/build-arm-musl.sh /tmp/
RUN ./build-arm-musl.sh
# originally from
RUN apt-get update && \
apt-get install -y --no-install-recommends \
+ ca-certificates \
+ cmake \
+ curl \
+ file \
g++ \
+ git \
+ libssl-dev \
make \
- file \
- curl \
- ca-certificates \
+ pkg-config \
python2.7 \
- git \
- cmake \
- unzip \
sudo \
- xz-utils \
- libssl-dev \
- pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- dpkg -i dumb-init_*.deb && \
- rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
+ unzip \
+ xz-utils
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
-COPY android-ndk.sh /
-RUN . /android-ndk.sh && \
- download_ndk android-ndk-r13b-linux-x86_64.zip && \
- make_standalone_toolchain arm64 21 && \
- remove_ndk
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
+ download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip arm64 21
+# env
ENV PATH=$PATH:/android/ndk/arm64-21/bin
ENV DEP_Z_ROOT=/android/ndk/arm64-21/sysroot/usr/
--enable-cargo-openssl-static
ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN apt-get update && \
apt-get install -y --no-install-recommends \
+ ca-certificates \
+ cmake \
+ curl \
+ file \
g++ \
+ git \
+ libssl-dev \
make \
- file \
- curl \
- ca-certificates \
+ pkg-config \
python2.7 \
- git \
- cmake \
- unzip \
sudo \
- xz-utils \
- libssl-dev \
- pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- dpkg -i dumb-init_*.deb && \
- rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
+ unzip \
+ xz-utils
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
-COPY android-ndk.sh /
-RUN . /android-ndk.sh && \
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain arm 9 && \
make_standalone_toolchain arm 21 && \
remove_ndk
+RUN chmod 777 /android/ndk && \
+ ln -s /android/ndk/arm-21 /android/ndk/arm
+
+# env
ENV PATH=$PATH:/android/ndk/arm-9/bin
ENV DEP_Z_ROOT=/android/ndk/arm-9/sysroot/usr/
# level 9), the default linker behavior is to generate an error, to allow the
# build to finish we use --warn-unresolved-symbols. Note that the missing
# symbols does not affect std, only the compiler (llvm) and cargo (openssl).
-RUN chmod 777 /android/ndk && \
- ln -s /android/ndk/arm-21 /android/ndk/arm
-
ENV SCRIPT \
python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
(export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
rm /android/ndk/arm && \
ln -s /android/ndk/arm-9 /android/ndk/arm && \
python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN apt-get update && \
apt-get install -y --no-install-recommends \
+ ca-certificates \
+ cmake \
+ curl \
+ file \
g++ \
+ git \
+ libssl-dev \
make \
- file \
- curl \
- ca-certificates \
+ pkg-config \
python2.7 \
- git \
- cmake \
- unzip \
sudo \
- xz-utils \
- libssl-dev \
- pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- dpkg -i dumb-init_*.deb && \
- rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
+ unzip \
+ xz-utils
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
-COPY android-ndk.sh /
-RUN . /android-ndk.sh && \
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain x86 9 && \
make_standalone_toolchain x86 21 && \
remove_ndk
+RUN chmod 777 /android/ndk && \
+ ln -s /android/ndk/x86-21 /android/ndk/x86
+
+# env
ENV PATH=$PATH:/android/ndk/x86-9/bin
ENV DEP_Z_ROOT=/android/ndk/x86-9/sysroot/usr/
# level 9), the default linker behavior is to generate an error, to allow the
# build to finish we use --warn-unresolved-symbols. Note that the missing
# symbols does not affect std, only the compiler (llvm) and cargo (openssl).
-RUN chmod 777 /android/ndk && \
- ln -s /android/ndk/x86-21 /android/ndk/x86
-
ENV SCRIPT \
python2.7 ../x.py build src/llvm --host $HOSTS --target $HOSTS && \
(export RUSTFLAGS="\"-C link-arg=-Wl,--warn-unresolved-symbols\""; \
rm /android/ndk/x86 && \
ln -s /android/ndk/x86-9 /android/ndk/x86 && \
python2.7 ../x.py dist --host $HOSTS --target $HOSTS)
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN apt-get update && \
apt-get install -y --no-install-recommends \
+ ca-certificates \
+ cmake \
+ curl \
+ file \
g++ \
+ git \
+ libssl-dev \
make \
- file \
- curl \
- ca-certificates \
+ pkg-config \
python2.7 \
- git \
- cmake \
- unzip \
sudo \
- xz-utils \
- libssl-dev \
- pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- dpkg -i dumb-init_*.deb && \
- rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
+ unzip \
+ xz-utils
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
-COPY android-ndk.sh /
-RUN . /android-ndk.sh && \
- download_ndk android-ndk-r13b-linux-x86_64.zip && \
- make_standalone_toolchain x86_64 21 && \
- remove_ndk
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
+ download_and_make_toolchain android-ndk-r13b-linux-x86_64.zip x86_64 21
+# env
ENV PATH=$PATH:/android/ndk/x86_64-21/bin
ENV DEP_Z_ROOT=/android/ndk/x86_64-21/sysroot/usr/
--enable-cargo-openssl-static
ENV SCRIPT python2.7 ../x.py dist --target $HOSTS --host $HOSTS
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
USER rustbuild
WORKDIR /tmp
-COPY aarch64-linux-gnu.config build-toolchains.sh /tmp/
+COPY dist-aarch64-linux/aarch64-linux-gnu.config dist-aarch64-linux/build-toolchains.sh /tmp/
RUN ./build-toolchains.sh
USER root
RUN apt-get update && \
apt-get install -y --no-install-recommends \
+ ca-certificates \
+ cmake \
+ curl \
+ file \
g++ \
+ git \
+ libssl-dev \
make \
- file \
- curl \
- ca-certificates \
+ pkg-config \
python2.7 \
- git \
- cmake \
- unzip \
sudo \
- xz-utils \
- libssl-dev \
- pkg-config
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- dpkg -i dumb-init_*.deb && \
- rm dumb-init_*.deb
-
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
+ unzip \
+ xz-utils
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
-# Install NDK
-COPY install-ndk.sh /tmp
-RUN . /tmp/install-ndk.sh && \
+# ndk
+COPY scripts/android-ndk.sh /scripts/
+RUN . /scripts/android-ndk.sh && \
download_ndk android-ndk-r13b-linux-x86_64.zip && \
make_standalone_toolchain arm 9 && \
make_standalone_toolchain x86 9 && \
make_standalone_toolchain x86_64 21 && \
remove_ndk
+# env
ENV TARGETS=arm-linux-androideabi
ENV TARGETS=$TARGETS,armv7-linux-androideabi
ENV TARGETS=$TARGETS,i686-linux-android
--x86_64-linux-android-ndk=/android/ndk/x86_64-21
ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
+
+# cache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+++ /dev/null
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-URL=https://dl.google.com/android/repository
-
-download_ndk() {
- mkdir -p /android/ndk
- cd /android/ndk
- curl -O $URL/$1
- unzip -q $1
- rm $1
- mv android-ndk-* ndk
-}
-
-make_standalone_toolchain() {
- # See https://developer.android.com/ndk/guides/standalone_toolchain.html
- python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
- --install-dir /android/ndk/$1-$2 \
- --arch $1 \
- --api $2
-}
-
-remove_ndk() {
- rm -rf /android/ndk/ndk
-}
USER rustbuild
WORKDIR /tmp
-COPY arm-linux-gnueabi.config build-toolchains.sh /tmp/
+COPY dist-arm-linux/arm-linux-gnueabi.config dist-arm-linux/build-toolchains.sh /tmp/
RUN ./build-toolchains.sh
USER root
USER rustbuild
WORKDIR /tmp
-COPY arm-linux-gnueabihf.config build-toolchains.sh /tmp/
+COPY dist-armhf-linux/arm-linux-gnueabihf.config dist-armhf-linux/build-toolchains.sh /tmp/
RUN ./build-toolchains.sh
USER root
USER rustbuild
WORKDIR /tmp
-COPY build-toolchains.sh armv7-linux-gnueabihf.config /tmp/
+COPY dist-armv7-linux/build-toolchains.sh dist-armv7-linux/armv7-linux-gnueabihf.config /tmp/
RUN ./build-toolchains.sh
USER root
tar xzf - -C /usr/local --strip-components=1
WORKDIR /tmp
-COPY shared.sh build-toolchain.sh compiler-rt-dso-handle.patch /tmp/
+COPY dist-fuchsia/shared.sh dist-fuchsia/build-toolchain.sh dist-fuchsia/compiler-rt-dso-handle.patch /tmp/
RUN /tmp/build-toolchain.sh
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
WORKDIR /build/
-COPY musl-libunwind-patch.patch build-musl.sh /build/
+COPY dist-i586-gnu-i686-musl/musl-libunwind-patch.patch dist-i586-gnu-i686-musl/build-musl.sh /build/
RUN sh /build/build-musl.sh && rm -rf /build
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
libssl-dev \
pkg-config
-COPY build-toolchain.sh /tmp/
+COPY dist-i686-freebsd/build-toolchain.sh /tmp/
RUN /tmp/build-toolchain.sh i686
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
WORKDIR /tmp
-COPY shared.sh build-binutils.sh /tmp/
+COPY dist-i686-linux/shared.sh dist-i686-linux/build-binutils.sh /tmp/
# We need a build of openssl which supports SNI to download artifacts from
# static.rust-lang.org. This'll be used to link into libcurl below (and used
# later as well), so build a copy of OpenSSL with dynamic libraries into our
# generic root.
-COPY build-openssl.sh /tmp/
+COPY dist-i686-linux/build-openssl.sh /tmp/
RUN ./build-openssl.sh
# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
-COPY build-curl.sh /tmp/
+COPY dist-i686-linux/build-curl.sh /tmp/
RUN ./build-curl.sh
# binutils < 2.22 has a bug where the 32-bit executables it generates
RUN ./build-binutils.sh
# Need a newer version of gcc than centos has to compile LLVM nowadays
-COPY build-gcc.sh /tmp/
+COPY dist-i686-linux/build-gcc.sh /tmp/
RUN ./build-gcc.sh
# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
-COPY build-python.sh /tmp/
+COPY dist-i686-linux/build-python.sh /tmp/
RUN ./build-python.sh
# Apparently CentOS 5.5 desn't have `git` in yum, but we're gonna need it for
# cloning, so download and build it here.
-COPY build-git.sh /tmp/
+COPY dist-i686-linux/build-git.sh /tmp/
RUN ./build-git.sh
# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
# only has 2.6.4, so build our own
-COPY build-cmake.sh /tmp/
+COPY dist-i686-linux/build-cmake.sh /tmp/
RUN ./build-cmake.sh
# for sanitizers, we need kernel headers files newer than the ones CentOS ships
# with so we install newer ones here
-COPY build-headers.sh /tmp/
+COPY dist-i686-linux/build-headers.sh /tmp/
RUN ./build-headers.sh
RUN curl -Lo /rustroot/dumb-init \
USER rustbuild
WORKDIR /tmp
-COPY patches/ /tmp/patches/
-COPY powerpc-linux-gnu.config build-powerpc-toolchain.sh /tmp/
+COPY dist-powerpc-linux/patches/ /tmp/patches/
+COPY dist-powerpc-linux/powerpc-linux-gnu.config dist-powerpc-linux/build-powerpc-toolchain.sh /tmp/
RUN ./build-powerpc-toolchain.sh
USER root
USER rustbuild
WORKDIR /tmp
-COPY patches/ /tmp/patches/
-COPY shared.sh powerpc64-linux-gnu.config build-powerpc64-toolchain.sh /tmp/
+COPY dist-powerpc64-linux/patches/ /tmp/patches/
+COPY dist-powerpc64-linux/shared.sh dist-powerpc64-linux/powerpc64-linux-gnu.config dist-powerpc64-linux/build-powerpc64-toolchain.sh /tmp/
RUN ./build-powerpc64-toolchain.sh
USER root
USER root
RUN apt-get install -y --no-install-recommends rpm2cpio cpio
-COPY shared.sh build-powerpc64le-toolchain.sh /tmp/
+COPY dist-powerpc64le-linux/shared.sh dist-powerpc64le-linux/build-powerpc64le-toolchain.sh /tmp/
RUN ./build-powerpc64le-toolchain.sh
RUN curl -o /usr/local/bin/sccache \
USER rustbuild
WORKDIR /tmp
-COPY patches/ /tmp/patches/
-COPY s390x-linux-gnu.config build-s390x-toolchain.sh /tmp/
+COPY dist-s390x-linux/patches/ /tmp/patches/
+COPY dist-s390x-linux/s390x-linux-gnu.config dist-s390x-linux/build-s390x-toolchain.sh /tmp/
RUN ./build-s390x-toolchain.sh
USER root
libssl-dev \
pkg-config
-COPY build-toolchain.sh /tmp/
+COPY dist-x86_64-freebsd/build-toolchain.sh /tmp/
RUN /tmp/build-toolchain.sh x86_64
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
WORKDIR /tmp
-COPY shared.sh build-binutils.sh /tmp/
+COPY dist-x86_64-linux/shared.sh dist-x86_64-linux/build-binutils.sh /tmp/
# We need a build of openssl which supports SNI to download artifacts from
# static.rust-lang.org. This'll be used to link into libcurl below (and used
# later as well), so build a copy of OpenSSL with dynamic libraries into our
# generic root.
-COPY build-openssl.sh /tmp/
+COPY dist-x86_64-linux/build-openssl.sh /tmp/
RUN ./build-openssl.sh
# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
-COPY build-curl.sh /tmp/
+COPY dist-x86_64-linux/build-curl.sh /tmp/
RUN ./build-curl.sh
# binutils < 2.22 has a bug where the 32-bit executables it generates
RUN ./build-binutils.sh
# Need a newer version of gcc than centos has to compile LLVM nowadays
-COPY build-gcc.sh /tmp/
+COPY dist-x86_64-linux/build-gcc.sh /tmp/
RUN ./build-gcc.sh
# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
-COPY build-python.sh /tmp/
+COPY dist-x86_64-linux/build-python.sh /tmp/
RUN ./build-python.sh
# Apparently CentOS 5.5 desn't have `git` in yum, but we're gonna need it for
# cloning, so download and build it here.
-COPY build-git.sh /tmp/
+COPY dist-x86_64-linux/build-git.sh /tmp/
RUN ./build-git.sh
# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
# only has 2.6.4, so build our own
-COPY build-cmake.sh /tmp/
+COPY dist-x86_64-linux/build-cmake.sh /tmp/
RUN ./build-cmake.sh
# for sanitizers, we need kernel headers files newer than the ones CentOS ships
# with so we install newer ones here
-COPY build-headers.sh /tmp/
+COPY dist-x86_64-linux/build-headers.sh /tmp/
RUN ./build-headers.sh
RUN curl -Lo /rustroot/dumb-init \
pkg-config
WORKDIR /build/
-COPY build-musl.sh /build/
+COPY dist-x86_64-musl/build-musl.sh /build/
RUN sh /build/build-musl.sh && rm -rf /build
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
USER rustbuild
WORKDIR /tmp
-COPY build-netbsd-toolchain.sh /tmp/
+COPY dist-x86_64-netbsd/build-netbsd-toolchain.sh /tmp/
RUN ./build-netbsd-toolchain.sh
USER root
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
WORKDIR /tmp
-COPY build-emscripten.sh /tmp/
+COPY emscripten/build-emscripten.sh /tmp/
RUN ./build-emscripten.sh
ENV PATH=$PATH:/tmp/emsdk_portable
ENV PATH=$PATH:/tmp/emsdk_portable/clang/tag-e1.37.10/build_tag-e1.37.10_32/bin
build \
--rm \
-t rust-ci \
- "$docker_dir/$image"
+ -f "$docker_dir/$image/Dockerfile" \
+ "$docker_dir"
elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then
if [ -n "$TRAVIS_OS_NAME" ]; then
echo Cannot run disabled images on travis!
--- /dev/null
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+URL=https://dl.google.com/android/repository
+
+download_ndk() {
+ mkdir -p /android/ndk
+ cd /android/ndk
+ curl -O $URL/$1
+ unzip -q $1
+ rm $1
+ mv android-ndk-* ndk
+}
+
+make_standalone_toolchain() {
+    # See https://developer.android.com/ndk/guides/standalone_toolchain.html
+ python2.7 /android/ndk/ndk/build/tools/make_standalone_toolchain.py \
+ --install-dir /android/ndk/$1-$2 \
+ --arch $1 \
+ --api $2
+}
+
+remove_ndk() {
+ rm -rf /android/ndk/ndk
+}
+
+download_and_make_toolchain() {
+ download_ndk $1 && \
+ make_standalone_toolchain $2 $3 && \
+ remove_ndk
+}
--- /dev/null
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+URL=https://dl.google.com/android/repository
+
+download_sdk() {
+ mkdir -p /android/sdk
+ cd /android/sdk
+ curl -O $URL/$1
+ unzip -q $1
+ rm -rf $1
+}
+
+download_sysimage() {
+ # See https://developer.android.com/studio/tools/help/android.html
+ abi=$1
+ api=$2
+
+ filter="platform-tools,android-$api"
+ filter="$filter,sys-img-$abi-android-$api"
+
+ # Keep printing yes to accept the licenses
+ while true; do echo yes; sleep 10; done | \
+ /android/sdk/tools/android update sdk -a --no-ui \
+ --filter "$filter"
+}
+
+create_avd() {
+ # See https://developer.android.com/studio/tools/help/android.html
+ abi=$1
+ api=$2
+
+ echo no | \
+ /android/sdk/tools/android create avd \
+ --name $abi-$api \
+ --target android-$api \
+ --abi $abi
+}
+
+download_and_create_avd() {
+ download_sdk $1
+ download_sysimage $2 $3
+ create_avd $2 $3
+}
--- /dev/null
+#!/bin/sh
+# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+# Setting SHELL to a file instead of a symlink helps the Android
+# emulator identify the system
+export SHELL=/bin/bash
+
+# Using the default qemu2 engine makes time::tests::since_epoch fail because
+# the emulator date is set to the unix epoch (in the armeabi-v7a-18 image). Using
+# the classic engine the emulator starts with the current date and the tests run
+# fine. If another image is used, this needs to be evaluated again.
+nohup nohup emulator @armeabi-v7a-18 \
+ -engine classic -no-window -partition-size 2047 0<&- &>/dev/null &
+
+exec "$@"
--- /dev/null
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb
+dpkg -i dumb-init_*.deb
+rm dumb-init_*.deb
--- /dev/null
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+curl -o /usr/local/bin/sccache \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl
+
+chmod +x /usr/local/bin/sccache
-Subproject commit ad7de198561b3a12217ea2da76d796d9c7fc0ed3
+Subproject commit 97422981c53a00f7c3d6584d363443117f179fff
-Subproject commit 6b0de90d87dda15e323ef24cdf7ed873ac5cf4d3
+Subproject commit f7a108dfa9e90b07821700c55d01f08a9adf005c
- [unique](library-features/unique.md)
- [unsize](library-features/unsize.md)
- [utf8_error_error_len](library-features/utf8-error-error-len.md)
+ - [vec_resize_default](library-features/vec-resize-default.md)
- [vec_remove_item](library-features/vec-remove-item.md)
- [windows_c](library-features/windows-c.md)
- [windows_handle](library-features/windows-handle.md)
[#23121]: https://github.com/rust-lang/rust/issues/23121
-See also [`slice_patterns`](slice-patterns.html).
+See also [`slice_patterns`](language-features/slice-patterns.html).
------------------------
[llvm-docs]: http://llvm.org/docs/LangRef.html#inline-assembler-expressions
If you need more power and don't mind losing some of the niceties of
-`asm!`, check out [global_asm](global_asm.html).
+`asm!`, check out [global_asm](language-features/global_asm.html).
[#29641]: https://github.com/rust-lang/rust/issues/29641
-See also [`box_syntax`](box-syntax.html)
+See also [`box_syntax`](language-features/box-syntax.html)
------------------------
[#27779]: https://github.com/rust-lang/rust/issues/27779
-See also [`box_patterns`](box-patterns.html)
+See also [`box_patterns`](language-features/box-patterns.html)
------------------------
If you don't need quite as much power and flexibility as
`global_asm!` provides, and you don't mind restricting your inline
-assembly to `fn` bodies only, you might try the [asm](asm.html)
-feature instead.
+assembly to `fn` bodies only, you might try the
+[asm](language-features/asm.html) feature instead.
[#37339]: https://github.com/rust-lang/rust/issues/37339
+Documentation to be appended to section G of the book.
+
------------------------
+### Loops as expressions
+
+Like most things in Rust, loops are expressions, and have a value; normally `()` unless the loop
+never exits.
+A `loop` can instead evaluate to a useful value via *break with value*:
+
+```rust
+#![feature(loop_break_value)]
+
+// Find the first square number over 1000:
+let mut n = 1;
+let square = loop {
+ if n * n > 1000 {
+ break n * n;
+ }
+ n += 1;
+};
+```
+
+The evaluation type may be specified externally:
+
+```rust
+#![feature(loop_break_value)]
+
+// Declare that value returned is unsigned 64-bit:
+let n: u64 = loop {
+ break 1;
+};
+```
+
+It is an error if types do not agree, either between a "break" value and an external requirement,
+or between multiple "break" values:
+
+```no_compile
+#![feature(loop_break_value)]
+
+loop {
+ if true {
+ break 1u32;
+ } else {
+ break 0u8; // error: types do not agree
+ }
+};
+
+let n: i32 = loop {
+ break 0u32; // error: type does not agree with external requirement
+};
+```
+
+#### Break: label, value
+
+Four forms of `break` are available, where EXPR is some expression which evaluates to a value:
+
+1. `break;`
+2. `break 'label;`
+3. `break EXPR;`
+4. `break 'label EXPR;`
+
+When no value is given, the value `()` is assumed, thus `break;` is equivalent to `break ();`.
+
+Using a label allows returning a value from an inner loop:
+```rust
+#![feature(loop_break_value)]
+let result = 'outer: loop {
+ for n in 1..10 {
+ if n > 4 {
+ break 'outer n;
+ }
+ }
+};
+```
[`plugin`] and `rustc_private` features as well. For more details, see
their docs.
-[`plugin`]: plugin.html
+[`plugin`]: language-features/plugin.html
------------------------
This feature is part of "compiler plugins." It will often be used with the
[`plugin_registrar`] and `rustc_private` features.
-[`plugin_registrar`]: plugin-registrar.html
+[`plugin_registrar`]: language-features/plugin-registrar.html
------------------------
------------------------
+This feature flag guards the new procedural macro features as laid out by [RFC 1566], which alongside the now-stable
+[custom derives], provide stabilizable alternatives to the compiler plugin API (which requires the use of
+perma-unstable internal APIs) for programmatically modifying Rust code at compile-time.
+The two new procedural macro kinds are:
+
+* Function-like procedural macros which are invoked like regular declarative macros, and:
+* Attribute-like procedural macros which can be applied to any item which built-in attributes can
+be applied to, and which can take arguments in their invocation as well.
+
+Additionally, this feature flag implicitly enables the [`use_extern_macros`](language-features/use-extern-macros.html) feature,
+which allows macros to be imported like any other item with `use` statements, as compared to
+applying `#[macro_use]` to an `extern crate` declaration. It is important to note that procedural macros may
+**only** be imported in this manner, and will throw an error otherwise.
+
+You **must** declare the `proc_macro` feature in both the crate declaring these new procedural macro kinds as well as
+in any crates that use them.
+
+### Common Concepts
+
+As with custom derives, procedural macros may only be declared in crates of the `proc-macro` type, and must be public
+functions. No other public items may be declared in `proc-macro` crates, but private items are fine.
+
+To declare your crate as a `proc-macro` crate, simply add:
+
+```toml
+[lib]
+proc-macro = true
+```
+
+to your `Cargo.toml`.
+
+Unlike custom derives, however, the name of the function implementing the procedural macro is used directly as the
+procedural macro's name, so choose carefully.
+
+Additionally, both new kinds of procedural macros return a `TokenStream` which *wholly* replaces the original
+invocation and its input.
+
+#### Importing
+
+As referenced above, the new procedural macros are not meant to be imported via `#[macro_use]` and will throw an
+error if they are. Instead, they are meant to be imported like any other item in Rust, with `use` statements:
+
+```rust,ignore
+#![feature(proc_macro)]
+
+// Where `my_proc_macros` is some crate of the `proc-macro` type
+extern crate my_proc_macros;
+
+// And declares a `#[proc_macro] pub fn my_bang_macro()` at its root.
+use my_proc_macros::my_bang_macro;
+
+fn main() {
+ println!("{}", my_bang_macro!());
+}
+```
+
+#### Error Reporting
+
+Any panics in a procedural macro implementation will be caught by the compiler and turned into an error message pointing
+to the problematic invocation. Thus, it is important to make your panic messages as informative as possible: use
+`Option::expect` instead of `Option::unwrap` and `Result::expect` instead of `Result::unwrap`, and inform the user of
+the error condition as unambiguously as you can.
+
+#### `TokenStream`
+
+The `proc_macro::TokenStream` type is hardcoded into the signatures of procedural macro functions for both input and
+output. It is a wrapper around the compiler's internal representation for a given chunk of Rust code.
+
+### Function-like Procedural Macros
+
+These are procedural macros that are invoked like regular declarative macros. They are declared as public functions in
+crates of the `proc-macro` type and using the `#[proc_macro]` attribute. The name of the declared function becomes the
+name of the macro as it is to be imported and used. The function must be of the kind `fn(TokenStream) -> TokenStream`
+where the sole argument is the input to the macro and the return type is the macro's output.
+
+This kind of macro can expand to anything that is valid for the context it is invoked in, including expressions and
+statements, as well as items.
+
+**Note**: invocations of this kind of macro require a wrapping `[]`, `{}` or `()` like regular macros, but these do not
+appear in the input, only the tokens between them. The tokens between the braces do not need to be valid Rust syntax.
+
+<span class="filename">my_macro_crate/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+// This is always necessary to get the `TokenStream` typedef.
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro]
+pub fn say_hello(_input: TokenStream) -> TokenStream {
+ // This macro will accept any input because it ignores it.
+ // To enforce correctness in macros which don't take input,
+ // you may want to add `assert!(_input.to_string().is_empty());`.
+ "println!(\"Hello, world!\")".parse().unwrap()
+}
+```
+
+<span class="filename">my_macro_user/Cargo.toml</span>
+
+```toml
+[dependencies]
+my_macro_crate = { path = "<relative path to my_macro_crate>" }
+```
+
+<span class="filename">my_macro_user/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate my_macro_crate;
+
+use my_macro_crate::say_hello;
+
+fn main() {
+ say_hello!();
+}
+```
+
+As expected, this prints `Hello, world!`.
+
+### Attribute-like Procedural Macros
+
+These are arguably the most powerful flavor of procedural macro as they can be applied anywhere attributes are allowed.
+
+They are declared as public functions in crates of the `proc-macro` type, using the `#[proc_macro_attribute]` attribute.
+The name of the function becomes the name of the attribute as it is to be imported and used. The function must be of the
+kind `fn(TokenStream, TokenStream) -> TokenStream` where:
+
+The first argument represents any metadata for the attribute (see [the reference chapter on attributes][refr-attr]).
+Only the metadata itself will appear in this argument, for example:
+
+ * `#[my_macro]` will get an empty string.
+ * `#[my_macro = "string"]` will get `= "string"`.
+ * `#[my_macro(ident)]` will get `(ident)`.
+ * etc.
+
+The second argument is the item that the attribute is applied to. It can be a function, a type definition,
+an impl block, an `extern` block, or a module—attribute invocations can take the inner form (`#![my_attr]`)
+or outer form (`#[my_attr]`).
+
+The return type is the output of the macro which *wholly* replaces the item it was applied to. Thus, if your intention
+is to merely modify an item, it *must* be copied to the output. The output must be an item; expressions, statements
+and bare blocks are not allowed.
+
+There is no restriction on how many items an attribute-like procedural macro can emit as long as they are valid in
+the given context.
+
+<span class="filename">my_macro_crate/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+/// Adds a `/// ### Panics` docstring to the end of the input's documentation
+///
+/// Does not assert that its receiver is a function or method.
+#[proc_macro_attribute]
+pub fn panics_note(args: TokenStream, input: TokenStream) -> TokenStream {
+ let args = args.to_string();
+ let mut input = input.to_string();
+
+ assert!(args.starts_with("= \""), "`#[panics_note]` requires an argument of the form \
+ `#[panics_note = \"panic note here\"]`");
+
+ // Get just the bare note string
+ let panics_note = args.trim_matches(&['=', ' ', '"'][..]);
+
+ // The input will include all docstrings regardless of where the attribute is placed,
+ // so we need to find the last index before the start of the item
+ let insert_idx = idx_after_last_docstring(&input);
+
+ // And insert our `### Panics` note there so it always appears at the end of an item's docs
+ input.insert_str(insert_idx, &format!("/// # Panics \n/// {}\n", panics_note));
+
+ input.parse().unwrap()
+}
+
+// `proc-macro` crates can contain any kind of private item still
+fn idx_after_last_docstring(input: &str) -> usize {
+ // Skip docstring lines to find the start of the item proper
+ input.lines().skip_while(|line| line.trim_left().starts_with("///")).next()
+ // Find the index of the first non-docstring line in the input
+ // Note: assumes this exact line is unique in the input
+ .and_then(|line_after| input.find(line_after))
+ // No docstrings in the input
+ .unwrap_or(0)
+}
+```
+
+<span class="filename">my_macro_user/Cargo.toml</span>
+
+```toml
+[dependencies]
+my_macro_crate = { path = "<relative path to my_macro_crate>" }
+```
+
+<span class="filename">my_macro_user/src/lib.rs</span>
+
+```rust,ignore
+#![feature(proc_macro)]
+
+extern crate my_macro_crate;
+
+use my_macro_crate::panics_note;
+
+/// Do the `foo` thing.
+#[panics_note = "Always."]
+pub fn foo() {
+ panic!()
+}
+```
+
+Then the rendered documentation for `pub fn foo` will look like this:
+
+> `pub fn foo()`
+>
+> ----
+> Do the `foo` thing.
+> # Panics
+> Always.
+
+[RFC 1566]: https://github.com/rust-lang/rfcs/blob/master/text/1566-proc-macros.md
+[custom derives]: https://doc.rust-lang.org/book/procedural-macros.html
+[rust-lang/rust#41430]: https://github.com/rust-lang/rust/issues/41430
+[refr-attr]: https://doc.rust-lang.org/reference/attributes.html
[#23121]: https://github.com/rust-lang/rust/issues/23121
-See also [`advanced_slice_patterns`](advanced-slice-patterns.html).
+See also
+[`advanced_slice_patterns`](language-features/advanced-slice-patterns.html).
------------------------
[#33082]: https://github.com/rust-lang/rust/issues/33082
-See also [`alloc_system`](alloc-system.html).
+See also [`alloc_system`](library-features/alloc-system.html).
------------------------
[#33082]: https://github.com/rust-lang/rust/issues/33082
-See also [`alloc_jemalloc`](alloc-jemalloc.html).
+See also [`alloc_jemalloc`](library-features/alloc-jemalloc.html).
------------------------
--- /dev/null
+# `vec_resize_default`
+
+The tracking issue for this feature is: [#41758]
+
+[#41758]: https://github.com/rust-lang/rust/issues/41758
+
+------------------------
}
impl<T: Clone> Vec<T> {
- /// Resizes the `Vec` in-place so that `len()` is equal to `new_len`.
+ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
///
- /// If `new_len` is greater than `len()`, the `Vec` is extended by the
+ /// If `new_len` is greater than `len`, the `Vec` is extended by the
/// difference, with each additional slot filled with `value`.
- /// If `new_len` is less than `len()`, the `Vec` is simply truncated.
+ /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+ ///
+ /// This method requires `Clone` to clone the passed value. If you'd
+ /// rather create a value with `Default` instead, see [`resize_default`].
///
/// # Examples
///
/// vec.resize(2, 0);
/// assert_eq!(vec, [1, 2]);
/// ```
+ ///
+ /// [`resize_default`]: #method.resize_default
#[stable(feature = "vec_resize", since = "1.5.0")]
pub fn resize(&mut self, new_len: usize, value: T) {
let len = self.len();
if new_len > len {
- self.extend_with_element(new_len - len, value);
+ self.extend_with(new_len - len, ExtendElement(value))
+ } else {
+ self.truncate(new_len);
+ }
+ }
+
+ /// Clones and appends all elements in a slice to the `Vec`.
+ ///
+ /// Iterates over the slice `other`, clones each element, and then appends
+ /// it to this `Vec`. The `other` vector is traversed in-order.
+ ///
+    /// Note that this function is the same as `extend` except that it is
+ /// specialized to work with slices instead. If and when Rust gets
+ /// specialization this function will likely be deprecated (but still
+ /// available).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1];
+ /// vec.extend_from_slice(&[2, 3, 4]);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// ```
+ #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
+ pub fn extend_from_slice(&mut self, other: &[T]) {
+ self.spec_extend(other.iter())
+ }
+}
+
+impl<T: Default> Vec<T> {
+ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+ ///
+ /// If `new_len` is greater than `len`, the `Vec` is extended by the
+ /// difference, with each additional slot filled with `Default::default()`.
+ /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+ ///
+ /// This method uses `Default` to create new values on every push. If
+ /// you'd rather `Clone` a given value, use [`resize`].
+ ///
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(vec_resize_default)]
+ ///
+ /// let mut vec = vec![1, 2, 3];
+ /// vec.resize_default(5);
+ /// assert_eq!(vec, [1, 2, 3, 0, 0]);
+ ///
+ /// let mut vec = vec![1, 2, 3, 4];
+ /// vec.resize_default(2);
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ ///
+ /// [`resize`]: #method.resize
+ #[unstable(feature = "vec_resize_default", issue = "41758")]
+ pub fn resize_default(&mut self, new_len: usize) {
+ let len = self.len();
+
+ if new_len > len {
+ self.extend_with(new_len - len, ExtendDefault);
} else {
self.truncate(new_len);
}
}
+}
- /// Extend the vector by `n` additional clones of `value`.
- fn extend_with_element(&mut self, n: usize, value: T) {
+// This code generalises `extend_with_{element,default}`.
+trait ExtendWith<T> {
+ fn next(&self) -> T;
+ fn last(self) -> T;
+}
+
+struct ExtendElement<T>(T);
+impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
+ fn next(&self) -> T { self.0.clone() }
+ fn last(self) -> T { self.0 }
+}
+
+struct ExtendDefault;
+impl<T: Default> ExtendWith<T> for ExtendDefault {
+ fn next(&self) -> T { Default::default() }
+ fn last(self) -> T { Default::default() }
+}
+impl<T> Vec<T> {
+ /// Extend the vector by `n` values, using the given generator.
+ fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, value: E) {
self.reserve(n);
unsafe {
// Write all elements except the last one
for _ in 1..n {
- ptr::write(ptr, value.clone());
+ ptr::write(ptr, value.next());
ptr = ptr.offset(1);
- // Increment the length in every step in case clone() panics
+ // Increment the length in every step in case next() panics
local_len.increment_len(1);
}
if n > 0 {
// We can write the last element directly without cloning needlessly
- ptr::write(ptr, value);
+ ptr::write(ptr, value.last());
local_len.increment_len(1);
}
// len set by scope guard
}
}
-
- /// Clones and appends all elements in a slice to the `Vec`.
- ///
- /// Iterates over the slice `other`, clones each element, and then appends
- /// it to this `Vec`. The `other` vector is traversed in-order.
- ///
- /// Note that this function is same as `extend` except that it is
- /// specialized to work with slices instead. If and when Rust gets
- /// specialization this function will likely be deprecated (but still
- /// available).
- ///
- /// # Examples
- ///
- /// ```
- /// let mut vec = vec![1];
- /// vec.extend_from_slice(&[2, 3, 4]);
- /// assert_eq!(vec, [1, 2, 3, 4]);
- /// ```
- #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
- pub fn extend_from_slice(&mut self, other: &[T]) {
- self.spec_extend(other.iter())
- }
}
// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
impl<T: Clone> SpecFromElem for T {
default fn from_elem(elem: Self, n: usize) -> Vec<Self> {
let mut v = Vec::with_capacity(n);
- v.extend_with_element(n, elem);
+ v.extend_with(n, ExtendElement(elem));
v
}
}
}
}
let mut v = Vec::with_capacity(n);
- v.extend_with_element(n, elem);
+ v.extend_with(n, ExtendElement(elem));
v
}
}
{
unsafe {
let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
- let mut parts: [flt2dec::Part; 5] = mem::uninitialized();
+ let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
let formatted = flt2dec::to_exact_fixed_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
false, &mut buf, &mut parts);
unsafe {
// enough for f32 and f64
let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
- let mut parts: [flt2dec::Part; 5] = mem::uninitialized();
+ let mut parts: [flt2dec::Part; 4] = mem::uninitialized();
let formatted = flt2dec::to_shortest_str(flt2dec::strategy::grisu::format_shortest,
*num, sign, 0, false, &mut buf, &mut parts);
fmt.pad_formatted_parts(&formatted)
{
unsafe {
let mut buf: [u8; 1024] = mem::uninitialized(); // enough for f32 and f64
- let mut parts: [flt2dec::Part; 7] = mem::uninitialized();
+ let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
let formatted = flt2dec::to_exact_exp_str(flt2dec::strategy::grisu::format_exact,
*num, sign, precision,
upper, &mut buf, &mut parts);
unsafe {
// enough for f32 and f64
let mut buf: [u8; flt2dec::MAX_SIG_DIGITS] = mem::uninitialized();
- let mut parts: [flt2dec::Part; 7] = mem::uninitialized();
+ let mut parts: [flt2dec::Part; 6] = mem::uninitialized();
let formatted = flt2dec::to_shortest_exp_str(flt2dec::strategy::grisu::format_shortest,
*num, sign, (0, 0), upper,
&mut buf, &mut parts);
/// it will only print given digits and nothing else.
///
/// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long.
-/// There should be at least 5 parts available, due to the worst case like
-/// `[+][0.][0000][45][0000]` with `frac_digits = 10`.
+/// There should be at least 4 parts available, due to the worst case like
+/// `[+][0.][0000][2][0000]` with `frac_digits = 10`.
pub fn to_shortest_str<'a, T, F>(mut format_shortest: F, v: T,
sign: Sign, frac_digits: usize, _upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
/// cannot be in this range, avoiding any confusion.
///
/// The byte buffer should be at least `MAX_SIG_DIGITS` bytes long.
-/// There should be at least 7 parts available, due to the worst case like
-/// `[+][1][.][2345][e][-][67]`.
+/// There should be at least 6 parts available, due to the worst case like
+/// `[+][1][.][2345][e][-][6]`.
pub fn to_shortest_exp_str<'a, T, F>(mut format_shortest: F, v: T,
sign: Sign, dec_bounds: (i16, i16), upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
/// The byte buffer should be at least `ndigits` bytes long unless `ndigits` is
/// so large that only the fixed number of digits will be ever written.
/// (The tipping point for `f64` is about 800, so 1000 bytes should be enough.)
-/// There should be at least 7 parts available, due to the worst case like
-/// `[+][1][.][2345][e][-][67]`.
+/// There should be at least 6 parts available, due to the worst case like
+/// `[+][1][.][2345][e][-][6]`.
pub fn to_exact_exp_str<'a, T, F>(mut format_exact: F, v: T,
sign: Sign, ndigits: usize, upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
/// The byte buffer should be enough for the output unless `frac_digits` is
/// so large that only the fixed number of digits will be ever written.
/// (The tipping point for `f64` is about 800, and 1000 bytes should be enough.)
-/// There should be at least 5 parts available, due to the worst case like
-/// `[+][0.][0000][45][0000]` with `frac_digits = 10`.
+/// There should be at least 4 parts available, due to the worst case like
+/// `[+][0.][0000][2][0000]` with `frac_digits = 10`.
pub fn to_exact_fixed_str<'a, T, F>(mut format_exact: F, v: T,
sign: Sign, frac_digits: usize, _upper: bool,
buf: &'a mut [u8], parts: &'a mut [Part<'a>]) -> Formatted<'a>
use ops::*;
+#[allow(unused_macros)]
macro_rules! sh_impl_signed {
($t:ident, $f:ident) => (
#[stable(feature = "rust1", since = "1.0.0")]
($($t:ty)*) => { neg_impl_core!{ x => -x, $($t)*} }
}
+#[allow(unused_macros)]
macro_rules! neg_impl_unsigned {
($($t:ty)*) => {
neg_impl_core!{ x => {
}
}
+impl Quote for usize {
+ fn quote(&self) -> TokenStream {
+ let integer_symbol = Symbol::intern(&self.to_string());
+ TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None))
+ .into()
+ }
+}
+
impl Quote for Ident {
fn quote(&self) -> TokenStream {
// FIXME(jseyfried) quote hygiene
impl Quote for Lit {
fn quote(&self) -> TokenStream {
macro_rules! gen_match {
- ($($i:ident),*) => {
+ ($($i:ident),*; $($raw:ident),*) => {
match *self {
$( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
- _ => panic!("Unsupported literal"),
+ $( Lit::$raw(lit, n) => {
+ quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n)))
+ })*
}
}
}
- gen_match!(Byte, Char, Float, Str_, Integer, ByteStr)
+ gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw)
}
}
fmt_macros = { path = "../libfmt_macros" }
graphviz = { path = "../libgraphviz" }
log = "0.3"
+owning_ref = "0.3.3"
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
-rustc_llvm = { path = "../librustc_llvm" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
UsedTraitImports(D),
ConstEval(D),
SymbolName(D),
+ SpecializationGraph(D),
+ ObjectSafety(D),
// The set of impls for a given trait. Ultimately, it would be
// nice to get more fine-grained here (e.g., to include a
// than changes in the impl body.
TraitImpls(D),
+ AllLocalTraitImpls,
+
// Nodes representing caches. To properly handle a true cache, we
// don't use a DepTrackingMap, but rather we push a task node.
// Otherwise the write into the map would be incorrectly
UsedTraitImports(ref d) => op(d).map(UsedTraitImports),
ConstEval(ref d) => op(d).map(ConstEval),
SymbolName(ref d) => op(d).map(SymbolName),
+ SpecializationGraph(ref d) => op(d).map(SpecializationGraph),
+ ObjectSafety(ref d) => op(d).map(ObjectSafety),
TraitImpls(ref d) => op(d).map(TraitImpls),
+ AllLocalTraitImpls => Some(AllLocalTraitImpls),
TraitItems(ref d) => op(d).map(TraitItems),
ReprHints(ref d) => op(d).map(ReprHints),
TraitSelect { ref trait_def_id, ref input_def_id } => {
[iss15872]: https://github.com/rust-lang/rust/issues/15872
"##,
+E0119: r##"
+There are conflicting trait implementations for the same type.
+Example of erroneous code:
+
+```compile_fail,E0119
+trait MyTrait {
+ fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+ fn get(&self) -> usize { 0 }
+}
+
+struct Foo {
+ value: usize
+}
+
+impl MyTrait for Foo { // error: conflicting implementations of trait
+ // `MyTrait` for type `Foo`
+ fn get(&self) -> usize { self.value }
+}
+```
+
+When looking for the implementation for the trait, the compiler finds
+both the `impl<T> MyTrait for T`, which matches all types, and the
+`impl MyTrait for Foo`. Since a trait cannot be implemented multiple times,
+this is an error. So, when you write:
+
+```
+trait MyTrait {
+ fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+ fn get(&self) -> usize { 0 }
+}
+```
+
+This makes the trait implemented for all types in scope. So if you
+try to implement it again for another type, the implementations will
+conflict. Example:
+
+```
+trait MyTrait {
+ fn get(&self) -> usize;
+}
+
+impl<T> MyTrait for T {
+ fn get(&self) -> usize { 0 }
+}
+
+struct Foo;
+
+fn main() {
+ let f = Foo;
+
+ f.get(); // the trait is implemented so we can use it
+}
+```
+"##,
+
E0133: r##"
Unsafe code was used outside of an unsafe function or block.
}
pub fn trait_impls(&self, trait_did: DefId) -> &'hir [NodeId] {
- self.dep_graph.read(DepNode::TraitImpls(trait_did));
+ self.dep_graph.read(DepNode::AllLocalTraitImpls);
// NB: intentionally bypass `self.forest.krate()` so that we
// do not trigger a read of the whole krate here
}
pub fn trait_default_impl(&self, trait_did: DefId) -> Option<NodeId> {
- self.dep_graph.read(DepNode::TraitImpls(trait_did));
+ self.dep_graph.read(DepNode::AllLocalTraitImpls);
// NB: intentionally bypass `self.forest.krate()` so that we
// do not trigger a read of the whole krate here
fingerprint
}
}
+
+impl<CTX> stable_hasher::HashStable<CTX> for Fingerprint {
+ fn hash_stable<W: stable_hasher::StableHasherResult>(&self,
+ _: &mut CTX,
+ hasher: &mut stable_hasher::StableHasher<W>) {
+ ::std::hash::Hash::hash(&self.0, hasher);
+ }
+}
use util::nodemap::NodeMap;
use std::hash as std_hash;
-use std::collections::{HashMap, HashSet};
+use std::collections::{HashMap, HashSet, BTreeMap};
use syntax::ast;
use syntax::attr;
hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
});
}
+
+
+pub fn hash_stable_btreemap<'a, 'tcx, K, V, SK, F, W>(hcx: &mut StableHashingContext<'a, 'tcx>,
+ hasher: &mut StableHasher<W>,
+ map: &BTreeMap<K, V>,
+ extract_stable_key: F)
+ where K: Eq + Ord,
+ V: HashStable<StableHashingContext<'a, 'tcx>>,
+ SK: HashStable<StableHashingContext<'a, 'tcx>> + Ord + Clone,
+ F: Fn(&mut StableHashingContext<'a, 'tcx>, &K) -> SK,
+ W: StableHasherResult,
+{
+ let mut keys: Vec<_> = map.keys()
+ .map(|k| (extract_stable_key(hcx, k), k))
+ .collect();
+ keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
+ keys.len().hash_stable(hcx, hasher);
+ for (stable_key, key) in keys {
+ stable_key.hash_stable(hcx, hasher);
+ map[key].hash_stable(hcx, hasher);
+ }
+}
pub use self::fingerprint::Fingerprint;
pub use self::caching_codemap_view::CachingCodemapView;
pub use self::hcx::{StableHashingContext, NodeIdHashingMode, hash_stable_hashmap,
- hash_stable_hashset, hash_stable_nodemap};
+ hash_stable_hashset, hash_stable_nodemap,
+ hash_stable_btreemap};
mod fingerprint;
mod caching_codemap_view;
mod hcx;
use super::InferCtxt;
use super::{MiscVariable, TypeTrace};
+use hir::def_id::DefId;
use ty::{IntType, UintType};
use ty::{self, Ty, TyCtxt};
use ty::error::TypeError;
use ty::relate::{self, Relate, RelateResult, TypeRelation};
+use ty::subst::Substs;
use traits::{Obligation, PredicateObligations};
use syntax::ast;
Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?))
}
+ fn relate_item_substs(&mut self,
+ item_def_id: DefId,
+ a_subst: &'tcx Substs<'tcx>,
+ b_subst: &'tcx Substs<'tcx>)
+ -> RelateResult<'tcx, &'tcx Substs<'tcx>>
+ {
+ if self.ambient_variance == ty::Variance::Invariant {
+ // Avoid fetching the variance if we are in an invariant
+ // context; no need, and it can induce dependency cycles
+ // (e.g. #41849).
+ relate::relate_substs(self, None, a_subst, b_subst)
+ } else {
+ let opt_variances = self.tcx().variances_of(item_def_id);
+ relate::relate_substs(self, Some(&opt_variances), a_subst, b_subst)
+ }
+ }
+
fn relate_with_variance<T: Relate<'tcx>>(&mut self,
variance: ty::Variance,
a: &T,
extern crate getopts;
extern crate graphviz;
extern crate libc;
-extern crate rustc_llvm as llvm;
+extern crate owning_ref;
extern crate rustc_back;
extern crate rustc_data_structures;
extern crate serialize;
"detects unreachable patterns"
}
+declare_lint! {
+ pub UNUSED_MACROS,
+ Warn,
+ "detects macros that were not used"
+}
+
declare_lint! {
pub WARNINGS,
Warn,
DEAD_CODE,
UNREACHABLE_CODE,
UNREACHABLE_PATTERNS,
+ UNUSED_MACROS,
WARNINGS,
UNUSED_FEATURES,
STABLE_FEATURES,
use hir::def_id::LOCAL_CRATE;
use hir::intravisit as hir_visit;
use syntax::visit as ast_visit;
+use syntax::tokenstream::ThinTokenStream;
/// Information about the registered lints.
///
fn visit_attribute(&mut self, attr: &'a ast::Attribute) {
run_lints!(self, check_attribute, early_passes, attr);
}
+
+ fn visit_mac_def(&mut self, _mac: &'a ThinTokenStream, id: ast::NodeId) {
+ let lints = self.sess.lints.borrow_mut().take(id);
+ for early_lint in lints {
+ self.early_lint(&early_lint);
+ }
+ }
}
enum CheckLintNameResult {
use util::nodemap::{NodeSet, DefIdMap};
use std::any::Any;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use std::rc::Rc;
+use owning_ref::ErasedBoxRef;
use syntax::ast;
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
}
}
+/// The backend's way to give the crate store access to the metadata in a library.
+/// Note that it returns the raw metadata bytes stored in the library file, whether
+/// it is compressed, uncompressed, some weird mix, etc.
+/// rmeta files are backend independent and not handled here.
+///
+/// At the time of this writing, there is only one backend and one way to store
+/// metadata in a library -- this trait just serves to decouple rustc_metadata from
+/// the archive reader, which depends on LLVM.
+pub trait MetadataLoader {
+ fn get_rlib_metadata(&self,
+ target: &Target,
+ filename: &Path)
+ -> Result<ErasedBoxRef<[u8]>, String>;
+ fn get_dylib_metadata(&self,
+ target: &Target,
+ filename: &Path)
+ -> Result<ErasedBoxRef<[u8]>, String>;
+}
+
/// A store of Rust crates, through which their metadata
/// can be accessed.
pub trait CrateStore {
fn crate_data_as_rc_any(&self, krate: CrateNum) -> Rc<Any>;
+ // access to the metadata loader
+ fn metadata_loader(&self) -> &MetadataLoader;
+
// item info
fn visibility(&self, def: DefId) -> ty::Visibility;
fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>>;
fn used_link_args(&self) -> Vec<String>;
// utility functions
- fn metadata_filename(&self) -> &str;
- fn metadata_section_name(&self, target: &Target) -> &str;
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>;
fn used_crate_source(&self, cnum: CrateNum) -> CrateSource;
fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>;
fn used_link_args(&self) -> Vec<String> { vec![] }
// utility functions
- fn metadata_filename(&self) -> &str { bug!("metadata_filename") }
- fn metadata_section_name(&self, target: &Target) -> &str { bug!("metadata_section_name") }
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>
{ vec![] }
fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") }
bug!("encode_metadata")
}
fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
+
+ // access to the metadata loader
+ fn metadata_loader(&self) -> &MetadataLoader { bug!("metadata_loader") }
}
pub trait CrateLoader {
}
);
-#[derive(Clone, PartialEq, Eq)]
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PrintRequest {
FileNames,
Sysroot,
linker: Option<String> = (None, parse_opt_string, [UNTRACKED],
"system linker to link outputs with"),
link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
- "a single extra argument to pass to the linker (can be used several times)"),
+ "a single extra argument to append to the linker invocation (can be used several times)"),
link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
- "extra arguments to pass to the linker (space separated)"),
+ "extra arguments to append to the linker invocation (space separated)"),
link_dead_code: bool = (false, parse_bool, [UNTRACKED],
"don't let linker strip dead code (turning it on can be used for code coverage)"),
lto: bool = (false, parse_bool, [TRACKED],
"attempt to recover from parse errors (experimental)"),
incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
"enable incremental compilation (experimental)"),
- incremental_cc: bool = (false, parse_bool, [UNTRACKED],
+ incremental_cc: bool = (true, parse_bool, [UNTRACKED],
"enable cross-crate incremental compilation (even more experimental)"),
incremental_info: bool = (false, parse_bool, [UNTRACKED],
"print high-level information about incremental reuse (or the lack thereof)"),
"add a mapping target to the file path remapping config"),
force_unstable_if_unmarked: bool = (false, parse_bool, [TRACKED],
"force all crates to be `rustc_private` unstable"),
+ pre_link_arg: Vec<String> = (vec![], parse_string_push, [UNTRACKED],
+        "a single extra argument to prepend to the linker invocation (can be used several times)"),
+ pre_link_args: Option<Vec<String>> = (None, parse_opt_list, [UNTRACKED],
+ "extra arguments to prepend to the linker invocation (space separated)"),
}
pub fn default_lib_output() -> CrateType {
use rustc_back::{LinkerFlavor, PanicStrategy};
use rustc_back::target::Target;
use rustc_data_structures::flock;
-use llvm;
use std::path::{Path, PathBuf};
use std::cell::{self, Cell, RefCell};
use std::collections::HashMap;
use std::env;
-use std::ffi::CString;
use std::io::Write;
use std::rc::Rc;
use std::fmt;
use std::time::Duration;
use std::sync::Arc;
-use libc::c_int;
mod code_stats;
pub mod config;
out_of_fuel: Cell::new(false),
};
- init_llvm(&sess);
-
sess
}
}
}
-fn init_llvm(sess: &Session) {
- unsafe {
- // Before we touch LLVM, make sure that multithreading is enabled.
- use std::sync::Once;
- static INIT: Once = Once::new();
- static mut POISONED: bool = false;
- INIT.call_once(|| {
- if llvm::LLVMStartMultithreaded() != 1 {
- // use an extra bool to make sure that all future usage of LLVM
- // cannot proceed despite the Once not running more than once.
- POISONED = true;
- }
-
- configure_llvm(sess);
- });
-
- if POISONED {
- bug!("couldn't enable multi-threaded LLVM");
- }
- }
-}
-
-unsafe fn configure_llvm(sess: &Session) {
- let mut llvm_c_strs = Vec::new();
- let mut llvm_args = Vec::new();
-
- {
- let mut add = |arg: &str| {
- let s = CString::new(arg).unwrap();
- llvm_args.push(s.as_ptr());
- llvm_c_strs.push(s);
- };
- add("rustc"); // fake program name
- if sess.time_llvm_passes() { add("-time-passes"); }
- if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
-
- for arg in &sess.opts.cg.llvm_args {
- add(&(*arg));
- }
- }
-
- llvm::LLVMInitializePasses();
-
- llvm::initialize_available_targets();
-
- llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,
- llvm_args.as_ptr());
-}
-
pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
let emitter: Box<Emitter> = match output {
config::ErrorOutputType::HumanReadable(color_config) => {
debug!("get_vtable_methods({:?})", trait_ref);
supertraits(tcx, trait_ref).flat_map(move |trait_ref| {
- tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id());
-
let trait_methods = tcx.associated_items(trait_ref.def_id())
.filter(|item| item.kind == ty::AssociatedKind::Method);
ty::Binder(self.predicate.skip_binder().self_ty())
}
}
+
+pub fn provide(providers: &mut ty::maps::Providers) {
+ *providers = ty::maps::Providers {
+ is_object_safe: object_safety::is_object_safe_provider,
+ specialization_graph_of: specialize::specialization_graph_provider,
+ ..*providers
+ };
+}
+
+pub fn provide_extern(providers: &mut ty::maps::Providers) {
+ *providers = ty::maps::Providers {
+ is_object_safe: object_safety::is_object_safe_provider,
+ specialization_graph_of: specialize::specialization_graph_provider,
+ ..*providers
+ };
+}
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- pub fn is_object_safe(self, trait_def_id: DefId) -> bool {
- // Because we query yes/no results frequently, we keep a cache:
- let def = self.trait_def(trait_def_id);
-
- let result = def.object_safety().unwrap_or_else(|| {
- let result = self.object_safety_violations(trait_def_id).is_empty();
-
- // Record just a yes/no result in the cache; this is what is
- // queried most frequently. Note that this may overwrite a
- // previous result, but always with the same thing.
- def.set_object_safety(result);
-
- result
- });
-
- debug!("is_object_safe({:?}) = {}", trait_def_id, result);
-
- result
- }
/// Returns the object safety violations that affect
/// astconv - currently, Self in supertraits. This is needed
error
}
}
+
+pub(super) fn is_object_safe_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ trait_def_id: DefId)
+ -> bool {
+ tcx.object_safety_violations(trait_def_id).is_empty()
+}
// In either case, we handle this by not adding a
// candidate for an impl if it contains a `default`
// type.
- let opt_node_item = assoc_ty_def(selcx,
- impl_data.impl_def_id,
- obligation.predicate.item_name);
- let new_candidate = if let Some(node_item) = opt_node_item {
- let is_default = if node_item.node.is_from_trait() {
- // If true, the impl inherited a `type Foo = Bar`
- // given in the trait, which is implicitly default.
- // Otherwise, the impl did not specify `type` and
- // neither did the trait:
- //
- // ```rust
- // trait Foo { type T; }
- // impl Foo for Bar { }
- // ```
- //
- // This is an error, but it will be
- // reported in `check_impl_items_against_trait`.
- // We accept it here but will flag it as
- // an error when we confirm the candidate
- // (which will ultimately lead to `normalize_to_error`
- // being invoked).
- node_item.item.defaultness.has_value()
- } else {
- node_item.item.defaultness.is_default() ||
- selcx.tcx().impl_is_default(node_item.node.def_id())
- };
-
- // Only reveal a specializable default if we're past type-checking
- // and the obligations is monomorphic, otherwise passes such as
- // transmute checking and polymorphic MIR optimizations could
- // get a result which isn't correct for all monomorphizations.
- if !is_default {
+ let node_item = assoc_ty_def(selcx,
+ impl_data.impl_def_id,
+ obligation.predicate.item_name);
+
+ let is_default = if node_item.node.is_from_trait() {
+ // If true, the impl inherited a `type Foo = Bar`
+ // given in the trait, which is implicitly default.
+ // Otherwise, the impl did not specify `type` and
+ // neither did the trait:
+ //
+ // ```rust
+ // trait Foo { type T; }
+ // impl Foo for Bar { }
+ // ```
+ //
+ // This is an error, but it will be
+ // reported in `check_impl_items_against_trait`.
+ // We accept it here but will flag it as
+ // an error when we confirm the candidate
+ // (which will ultimately lead to `normalize_to_error`
+ // being invoked).
+ node_item.item.defaultness.has_value()
+ } else {
+ node_item.item.defaultness.is_default() ||
+ selcx.tcx().impl_is_default(node_item.node.def_id())
+ };
+
+ // Only reveal a specializable default if we're past type-checking
+ // and the obligations is monomorphic, otherwise passes such as
+ // transmute checking and polymorphic MIR optimizations could
+ // get a result which isn't correct for all monomorphizations.
+ let new_candidate = if !is_default {
+ Some(ProjectionTyCandidate::Select)
+ } else if selcx.projection_mode() == Reveal::All {
+ assert!(!poly_trait_ref.needs_infer());
+ if !poly_trait_ref.needs_subst() {
Some(ProjectionTyCandidate::Select)
- } else if selcx.projection_mode() == Reveal::All {
- assert!(!poly_trait_ref.needs_infer());
- if !poly_trait_ref.needs_subst() {
- Some(ProjectionTyCandidate::Select)
- } else {
- None
- }
} else {
None
}
} else {
- // This is saying that neither the trait nor
- // the impl contain a definition for this
- // associated type. Normally this situation
- // could only arise through a compiler bug --
- // if the user wrote a bad item name, it
- // should have failed in astconv. **However**,
- // at coherence-checking time, we only look at
- // the topmost impl (we don't even consider
- // the trait itself) for the definition -- and
- // so in that case it may be that the trait
- // *DOES* have a declaration, but we don't see
- // it, and we end up in this branch.
- //
- // This is kind of tricky to handle actually.
- // For now, we just unconditionally ICE,
- // because otherwise, examples like the
- // following will succeed:
- //
- // ```
- // trait Assoc {
- // type Output;
- // }
- //
- // impl<T> Assoc for T {
- // default type Output = bool;
- // }
- //
- // impl Assoc for u8 {}
- // impl Assoc for u16 {}
- //
- // trait Foo {}
- // impl Foo for <u8 as Assoc>::Output {}
- // impl Foo for <u16 as Assoc>::Output {}
- // return None;
- // }
- // ```
- //
- // The essential problem here is that the
- // projection fails, leaving two unnormalized
- // types, which appear not to unify -- so the
- // overlap check succeeds, when it should
- // fail.
- span_bug!(obligation.cause.span,
- "Tried to project an inherited associated type during \
- coherence checking, which is currently not supported.");
+ None
};
+
candidate_set.vec.extend(new_candidate);
}
super::VtableParam(..) => {
let VtableImplData { substs, nested, impl_def_id } = impl_vtable;
let tcx = selcx.tcx();
- let trait_ref = obligation.predicate.trait_ref;
let assoc_ty = assoc_ty_def(selcx, impl_def_id, obligation.predicate.item_name);
- match assoc_ty {
- Some(node_item) => {
- let ty = if !node_item.item.defaultness.has_value() {
- // This means that the impl is missing a definition for the
- // associated type. This error will be reported by the type
- // checker method `check_impl_items_against_trait`, so here we
- // just return TyError.
- debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
- node_item.item.name,
- obligation.predicate.trait_ref);
- tcx.types.err
- } else {
- tcx.type_of(node_item.item.def_id)
- };
- let substs = translate_substs(selcx.infcx(), impl_def_id, substs, node_item.node);
- Progress {
- ty: ty.subst(tcx, substs),
- obligations: nested,
- cacheable: true
- }
- }
- None => {
- span_bug!(obligation.cause.span,
- "No associated type for {:?}",
- trait_ref);
- }
+ let ty = if !assoc_ty.item.defaultness.has_value() {
+ // This means that the impl is missing a definition for the
+ // associated type. This error will be reported by the type
+ // checker method `check_impl_items_against_trait`, so here we
+ // just return TyError.
+ debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
+ assoc_ty.item.name,
+ obligation.predicate.trait_ref);
+ tcx.types.err
+ } else {
+ tcx.type_of(assoc_ty.item.def_id)
+ };
+ let substs = translate_substs(selcx.infcx(), impl_def_id, substs, assoc_ty.node);
+ Progress {
+ ty: ty.subst(tcx, substs),
+ obligations: nested,
+ cacheable: true
}
}
selcx: &SelectionContext<'cx, 'gcx, 'tcx>,
impl_def_id: DefId,
assoc_ty_name: ast::Name)
- -> Option<specialization_graph::NodeItem<ty::AssociatedItem>>
+ -> specialization_graph::NodeItem<ty::AssociatedItem>
{
- let trait_def_id = selcx.tcx().impl_trait_ref(impl_def_id).unwrap().def_id;
- let trait_def = selcx.tcx().trait_def(trait_def_id);
-
- if !trait_def.is_complete(selcx.tcx()) {
- let impl_node = specialization_graph::Node::Impl(impl_def_id);
- for item in impl_node.items(selcx.tcx()) {
- if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name {
- return Some(specialization_graph::NodeItem {
- node: specialization_graph::Node::Impl(impl_def_id),
- item: item,
- });
- }
+ let tcx = selcx.tcx();
+ let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id;
+ let trait_def = tcx.trait_def(trait_def_id);
+
+ // This function may be called while we are still building the
+    // specialization graph that is queried below (via TraitDef::ancestors()),
+ // so, in order to avoid unnecessary infinite recursion, we manually look
+ // for the associated item at the given impl.
+ // If there is no such item in that impl, this function will fail with a
+ // cycle error if the specialization graph is currently being built.
+ let impl_node = specialization_graph::Node::Impl(impl_def_id);
+ for item in impl_node.items(tcx) {
+ if item.kind == ty::AssociatedKind::Type && item.name == assoc_ty_name {
+ return specialization_graph::NodeItem {
+ node: specialization_graph::Node::Impl(impl_def_id),
+ item: item,
+ };
}
- None
+ }
+
+ if let Some(assoc_item) = trait_def
+ .ancestors(tcx, impl_def_id)
+ .defs(tcx, assoc_ty_name, ty::AssociatedKind::Type)
+ .next() {
+ assoc_item
} else {
- trait_def
- .ancestors(impl_def_id)
- .defs(selcx.tcx(), assoc_ty_name, ty::AssociatedKind::Type)
- .next()
+ // This is saying that neither the trait nor
+ // the impl contain a definition for this
+ // associated type. Normally this situation
+ // could only arise through a compiler bug --
+ // if the user wrote a bad item name, it
+ // should have failed in astconv.
+ bug!("No associated type `{}` for {}",
+ assoc_ty_name,
+ tcx.item_path_str(impl_def_id))
}
}
use traits::{self, Reveal, ObligationCause};
use ty::{self, TyCtxt, TypeFoldable};
use syntax_pos::DUMMY_SP;
+use std::rc::Rc;
pub mod specialization_graph;
let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap();
let trait_def = tcx.trait_def(trait_def_id);
- let ancestors = trait_def.ancestors(impl_data.impl_def_id);
+ let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
match ancestors.defs(tcx, item.name, item.kind).next() {
Some(node_item) => {
let substs = tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
self.map.insert((a, b), result);
}
}
+
+// Query provider for `specialization_graph_of`.
+pub(super) fn specialization_graph_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ trait_id: DefId)
+ -> Rc<specialization_graph::Graph> {
+ let mut sg = specialization_graph::Graph::new();
+
+ let mut trait_impls: Vec<DefId> = tcx.trait_impls_of(trait_id).iter().collect();
+
+ // The coherence checking implementation seems to rely on impls being
+ // iterated over (roughly) in definition order, so we are sorting by
+ // negated CrateNum (so remote definitions are visited first) and then
+    // by a flattened version of the DefIndex.
+ trait_impls.sort_unstable_by_key(|def_id| {
+ (-(def_id.krate.as_u32() as i64),
+ def_id.index.address_space().index(),
+ def_id.index.as_array_index())
+ });
+
+ for impl_def_id in trait_impls {
+ if impl_def_id.is_local() {
+ // This is where impl overlap checking happens:
+ let insert_result = sg.insert(tcx, impl_def_id);
+ // Report error if there was one.
+ if let Err(overlap) = insert_result {
+ let mut err = struct_span_err!(tcx.sess,
+ tcx.span_of_impl(impl_def_id).unwrap(),
+ E0119,
+ "conflicting implementations of trait `{}`{}:",
+ overlap.trait_desc,
+ overlap.self_desc.clone().map_or(String::new(),
+ |ty| {
+ format!(" for type `{}`", ty)
+ }));
+
+ match tcx.span_of_impl(overlap.with_impl) {
+ Ok(span) => {
+ err.span_label(span, format!("first implementation here"));
+ err.span_label(tcx.span_of_impl(impl_def_id).unwrap(),
+ format!("conflicting implementation{}",
+ overlap.self_desc
+ .map_or(String::new(),
+ |ty| format!(" for `{}`", ty))));
+ }
+ Err(cname) => {
+ err.note(&format!("conflicting implementation in crate `{}`", cname));
+ }
+ }
+
+ err.emit();
+ }
+ } else {
+ let parent = tcx.impl_parent(impl_def_id).unwrap_or(trait_id);
+ sg.record_impl_from_cstore(tcx, parent, impl_def_id)
+ }
+ }
+
+ Rc::new(sg)
+}
use hir::def_id::DefId;
use traits::{self, Reveal};
-use ty::{self, TyCtxt, TraitDef, TypeFoldable};
+use ty::{self, TyCtxt, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType};
+use std::rc::Rc;
use syntax::ast::Name;
use util::nodemap::{DefIdMap, FxHashMap};
}
}
-pub struct Ancestors<'a> {
- trait_def: &'a TraitDef,
+pub struct Ancestors {
+ trait_def_id: DefId,
+ specialization_graph: Rc<Graph>,
current_source: Option<Node>,
}
-impl<'a> Iterator for Ancestors<'a> {
+impl Iterator for Ancestors {
type Item = Node;
fn next(&mut self) -> Option<Node> {
let cur = self.current_source.take();
if let Some(Node::Impl(cur_impl)) = cur {
- let parent = self.trait_def.specialization_graph.borrow().parent(cur_impl);
- if parent == self.trait_def.def_id {
+ let parent = self.specialization_graph.parent(cur_impl);
+ if parent == self.trait_def_id {
self.current_source = Some(Node::Trait(parent));
} else {
self.current_source = Some(Node::Impl(parent));
}
}
-impl<'a, 'gcx, 'tcx> Ancestors<'a> {
+impl<'a, 'gcx, 'tcx> Ancestors {
/// Search the items from the given ancestors, returning each definition
/// with the given name and the given kind.
#[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait.
/// Walk up the specialization ancestors of a given impl, starting with that
/// impl itself.
-pub fn ancestors<'a>(trait_def: &'a TraitDef, start_from_impl: DefId) -> Ancestors<'a> {
+pub fn ancestors(tcx: TyCtxt,
+ trait_def_id: DefId,
+ start_from_impl: DefId)
+ -> Ancestors {
+ let specialization_graph = tcx.specialization_graph_of(trait_def_id);
Ancestors {
- trait_def: trait_def,
+ trait_def_id,
+ specialization_graph,
current_source: Some(Node::Impl(start_from_impl)),
}
}
use mir;
use mir::transform::{MirSuite, MirPassIndex};
use session::CompileResult;
+use traits::specialization_graph;
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
use ty::item_path;
use ty::steal::Steal;
use ty::subst::Substs;
+use ty::fast_reject::SimplifiedType;
use util::nodemap::{DefIdSet, NodeSet};
use rustc_data_structures::indexed_vec::IndexVec;
}
}
+impl Key for (DefId, SimplifiedType) {
+ fn map_crate(&self) -> CrateNum {
+ self.0.krate
+ }
+ fn default_span(&self, tcx: TyCtxt) -> Span {
+ self.0.default_span(tcx)
+ }
+}
+
impl<'tcx> Key for (DefId, &'tcx Substs<'tcx>) {
fn map_crate(&self) -> CrateNum {
self.0.krate
}
}
+impl<'tcx> QueryDescription for queries::trait_impls_of<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("trait impls of `{}`", tcx.item_path_str(def_id))
+ }
+}
+
+impl<'tcx> QueryDescription for queries::relevant_trait_impls_for<'tcx> {
+ fn describe(tcx: TyCtxt, (def_id, ty): (DefId, SimplifiedType)) -> String {
+ format!("relevant impls for: `({}, {:?})`", tcx.item_path_str(def_id), ty)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::is_object_safe<'tcx> {
+ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
+ format!("determine object safety of trait `{}`", tcx.item_path_str(def_id))
+ }
+}
+
macro_rules! define_maps {
(<$tcx:tt>
$($(#[$attr:meta])*
output: $output:tt) => {
define_map_struct! {
tcx: $tcx,
- ready: ([pub] $attrs $name),
+ ready: ([] $attrs $name),
input: ($($input)*),
output: $output
}
[] item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> Rc<BTreeMap<hir::BodyId, hir::Body>>,
[] const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
[] is_mir_available: IsMirAvailable(DefId) -> bool,
+
+ [] trait_impls_of: TraitImpls(DefId) -> ty::trait_def::TraitImpls,
+    // Note that TraitDef::for_each_relevant_impl() will do type simplification for you.
+ [] relevant_trait_impls_for: relevant_trait_impls_for((DefId, SimplifiedType))
+ -> ty::trait_def::TraitImpls,
+ [] specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
+ [] is_object_safe: ObjectSafety(DefId) -> bool,
}
fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
fn crate_variances(_: CrateNum) -> DepNode<DefId> {
DepNode::CrateVariances
}
+
+fn relevant_trait_impls_for((def_id, _): (DefId, SimplifiedType)) -> DepNode<DefId> {
+ DepNode::TraitImpls(def_id)
+}
pub use self::instance::{Instance, InstanceDef};
-pub use self::trait_def::{TraitDef, TraitFlags};
+pub use self::trait_def::TraitDef;
pub use self::maps::queries;
}
pub fn trait_has_default_impl(self, trait_def_id: DefId) -> bool {
- let def = self.trait_def(trait_def_id);
- def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL)
- }
-
- /// Populates the type context with all the implementations for the given
- /// trait if necessary.
- pub fn populate_implementations_for_trait_if_necessary(self, trait_id: DefId) {
- if trait_id.is_local() {
- return
- }
-
- // The type is not local, hence we are reading this out of
- // metadata and don't need to track edges.
- let _ignore = self.dep_graph.in_ignore();
-
- let def = self.trait_def(trait_id);
- if def.flags.get().intersects(TraitFlags::HAS_REMOTE_IMPLS) {
- return;
- }
-
- debug!("populate_implementations_for_trait_if_necessary: searching for {:?}", def);
-
- for impl_def_id in self.sess.cstore.implementations_of_trait(Some(trait_id)) {
- let trait_ref = self.impl_trait_ref(impl_def_id).unwrap();
-
- // Record the trait->implementation mapping.
- let parent = self.impl_parent(impl_def_id).unwrap_or(trait_id);
- def.record_remote_impl(self, impl_def_id, trait_ref, parent);
- }
-
- def.flags.set(def.flags.get() | TraitFlags::HAS_REMOTE_IMPLS);
+ self.trait_def(trait_def_id).has_default_impl
}
/// Given the def_id of an impl, return the def_id of the trait it implements.
adt_dtorck_constraint,
def_span,
trait_of_item,
+ trait_impls_of: trait_def::trait_impls_of_provider,
+ relevant_trait_impls_for: trait_def::relevant_trait_impls_provider,
..*providers
};
}
*providers = ty::maps::Providers {
adt_sized_constraint,
adt_dtorck_constraint,
+ trait_impls_of: trait_def::trait_impls_of_provider,
+ relevant_trait_impls_for: trait_def::relevant_trait_impls_provider,
..*providers
};
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use dep_graph::DepNode;
-use hir::def_id::{DefId, LOCAL_CRATE};
-use traits::{self, specialization_graph};
-use ty;
+use hir::def_id::DefId;
+use traits::specialization_graph;
use ty::fast_reject;
-use ty::{Ty, TyCtxt, TraitRef};
-use std::cell::{Cell, RefCell};
+use ty::fold::TypeFoldable;
+use ty::{Ty, TyCtxt};
+use std::rc::Rc;
use hir;
-use util::nodemap::FxHashMap;
-
-use syntax::ast;
-use syntax_pos::DUMMY_SP;
/// A trait's definition with type information.
pub struct TraitDef {
/// be usable with the sugar (or without it).
pub paren_sugar: bool,
- // Impls of a trait. To allow for quicker lookup, the impls are indexed by a
- // simplified version of their `Self` type: impls with a simplifiable `Self`
- // are stored in `nonblanket_impls` keyed by it, while all other impls are
- // stored in `blanket_impls`.
- //
- // A similar division is used within `specialization_graph`, but the ones
- // here are (1) stored as a flat list for the trait and (2) populated prior
- // to -- and used while -- determining specialization order.
- //
- // FIXME: solve the reentrancy issues and remove these lists in favor of the
- // ones in `specialization_graph`.
- //
- // These lists are tracked by `DepNode::TraitImpls`; we don't use
- // a DepTrackingMap but instead have the `TraitDef` insert the
- // required reads/writes.
-
- /// Impls of the trait.
- nonblanket_impls: RefCell<
- FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>
- >,
-
- /// Blanket impls associated with the trait.
- blanket_impls: RefCell<Vec<DefId>>,
-
- /// The specialization order for impls of this trait.
- pub specialization_graph: RefCell<traits::specialization_graph::Graph>,
-
- /// Various flags
- pub flags: Cell<TraitFlags>,
-
- /// The number of impls we've added from the local crate.
- /// When this number matches up the list in the HIR map,
- /// we're done, and the specialization graph is correct.
- local_impl_count: Cell<usize>,
+ pub has_default_impl: bool,
/// The ICH of this trait's DefPath, cached here so it doesn't have to be
/// recomputed all the time.
pub def_path_hash: u64,
}
-impl<'a, 'gcx, 'tcx> TraitDef {
- pub fn new(def_id: DefId,
- unsafety: hir::Unsafety,
- paren_sugar: bool,
- def_path_hash: u64)
- -> TraitDef {
- TraitDef {
- def_id: def_id,
- paren_sugar: paren_sugar,
- unsafety: unsafety,
- nonblanket_impls: RefCell::new(FxHashMap()),
- blanket_impls: RefCell::new(vec![]),
- flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS),
- local_impl_count: Cell::new(0),
- specialization_graph: RefCell::new(traits::specialization_graph::Graph::new()),
- def_path_hash: def_path_hash,
- }
- }
+// We don't store the impls in a single flat list because then each cached
+// `relevant_impls_for` list would have to duplicate all blanket impls. By
+// keeping blanket and non-blanket impls separate, we can share the list of
+// blanket impls.
+#[derive(Clone)]
+pub struct TraitImpls {
+ blanket_impls: Rc<Vec<DefId>>,
+ non_blanket_impls: Rc<Vec<DefId>>,
+}
- // returns None if not yet calculated
- pub fn object_safety(&self) -> Option<bool> {
- if self.flags.get().intersects(TraitFlags::OBJECT_SAFETY_VALID) {
- Some(self.flags.get().intersects(TraitFlags::IS_OBJECT_SAFE))
- } else {
- None
+impl TraitImpls {
+ pub fn iter(&self) -> TraitImplsIter {
+ TraitImplsIter {
+ blanket_impls: self.blanket_impls.clone(),
+ non_blanket_impls: self.non_blanket_impls.clone(),
+ index: 0
}
}
+}
- pub fn set_object_safety(&self, is_safe: bool) {
- assert!(self.object_safety().map(|cs| cs == is_safe).unwrap_or(true));
- self.flags.set(
- self.flags.get() | if is_safe {
- TraitFlags::OBJECT_SAFETY_VALID | TraitFlags::IS_OBJECT_SAFE
- } else {
- TraitFlags::OBJECT_SAFETY_VALID
- }
- );
- }
-
- fn write_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) {
- tcx.dep_graph.write(DepNode::TraitImpls(self.def_id));
- }
-
- fn read_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) {
- tcx.dep_graph.read(DepNode::TraitImpls(self.def_id));
- }
-
- /// Records a basic trait-to-implementation mapping.
- ///
- /// Returns `true` iff the impl has not previously been recorded.
- fn record_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId,
- impl_trait_ref: TraitRef<'tcx>)
- -> bool {
- debug!("TraitDef::record_impl for {:?}, from {:?}",
- self, impl_trait_ref);
+#[derive(Clone)]
+pub struct TraitImplsIter {
+ blanket_impls: Rc<Vec<DefId>>,
+ non_blanket_impls: Rc<Vec<DefId>>,
+ index: usize,
+}
- // Record the write into the impl set, but only for local
- // impls: external impls are handled differently.
- if impl_def_id.is_local() {
- self.write_trait_impls(tcx);
- }
+impl Iterator for TraitImplsIter {
+ type Item = DefId;
- // We don't want to borrow_mut after we already populated all impls,
- // so check if an impl is present with an immutable borrow first.
- if let Some(sty) = fast_reject::simplify_type(tcx,
- impl_trait_ref.self_ty(), false) {
- if let Some(is) = self.nonblanket_impls.borrow().get(&sty) {
- if is.contains(&impl_def_id) {
- return false; // duplicate - skip
- }
- }
-
- self.nonblanket_impls.borrow_mut().entry(sty).or_insert(vec![]).push(impl_def_id)
+ fn next(&mut self) -> Option<DefId> {
+ if self.index < self.blanket_impls.len() {
+ let bi_index = self.index;
+ self.index += 1;
+ Some(self.blanket_impls[bi_index])
} else {
- if self.blanket_impls.borrow().contains(&impl_def_id) {
- return false; // duplicate - skip
+ let nbi_index = self.index - self.blanket_impls.len();
+ if nbi_index < self.non_blanket_impls.len() {
+ self.index += 1;
+ Some(self.non_blanket_impls[nbi_index])
+ } else {
+ None
}
- self.blanket_impls.borrow_mut().push(impl_def_id)
}
-
- true
- }
-
- /// Records a trait-to-implementation mapping for a crate-local impl.
- pub fn record_local_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId,
- impl_trait_ref: TraitRef<'tcx>) {
- assert!(impl_def_id.is_local());
- let was_new = self.record_impl(tcx, impl_def_id, impl_trait_ref);
- assert!(was_new);
-
- self.local_impl_count.set(self.local_impl_count.get() + 1);
}
- /// Records a trait-to-implementation mapping.
- pub fn record_has_default_impl(&self) {
- self.flags.set(self.flags.get() | TraitFlags::HAS_DEFAULT_IMPL);
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let items_left = (self.blanket_impls.len() + self.non_blanket_impls.len()) - self.index;
+ (items_left, Some(items_left))
}
+}
- /// Records a trait-to-implementation mapping for a non-local impl.
- ///
- /// The `parent_impl` is the immediately-less-specialized impl, or the
- /// trait's def ID if the impl is not a specialization -- information that
- /// should be pulled from the metadata.
- pub fn record_remote_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId,
- impl_trait_ref: TraitRef<'tcx>,
- parent_impl: DefId) {
- assert!(!impl_def_id.is_local());
+impl ExactSizeIterator for TraitImplsIter {}
- // if the impl has not previously been recorded
- if self.record_impl(tcx, impl_def_id, impl_trait_ref) {
- // if the impl is non-local, it's placed directly into the
- // specialization graph using parent information drawn from metadata.
- self.specialization_graph.borrow_mut()
- .record_impl_from_cstore(tcx, parent_impl, impl_def_id)
+impl<'a, 'gcx, 'tcx> TraitDef {
+ pub fn new(def_id: DefId,
+ unsafety: hir::Unsafety,
+ paren_sugar: bool,
+ has_default_impl: bool,
+ def_path_hash: u64)
+ -> TraitDef {
+ TraitDef {
+ def_id,
+ paren_sugar,
+ unsafety,
+ has_default_impl,
+ def_path_hash,
}
}
- /// Adds a local impl into the specialization graph, returning an error with
- /// overlap information if the impl overlaps but does not specialize an
- /// existing impl.
- pub fn add_impl_for_specialization(&self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- impl_def_id: DefId)
- -> Result<(), traits::OverlapError> {
- assert!(impl_def_id.is_local());
-
- self.specialization_graph.borrow_mut()
- .insert(tcx, impl_def_id)
- }
-
- pub fn ancestors(&'a self, of_impl: DefId) -> specialization_graph::Ancestors<'a> {
- specialization_graph::ancestors(self, of_impl)
- }
-
- /// Whether the impl set and specialization graphs are complete.
- pub fn is_complete(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
- tcx.populate_implementations_for_trait_if_necessary(self.def_id);
- ty::queries::coherent_trait::try_get(tcx, DUMMY_SP, (LOCAL_CRATE, self.def_id)).is_ok()
- }
-
- /// If any local impls haven't been added yet, returns
- /// Some(list of local impls for this trait).
- fn missing_local_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>)
- -> Option<&'gcx [ast::NodeId]> {
- if self.flags.get().intersects(TraitFlags::HAS_LOCAL_IMPLS) {
- return None;
- }
-
- if self.is_complete(tcx) {
- self.flags.set(self.flags.get() | TraitFlags::HAS_LOCAL_IMPLS);
- return None;
- }
-
- let impls = tcx.hir.trait_impls(self.def_id);
- assert!(self.local_impl_count.get() <= impls.len());
- if self.local_impl_count.get() == impls.len() {
- self.flags.set(self.flags.get() | TraitFlags::HAS_LOCAL_IMPLS);
- return None;
- }
-
- Some(impls)
+ pub fn ancestors(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
+ of_impl: DefId)
+ -> specialization_graph::Ancestors {
+ specialization_graph::ancestors(tcx, self.def_id, of_impl)
}
pub fn for_each_impl<F: FnMut(DefId)>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, mut f: F) {
- self.read_trait_impls(tcx);
- tcx.populate_implementations_for_trait_if_necessary(self.def_id);
-
- let local_impls = self.missing_local_impls(tcx);
- if let Some(impls) = local_impls {
- for &id in impls {
- f(tcx.hir.local_def_id(id));
- }
- }
- let mut f = |def_id: DefId| {
- if !(local_impls.is_some() && def_id.is_local()) {
- f(def_id);
- }
- };
-
- for &impl_def_id in self.blanket_impls.borrow().iter() {
+ for impl_def_id in tcx.trait_impls_of(self.def_id).iter() {
f(impl_def_id);
}
-
- for v in self.nonblanket_impls.borrow().values() {
- for &impl_def_id in v {
- f(impl_def_id);
- }
- }
}
/// Iterate over every impl that could possibly match the
self_ty: Ty<'tcx>,
mut f: F)
{
- self.read_trait_impls(tcx);
- tcx.populate_implementations_for_trait_if_necessary(self.def_id);
-
- let local_impls = self.missing_local_impls(tcx);
- if let Some(impls) = local_impls {
- for &id in impls {
- f(tcx.hir.local_def_id(id));
- }
- }
- let mut f = |def_id: DefId| {
- if !(local_impls.is_some() && def_id.is_local()) {
- f(def_id);
- }
- };
-
- for &impl_def_id in self.blanket_impls.borrow().iter() {
- f(impl_def_id);
- }
-
// simplify_type(.., false) basically replaces type parameters and
// projections with infer-variables. This is, of course, done on
// the impl trait-ref when it is instantiated, but not on the
// replace `S` with anything - this impl of course can't be
// selected, and as there are hundreds of similar impls,
// considering them would significantly harm performance.
- if let Some(simp) = fast_reject::simplify_type(tcx, self_ty, true) {
- if let Some(impls) = self.nonblanket_impls.borrow().get(&simp) {
- for &impl_def_id in impls {
- f(impl_def_id);
- }
- }
+ let relevant_impls = if let Some(simplified_self_ty) =
+ fast_reject::simplify_type(tcx, self_ty, true) {
+ tcx.relevant_trait_impls_for((self.def_id, simplified_self_ty))
} else {
- for v in self.nonblanket_impls.borrow().values() {
- for &impl_def_id in v {
- f(impl_def_id);
- }
- }
+ tcx.trait_impls_of(self.def_id)
+ };
+
+ for impl_def_id in relevant_impls.iter() {
+ f(impl_def_id);
}
}
}
-bitflags! {
- flags TraitFlags: u32 {
- const NO_TRAIT_FLAGS = 0,
- const HAS_DEFAULT_IMPL = 1 << 0,
- const IS_OBJECT_SAFE = 1 << 1,
- const OBJECT_SAFETY_VALID = 1 << 2,
- const HAS_REMOTE_IMPLS = 1 << 3,
- const HAS_LOCAL_IMPLS = 1 << 4,
+// Query provider for `trait_impls_of`.
+pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ trait_id: DefId)
+ -> TraitImpls {
+ let remote_impls = if trait_id.is_local() {
+ // Traits defined in the current crate can't have impls in upstream
+ // crates, so we don't bother querying the cstore.
+ Vec::new()
+ } else {
+ tcx.sess.cstore.implementations_of_trait(Some(trait_id))
+ };
+
+ let mut blanket_impls = Vec::new();
+ let mut non_blanket_impls = Vec::new();
+
+ let local_impls = tcx.hir
+ .trait_impls(trait_id)
+ .into_iter()
+ .map(|&node_id| tcx.hir.local_def_id(node_id));
+
+ for impl_def_id in local_impls.chain(remote_impls.into_iter()) {
+ let impl_self_ty = tcx.type_of(impl_def_id);
+ if impl_def_id.is_local() && impl_self_ty.references_error() {
+ continue
+ }
+
+ if fast_reject::simplify_type(tcx, impl_self_ty, false).is_some() {
+ non_blanket_impls.push(impl_def_id);
+ } else {
+ blanket_impls.push(impl_def_id);
+ }
+ }
+
+ TraitImpls {
+ blanket_impls: Rc::new(blanket_impls),
+ non_blanket_impls: Rc::new(non_blanket_impls),
+ }
+}
+
+// Query provider for `relevant_trait_impls_for`.
+pub(super) fn relevant_trait_impls_provider<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ (trait_id, self_ty): (DefId, fast_reject::SimplifiedType))
+ -> TraitImpls
+{
+ let all_trait_impls = tcx.trait_impls_of(trait_id);
+
+ let relevant: Vec<DefId> = all_trait_impls
+ .non_blanket_impls
+ .iter()
+ .cloned()
+ .filter(|&impl_def_id| {
+ let impl_self_ty = tcx.type_of(impl_def_id);
+ let impl_simple_self_ty = fast_reject::simplify_type(tcx,
+ impl_self_ty,
+ false).unwrap();
+ impl_simple_self_ty == self_ty
+ })
+ .collect();
+
+ if all_trait_impls.non_blanket_impls.len() == relevant.len() {
+ // If we didn't filter anything out, re-use the existing vec.
+ all_trait_impls
+ } else {
+ TraitImpls {
+ blanket_impls: all_trait_impls.blanket_impls.clone(),
+ non_blanket_impls: Rc::new(relevant),
+ }
}
}
}
// Like std::macros::try!, but for Option<>.
+#[cfg(unix)]
macro_rules! option_try(
($e:expr) => (match $e { Some(e) => e, None => return None })
);
rustc_errors = { path = "../librustc_errors" }
rustc_incremental = { path = "../librustc_incremental" }
rustc_lint = { path = "../librustc_lint" }
-rustc_llvm = { path = "../librustc_llvm" }
rustc_metadata = { path = "../librustc_metadata" }
rustc_mir = { path = "../librustc_mir" }
rustc_passes = { path = "../librustc_passes" }
use rustc::middle::privacy::AccessLevels;
use rustc::mir::transform::{MIR_CONST, MIR_VALIDATED, MIR_OPTIMIZED, Passes};
use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas};
+use rustc::traits;
use rustc::util::common::time;
use rustc::util::nodemap::NodeSet;
use rustc::util::fs::rename_or_copy_remove;
let krate = ecx.monotonic_expander().expand_crate(krate);
+ ecx.check_unused_macros();
+
let mut missing_fragment_specifiers: Vec<_> =
ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
missing_fragment_specifiers.sort();
trans::provide(&mut local_providers);
typeck::provide(&mut local_providers);
ty::provide(&mut local_providers);
+ traits::provide(&mut local_providers);
reachable::provide(&mut local_providers);
rustc_const_eval::provide(&mut local_providers);
middle::region::provide(&mut local_providers);
cstore::provide(&mut extern_providers);
trans::provide(&mut extern_providers);
ty::provide_extern(&mut extern_providers);
+ traits::provide_extern(&mut extern_providers);
// FIXME(eddyb) get rid of this once we replace const_eval with miri.
rustc_const_eval::provide(&mut extern_providers);
extern crate rustc_trans;
extern crate rustc_typeck;
extern crate serialize;
-extern crate rustc_llvm as llvm;
#[macro_use]
extern crate log;
extern crate syntax;
use rustc_save_analysis as save;
use rustc_save_analysis::DumpHandler;
use rustc_trans::back::link;
-use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
+use rustc_trans::back::write::{RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
use rustc::dep_graph::DepGraph;
use rustc::session::{self, config, Session, build_session, CompileResult};
use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType};
let (sopts, cfg) = config::build_session_options_and_crate_config(&matches);
if sopts.debugging_opts.debug_llvm {
- unsafe { llvm::LLVMRustSetDebug(1); }
+ rustc_trans::enable_llvm_debug();
}
let descriptions = diagnostics_registry();
};
let dep_graph = DepGraph::new(sopts.build_dep_graph());
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let loader = file_loader.unwrap_or(box RealFileLoader);
let codemap = Rc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping()));
let mut sess = session::build_session_with_codemap(
sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest,
);
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let mut cfg = config::build_configuration(&sess, cfg);
return None;
}
let dep_graph = DepGraph::new(sopts.build_dep_graph());
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let mut sess = build_session(sopts.clone(),
&dep_graph,
None,
descriptions.clone(),
cstore.clone());
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let mut cfg = config::build_configuration(&sess, cfg.clone());
target_features::add_configuration(&mut cfg, &sess);
&Input::File(ref ifile) => {
let path = &(*ifile);
let mut v = Vec::new();
- locator::list_file_metadata(&sess.target.target, path, &mut v).unwrap();
+ locator::list_file_metadata(&sess.target.target,
+ path,
+ sess.cstore.metadata_loader(),
+ &mut v)
+ .unwrap();
println!("{}", String::from_utf8(v).unwrap());
}
&Input::Str { .. } => {
println!("{}", cfg);
}
}
- PrintRequest::TargetCPUs => {
- let tm = create_target_machine(sess);
- unsafe { llvm::LLVMRustPrintTargetCPUs(tm); }
- }
- PrintRequest::TargetFeatures => {
- let tm = create_target_machine(sess);
- unsafe { llvm::LLVMRustPrintTargetFeatures(tm); }
- }
PrintRequest::RelocationModels => {
println!("Available relocation models:");
for &(name, _) in RELOC_MODEL_ARGS.iter() {
}
println!("");
}
+ PrintRequest::TargetCPUs | PrintRequest::TargetFeatures => {
+ rustc_trans::print(*req, sess);
+ }
}
}
return Compilation::Stop;
println!("commit-date: {}", unw(commit_date_str()));
println!("host: {}", config::host_triple());
println!("release: {}", unw(release_str()));
- unsafe {
- println!("LLVM version: {}.{}",
- llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());
- }
+ rustc_trans::print_version();
}
}
}
if cg_flags.contains(&"passes=list".to_string()) {
- unsafe {
- ::llvm::LLVMRustPrintPasses();
- }
+ rustc_trans::print_passes();
return None;
}
// except according to those terms.
use syntax::ast;
-use llvm::LLVMRustHasFeature;
use rustc::session::Session;
-use rustc_trans::back::write::create_target_machine;
use syntax::symbol::Symbol;
-use libc::c_char;
-
-// WARNING: the features must be known to LLVM or the feature
-// detection code will walk past the end of the feature array,
-// leading to crashes.
-
-const ARM_WHITELIST: &'static [&'static str] = &["neon\0", "vfp2\0", "vfp3\0", "vfp4\0"];
-
-const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
- "sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
- "ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
- "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
-
-const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx\0", "hvx-double\0"];
+use rustc_trans;
/// Add `target_feature = "..."` cfgs for a variety of platform
/// specific features (SSE, NEON etc.).
/// This is performed by checking whether a whitelisted set of
/// features is available on the target machine, by querying LLVM.
pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) {
- let target_machine = create_target_machine(sess);
-
- let whitelist = match &*sess.target.target.arch {
- "arm" => ARM_WHITELIST,
- "x86" | "x86_64" => X86_WHITELIST,
- "hexagon" => HEXAGON_WHITELIST,
- _ => &[],
- };
-
let tf = Symbol::intern("target_feature");
- for feat in whitelist {
- assert_eq!(feat.chars().last(), Some('\0'));
- if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {
- cfg.insert((tf, Some(Symbol::intern(&feat[..feat.len() - 1]))));
- }
+
+ for feat in rustc_trans::target_features(sess) {
+ cfg.insert((tf, Some(feat)));
}
let requested_features = sess.opts.cg.target_feature.split(',');
use rustc::dep_graph::DepGraph;
use rustc_lint;
use rustc_resolve::MakeGlobMap;
+use rustc_trans;
use rustc::middle::lang_items;
use rustc::middle::free_region::FreeRegionMap;
use rustc::middle::region::{CodeExtent, RegionMaps};
let dep_graph = DepGraph::new(false);
let _ignore = dep_graph.in_ignore();
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let sess = session::build_session_(options,
&dep_graph,
None,
diagnostic_handler,
Rc::new(CodeMap::new(FilePathMapping::empty())),
cstore.clone());
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let input = config::Input::Str {
name: driver::anon_src(),
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ich::{Fingerprint, StableHashingContext};
use rustc::ty::TyCtxt;
+use rustc::util::common::record_time;
use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
use rustc_data_structures::fx::FxHashMap;
-use rustc::util::common::record_time;
+use rustc_data_structures::accumulate_vec::AccumulateVec;
pub type IchHasher = StableHasher<Fingerprint>;
// difference, filter them out.
return None
}
+ DepNode::AllLocalTraitImpls => {
+ // These are already covered by hashing
+ // the HIR.
+ return None
+ }
ref other => {
bug!("Found unexpected DepNode during \
SVH computation: {:?}",
true,
(module, (span, attrs)));
}
+
+ fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate)
+ {
+ let tcx = self.hcx.tcx();
+
+ let mut impls: Vec<(u64, Fingerprint)> = krate
+ .trait_impls
+ .iter()
+ .map(|(&trait_id, impls)| {
+ let trait_id = tcx.def_path_hash(trait_id);
+ let mut impls: AccumulateVec<[_; 32]> = impls
+ .iter()
+ .map(|&node_id| {
+ let def_id = tcx.hir.local_def_id(node_id);
+ tcx.def_path_hash(def_id)
+ })
+ .collect();
+
+ impls.sort_unstable();
+ let mut hasher = StableHasher::new();
+ impls.hash_stable(&mut self.hcx, &mut hasher);
+ (trait_id, hasher.finish())
+ })
+ .collect();
+
+ impls.sort_unstable();
+
+ let mut default_impls: AccumulateVec<[_; 32]> = krate
+ .trait_default_impl
+ .iter()
+ .map(|(&trait_def_id, &impl_node_id)| {
+ let impl_def_id = tcx.hir.local_def_id(impl_node_id);
+ (tcx.def_path_hash(trait_def_id), tcx.def_path_hash(impl_def_id))
+ })
+ .collect();
+
+ default_impls.sort_unstable();
+
+ let mut hasher = StableHasher::new();
+ impls.hash_stable(&mut self.hcx, &mut hasher);
+
+ self.hashes.insert(DepNode::AllLocalTraitImpls, hasher.finish());
+ }
}
impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> {
}
}
+
+
pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> IncrementalHashesMap {
let _ignore = tcx.dep_graph.in_ignore();
let fingerprint = hasher.finish();
visitor.hashes.insert(dep_node, fingerprint);
}
+
+ visitor.compute_and_store_ich_for_trait_impls(krate);
});
tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64);
}
}
}
-
-macro_rules! set {
- ($( $value:expr ),*) => {
- {
- use $crate::rustc_data_structures::fx::FxHashSet;
- let mut set = FxHashSet();
- $(set.insert($value);)*
- set
- }
- }
-}
UNUSED_MUST_USE,
UNUSED_UNSAFE,
PATH_STATEMENTS,
- UNUSED_ATTRIBUTES);
+ UNUSED_ATTRIBUTES,
+ UNUSED_MACROS);
// Guidelines for creating a future incompatibility lint:
//
[dependencies]
flate = { path = "../libflate" }
log = "0.3"
+owning_ref = "0.3.3"
proc_macro = { path = "../libproc_macro" }
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
-rustc_llvm = { path = "../librustc_llvm" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_ext = { path = "../libsyntax_ext" }
let exported_symbols = crate_root.exported_symbols
.map(|x| x.decode(&metadata).collect());
+ let trait_impls = crate_root
+ .impls
+ .map(|impls| {
+ impls.decode(&metadata)
+ .map(|trait_impls| (trait_impls.trait_id, trait_impls.impls))
+ .collect()
+ });
+
let mut cmeta = cstore::CrateMetadata {
name: name,
extern_crate: Cell::new(None),
def_path_table: def_path_table,
exported_symbols: exported_symbols,
+ trait_impls: trait_impls,
proc_macros: crate_root.macro_derive_registrar.map(|_| {
self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span)
}),
rejected_via_filename: vec![],
should_match_name: true,
is_proc_macro: Some(false),
+ metadata_loader: &*self.cstore.metadata_loader,
};
self.load(&mut locate_ctxt).or_else(|| {
rejected_via_filename: vec![],
should_match_name: true,
is_proc_macro: None,
+ metadata_loader: &*self.cstore.metadata_loader,
};
let library = self.load(&mut locate_ctxt).or_else(|| {
if !is_cross {
// The crate store - a central repo for information collected about external
// crates and libraries
-use locator;
use schema::{self, Tracked};
use rustc::dep_graph::{DepGraph, DepNode, GlobalMetaDataKind};
use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefIndex, DefId};
use rustc::hir::map::definitions::DefPathTable;
use rustc::hir::svh::Svh;
-use rustc::middle::cstore::{DepKind, ExternCrate};
+use rustc::middle::cstore::{DepKind, ExternCrate, MetadataLoader};
use rustc_back::PanicStrategy;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc::util::nodemap::{FxHashMap, FxHashSet, NodeMap, DefIdMap};
use std::cell::{RefCell, Cell};
use std::rc::Rc;
-use flate::Bytes;
+use owning_ref::ErasedBoxRef;
use syntax::{ast, attr};
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
// own crate numbers.
pub type CrateNumMap = IndexVec<CrateNum, CrateNum>;
-pub enum MetadataBlob {
- Inflated(Bytes),
- Archive(locator::ArchiveMetadata),
- Raw(Vec<u8>),
-}
+pub struct MetadataBlob(pub ErasedBoxRef<[u8]>);
/// Holds information about a syntax_pos::FileMap imported from another crate.
/// See `imported_filemaps()` for more information.
pub exported_symbols: Tracked<FxHashSet<DefIndex>>,
+ pub trait_impls: Tracked<FxHashMap<(u32, DefIndex), schema::LazySeq<DefIndex>>>,
+
pub dep_kind: Cell<DepKind>,
pub source: CrateSource,
statically_included_foreign_items: RefCell<FxHashSet<DefIndex>>,
pub dllimport_foreign_items: RefCell<FxHashSet<DefIndex>>,
pub visible_parent_map: RefCell<DefIdMap<DefId>>,
+ pub metadata_loader: Box<MetadataLoader>,
}
impl CStore {
- pub fn new(dep_graph: &DepGraph) -> CStore {
+ pub fn new(dep_graph: &DepGraph, metadata_loader: Box<MetadataLoader>) -> CStore {
CStore {
dep_graph: dep_graph.clone(),
metas: RefCell::new(FxHashMap()),
statically_included_foreign_items: RefCell::new(FxHashSet()),
dllimport_foreign_items: RefCell::new(FxHashSet()),
visible_parent_map: RefCell::new(FxHashMap()),
+ metadata_loader: metadata_loader,
}
}
use cstore;
use encoder;
-use locator;
use schema;
use rustc::dep_graph::DepTrackingMapConfig;
use rustc::middle::cstore::{CrateStore, CrateSource, LibSource, DepKind,
- ExternCrate, NativeLibrary, LinkMeta,
+ ExternCrate, NativeLibrary, MetadataLoader, LinkMeta,
LinkagePreference, LoadedMacro, EncodedMetadata};
use rustc::hir::def;
use rustc::middle::lang_items;
use syntax::symbol::Symbol;
use syntax_pos::{Span, NO_EXPANSION};
use rustc::hir::svh::Svh;
-use rustc_back::target::Target;
use rustc::hir;
macro_rules! provide {
self.get_crate_data(krate)
}
+ fn metadata_loader(&self) -> &MetadataLoader {
+ &*self.metadata_loader
+ }
+
fn visibility(&self, def: DefId) -> ty::Visibility {
self.dep_graph.read(DepNode::MetaData(def));
self.get_crate_data(def.krate).get_visibility(def.index)
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
{
- if let Some(def_id) = filter {
- self.dep_graph.read(DepNode::MetaData(def_id));
- }
let mut result = vec![];
+
self.iter_crate_data(|_, cdata| {
cdata.get_implementations_for_trait(filter, &self.dep_graph, &mut result)
});
{
self.get_used_link_args().borrow().clone()
}
-
- fn metadata_filename(&self) -> &str
- {
- locator::METADATA_FILENAME
- }
-
- fn metadata_section_name(&self, target: &Target) -> &str
- {
- locator::meta_section_name(target)
- }
-
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>
{
self.do_get_used_crates(prefer)
drop(visible_parent_map);
self.visible_parent_map.borrow()
}
-}
+}
\ No newline at end of file
impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob {
fn raw_bytes(self) -> &'a [u8] {
- match *self {
- MetadataBlob::Inflated(ref vec) => vec,
- MetadataBlob::Archive(ref ar) => ar.as_slice(),
- MetadataBlob::Raw(ref vec) => vec,
- }
+ &self.0
}
}
_ => bug!(),
};
- let def = ty::TraitDef::new(self.local_def_id(item_id),
- data.unsafety,
- data.paren_sugar,
- self.def_path_table.def_path_hash(item_id));
-
- if data.has_default_impl {
- def.record_has_default_impl();
- }
-
- def
+ ty::TraitDef::new(self.local_def_id(item_id),
+ data.unsafety,
+ data.paren_sugar,
+ data.has_default_impl,
+ self.def_path_table.def_path_hash(item_id))
}
fn get_variant(&self, item: &Entry, index: DefIndex) -> ty::VariantDef {
None => None,
};
- // FIXME(eddyb) Make this O(1) instead of O(n).
let dep_node = self.metadata_dep_node(GlobalMetaDataKind::Impls);
- for trait_impls in self.root.impls.get(dep_graph, dep_node).decode(self) {
- if filter.is_some() && filter != Some(trait_impls.trait_id) {
- continue;
- }
-
- result.extend(trait_impls.impls.decode(self).map(|index| self.local_def_id(index)));
- if filter.is_some() {
- break;
+ if let Some(filter) = filter {
+ if let Some(impls) = self.trait_impls
+ .get(dep_graph, dep_node)
+ .get(&filter) {
+ result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
+ }
+ } else {
+ for impls in self.trait_impls.get(dep_graph, dep_node).values() {
+ result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
}
}
}
let trait_ref = tcx.impl_trait_ref(def_id);
let parent = if let Some(trait_ref) = trait_ref {
let trait_def = tcx.trait_def(trait_ref.def_id);
- trait_def.ancestors(def_id).skip(1).next().and_then(|node| {
+ trait_def.ancestors(tcx, def_id).skip(1).next().and_then(|node| {
match node {
specialization_graph::Node::Impl(parent) => Some(parent),
_ => None,
/// Encodes an index, mapping each trait to its (local) implementations.
fn encode_impls(&mut self, _: ()) -> LazySeq<TraitImpls> {
+ debug!("IsolatedEncoder::encode_impls()");
+ let tcx = self.tcx;
let mut visitor = ImplVisitor {
- tcx: self.tcx,
+ tcx: tcx,
impls: FxHashMap(),
};
- self.tcx.hir.krate().visit_all_item_likes(&mut visitor);
+ tcx.hir.krate().visit_all_item_likes(&mut visitor);
+
+ let mut all_impls: Vec<_> = visitor.impls.into_iter().collect();
- let all_impls: Vec<_> = visitor.impls
+ // Bring everything into deterministic order for hashing
+ all_impls.sort_unstable_by_key(|&(trait_def_id, _)| {
+ tcx.def_path_hash(trait_def_id)
+ });
+
+ let all_impls: Vec<_> = all_impls
.into_iter()
- .map(|(trait_def_id, impls)| {
+ .map(|(trait_def_id, mut impls)| {
+ // Bring everything into deterministic order for hashing
+ impls.sort_unstable_by_key(|&def_index| {
+ tcx.hir.definitions().def_path_hash(def_index)
+ });
+
TraitImpls {
trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
- impls: self.lazy_seq(impls),
+ impls: self.lazy_seq_from_slice(&impls[..]),
}
})
.collect();
- self.lazy_seq(all_impls)
+ self.lazy_seq_from_slice(&all_impls[..])
}
// Encodes all symbols exported from this crate into the metadata.
#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![cfg_attr(stage0, feature(staged_api))]
+#![feature(sort_unstable)]
#[macro_use]
extern crate log;
extern crate syntax_pos;
extern crate flate;
extern crate serialize as rustc_serialize; // used by deriving
+extern crate owning_ref;
extern crate rustc_errors as errors;
extern crate syntax_ext;
extern crate proc_macro;
extern crate rustc_back;
extern crate rustc_const_math;
extern crate rustc_data_structures;
-extern crate rustc_llvm;
mod diagnostics;
use schema::{METADATA_HEADER, rustc_version};
use rustc::hir::svh::Svh;
+use rustc::middle::cstore::MetadataLoader;
use rustc::session::{config, Session};
use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch};
use rustc::session::search_paths::PathKind;
-use rustc::util::common;
use rustc::util::nodemap::FxHashMap;
-use rustc_llvm as llvm;
-use rustc_llvm::{False, ObjectFile, mk_section_iter};
-use rustc_llvm::archive_ro::ArchiveRO;
use errors::DiagnosticBuilder;
use syntax::symbol::Symbol;
use syntax_pos::Span;
use std::fs::{self, File};
use std::io::{self, Read};
use std::path::{Path, PathBuf};
-use std::ptr;
-use std::slice;
use std::time::Instant;
use flate;
+use owning_ref::{ErasedBoxRef, OwningRef};
pub struct CrateMismatch {
path: PathBuf,
pub rejected_via_filename: Vec<CrateMismatch>,
pub should_match_name: bool,
pub is_proc_macro: Option<bool>,
-}
-
-pub struct ArchiveMetadata {
- _archive: ArchiveRO,
- // points into self._archive
- data: *const [u8],
+ pub metadata_loader: &'a MetadataLoader,
}
pub struct CratePaths {
pub rmeta: Option<PathBuf>,
}
-pub const METADATA_FILENAME: &'static str = "rust.metadata.bin";
-
#[derive(Copy, Clone, PartialEq)]
enum CrateFlavor {
Rlib,
let mut err: Option<DiagnosticBuilder> = None;
for (lib, kind) in m {
info!("{} reading metadata from: {}", flavor, lib.display());
- let (hash, metadata) = match get_metadata_section(self.target, flavor, &lib) {
- Ok(blob) => {
- if let Some(h) = self.crate_matches(&blob, &lib) {
- (h, blob)
- } else {
- info!("metadata mismatch");
+ let (hash, metadata) =
+ match get_metadata_section(self.target, flavor, &lib, self.metadata_loader) {
+ Ok(blob) => {
+ if let Some(h) = self.crate_matches(&blob, &lib) {
+ (h, blob)
+ } else {
+ info!("metadata mismatch");
+ continue;
+ }
+ }
+ Err(err) => {
+ info!("no metadata found: {}", err);
continue;
}
- }
- Err(err) => {
- info!("no metadata found: {}", err);
- continue;
- }
- };
+ };
// If we see multiple hashes, emit an error about duplicate candidates.
if slot.as_ref().map_or(false, |s| s.0 != hash) {
let mut e = struct_span_err!(self.sess,
err.note(&format!("crate name: {}", name));
}
-impl ArchiveMetadata {
- fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
- let data = {
- let section = ar.iter()
- .filter_map(|s| s.ok())
- .find(|sect| sect.name() == Some(METADATA_FILENAME));
- match section {
- Some(s) => s.data() as *const [u8],
- None => {
- debug!("didn't find '{}' in the archive", METADATA_FILENAME);
- return None;
- }
- }
- };
-
- Some(ArchiveMetadata {
- _archive: ar,
- data: data,
- })
- }
-
- pub fn as_slice<'a>(&'a self) -> &'a [u8] {
- unsafe { &*self.data }
- }
-}
-
-fn verify_decompressed_encoding_version(blob: &MetadataBlob,
- filename: &Path)
- -> Result<(), String> {
- if !blob.is_compatible() {
- Err((format!("incompatible metadata version found: '{}'",
- filename.display())))
- } else {
- Ok(())
- }
-}
-
// Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(target: &Target,
flavor: CrateFlavor,
- filename: &Path)
+ filename: &Path,
+ loader: &MetadataLoader)
-> Result<MetadataBlob, String> {
let start = Instant::now();
- let ret = get_metadata_section_imp(target, flavor, filename);
+ let ret = get_metadata_section_imp(target, flavor, filename, loader);
info!("reading {:?} => {:?}",
filename.file_name().unwrap(),
start.elapsed());
fn get_metadata_section_imp(target: &Target,
flavor: CrateFlavor,
- filename: &Path)
+ filename: &Path,
+ loader: &MetadataLoader)
-> Result<MetadataBlob, String> {
if !filename.exists() {
return Err(format!("no such file: '{}'", filename.display()));
}
- if flavor == CrateFlavor::Rlib {
- // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
- // internally to read the file. We also avoid even using a memcpy by
- // just keeping the archive along while the metadata is in use.
- let archive = match ArchiveRO::open(filename) {
- Some(ar) => ar,
- None => {
- debug!("llvm didn't like `{}`", filename.display());
- return Err(format!("failed to read rlib metadata: '{}'", filename.display()));
+ let raw_bytes: ErasedBoxRef<[u8]> = match flavor {
+ CrateFlavor::Rlib => loader.get_rlib_metadata(target, filename)?,
+ CrateFlavor::Dylib => {
+ let buf = loader.get_dylib_metadata(target, filename)?;
+ // The header is uncompressed
+ let header_len = METADATA_HEADER.len();
+ debug!("checking {} bytes of metadata-version stamp", header_len);
+ let header = &buf[..cmp::min(header_len, buf.len())];
+ if header != METADATA_HEADER {
+ return Err(format!("incompatible metadata version found: '{}'",
+ filename.display()));
}
- };
- return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) {
- None => Err(format!("failed to read rlib metadata: '{}'", filename.display())),
- Some(blob) => {
- verify_decompressed_encoding_version(&blob, filename)?;
- Ok(blob)
- }
- };
- } else if flavor == CrateFlavor::Rmeta {
- let mut file = File::open(filename).map_err(|_|
- format!("could not open file: '{}'", filename.display()))?;
- let mut buf = vec![];
- file.read_to_end(&mut buf).map_err(|_|
- format!("failed to read rlib metadata: '{}'", filename.display()))?;
- let blob = MetadataBlob::Raw(buf);
- verify_decompressed_encoding_version(&blob, filename)?;
- return Ok(blob);
- }
- unsafe {
- let buf = common::path2cstr(filename);
- let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr());
- if mb as isize == 0 {
- return Err(format!("error reading library: '{}'", filename.display()));
- }
- let of = match ObjectFile::new(mb) {
- Some(of) => of,
- _ => {
- return Err((format!("provided path not an object file: '{}'", filename.display())))
- }
- };
- let si = mk_section_iter(of.llof);
- while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
- let mut name_buf = ptr::null();
- let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
- let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec();
- let name = String::from_utf8(name).unwrap();
- debug!("get_metadata_section: name {}", name);
- if read_meta_section_name(target) == name {
- let cbuf = llvm::LLVMGetSectionContents(si.llsi);
- let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
- let cvbuf: *const u8 = cbuf as *const u8;
- let vlen = METADATA_HEADER.len();
- debug!("checking {} bytes of metadata-version stamp", vlen);
- let minsz = cmp::min(vlen, csz);
- let buf0 = slice::from_raw_parts(cvbuf, minsz);
- let version_ok = buf0 == METADATA_HEADER;
- if !version_ok {
- return Err((format!("incompatible metadata version found: '{}'",
- filename.display())));
- }
- let cvbuf1 = cvbuf.offset(vlen as isize);
- debug!("inflating {} bytes of compressed metadata", csz - vlen);
- let bytes = slice::from_raw_parts(cvbuf1, csz - vlen);
- match flate::inflate_bytes(bytes) {
- Ok(inflated) => {
- let blob = MetadataBlob::Inflated(inflated);
- verify_decompressed_encoding_version(&blob, filename)?;
- return Ok(blob);
- }
- Err(_) => {}
+ // Header is okay -> inflate the actual metadata
+ let compressed_bytes = &buf[header_len..];
+ debug!("inflating {} bytes of compressed metadata", compressed_bytes.len());
+ match flate::inflate_bytes(compressed_bytes) {
+ Ok(inflated) => {
+ let buf = unsafe { OwningRef::new_assert_stable_address(inflated) };
+ buf.map_owner_box().erase_owner()
+ }
+ Err(_) => {
+ return Err(format!("failed to decompress metadata: {}", filename.display()));
}
}
- llvm::LLVMMoveToNextSection(si.llsi);
}
- Err(format!("metadata not found: '{}'", filename.display()))
- }
-}
-
-pub fn meta_section_name(target: &Target) -> &'static str {
- // Historical note:
- //
- // When using link.exe it was seen that the section name `.note.rustc`
- // was getting shortened to `.note.ru`, and according to the PE and COFF
- // specification:
- //
- // > Executable images do not use a string table and do not support
- // > section names longer than 8 characters
- //
- // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx
- //
- // As a result, we choose a slightly shorter name! As to why
- // `.note.rustc` works on MinGW, that's another good question...
-
- if target.options.is_like_osx {
- "__DATA,.rustc"
+ CrateFlavor::Rmeta => {
+ let mut file = File::open(filename).map_err(|_|
+ format!("could not open file: '{}'", filename.display()))?;
+ let mut buf = vec![];
+ file.read_to_end(&mut buf).map_err(|_|
+ format!("failed to read rmeta metadata: '{}'", filename.display()))?;
+ OwningRef::new(buf).map_owner_box().erase_owner()
+ }
+ };
+ let blob = MetadataBlob(raw_bytes);
+ if blob.is_compatible() {
+ Ok(blob)
} else {
- ".rustc"
+ Err(format!("incompatible metadata version found: '{}'", filename.display()))
}
}
-pub fn read_meta_section_name(_target: &Target) -> &'static str {
- ".rustc"
-}
-
// A diagnostic function for dumping crate metadata to an output stream
-pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> io::Result<()> {
+pub fn list_file_metadata(target: &Target,
+ path: &Path,
+ loader: &MetadataLoader,
+ out: &mut io::Write)
+ -> io::Result<()> {
let filename = path.file_name().unwrap().to_str().unwrap();
let flavor = if filename.ends_with(".rlib") {
CrateFlavor::Rlib
} else {
CrateFlavor::Dylib
};
- match get_metadata_section(target, flavor, path) {
+ match get_metadata_section(target, flavor, path, loader) {
Ok(metadata) => metadata.list_crate_metadata(out),
Err(msg) => write!(out, "{}\n", msg),
}
}
}
+impl<'a, 'tcx, T> HashStable<StableHashingContext<'a, 'tcx>> for Tracked<T>
+ where T: HashStable<StableHashingContext<'a, 'tcx>>
+{
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'a, 'tcx>,
+ hasher: &mut StableHasher<W>) {
+ let Tracked {
+ ref state
+ } = *self;
+
+ state.hash_stable(hcx, hasher);
+ }
+}
+
#[derive(RustcEncodable, RustcDecodable)]
pub struct CrateRoot {
MirSource::Promoted(_, i) => write!(w, "{:?} in", i)?
}
- write!(w, " {}", tcx.node_path_str(src.item_id()))?;
+ item_path::with_forced_impl_filename_line(|| { // see notes on #41697 elsewhere
+ write!(w, " {}", tcx.node_path_str(src.item_id()))
+ })?;
if let MirSource::Fn(_) = src {
write!(w, "(")?;
}
self.syntax_exts.push((name, match extension {
NormalTT(ext, _, allow_internal_unstable) => {
- NormalTT(ext, Some(self.krate_span), allow_internal_unstable)
+ let nid = ast::CRATE_NODE_ID;
+ NormalTT(ext, Some((nid, self.krate_span)), allow_internal_unstable)
}
IdentTT(ext, _, allow_internal_unstable) => {
IdentTT(ext, Some(self.krate_span), allow_internal_unstable)
pub whitelisted_legacy_custom_derives: Vec<Name>,
pub found_unresolved_macro: bool,
+ // List of crate local macros that we need to warn about as being unused.
+ // Right now this only includes macro_rules! macros.
+ unused_macros: FxHashSet<DefId>,
+
// Maps the `Mark` of an expansion to its containing module or block.
invocations: FxHashMap<Mark, &'a InvocationData<'a>>,
potentially_unused_imports: Vec::new(),
struct_constructors: DefIdMap(),
found_unresolved_macro: false,
+ unused_macros: FxHashSet(),
}
}
use rustc::hir::def_id::{DefId, BUILTIN_MACROS_CRATE, CRATE_DEF_INDEX, DefIndex};
use rustc::hir::def::{Def, Export};
use rustc::hir::map::{self, DefCollector};
-use rustc::ty;
+use rustc::{ty, lint};
use syntax::ast::{self, Name, Ident};
use syntax::attr::{self, HasAttrs};
use syntax::errors::DiagnosticBuilder;
},
};
self.macro_defs.insert(invoc.expansion_data.mark, def.def_id());
+ self.unused_macros.remove(&def.def_id());
Ok(Some(self.get_macro(def)))
}
fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
-> Result<Rc<SyntaxExtension>, Determinacy> {
- self.resolve_macro_to_def(scope, path, kind, force).map(|def| self.get_macro(def))
+ self.resolve_macro_to_def(scope, path, kind, force).map(|def| {
+ self.unused_macros.remove(&def.def_id());
+ self.get_macro(def)
+ })
+ }
+
+ fn check_unused_macros(&self) {
+ for did in self.unused_macros.iter() {
+ let id_span = match *self.macro_map[did] {
+ SyntaxExtension::NormalTT(_, isp, _) => isp,
+ _ => None,
+ };
+ if let Some((id, span)) = id_span {
+ let lint = lint::builtin::UNUSED_MACROS;
+ let msg = "unused macro definition".to_string();
+ self.session.add_lint(lint, id, span, msg);
+ } else {
+ bug!("attempted to create unused macro error, but span not available");
+ }
+ }
}
}
if attr::contains_name(&item.attrs, "macro_export") {
let def = Def::Macro(def_id, MacroKind::Bang);
self.macro_exports.push(Export { name: ident.name, def: def, span: item.span });
+ } else {
+ self.unused_macros.insert(def_id);
}
}
[dependencies]
flate = { path = "../libflate" }
log = "0.3"
+owning_ref = "0.3.3"
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
use libc;
use llvm::archive_ro::{ArchiveRO, Child};
use llvm::{self, ArchiveKind};
+use metadata::METADATA_FILENAME;
use rustc::session::Session;
pub struct ArchiveConfig<'a> {
// Ignoring all bytecode files, no matter of
// name
let bc_ext = ".bytecode.deflate";
- let metadata_filename =
- self.config.sess.cstore.metadata_filename().to_owned();
self.add_archive(rlib, move |fname: &str| {
- if fname.ends_with(bc_ext) || fname == metadata_filename {
+ if fname.ends_with(bc_ext) || fname == METADATA_FILENAME {
return true
}
use super::rpath::RPathConfig;
use super::rpath;
use super::msvc;
+use metadata::METADATA_FILENAME;
use session::config;
use session::config::NoDebugInfo;
use session::config::{OutputFilenames, Input, OutputType};
// contain the metadata in a separate file. We use a temp directory
// here so concurrent builds in the same directory don't try to use
// the same filename for metadata (stomping over one another)
- let metadata = tmpdir.join(sess.cstore.metadata_filename());
+ let metadata = tmpdir.join(METADATA_FILENAME);
emit_metadata(sess, trans, &metadata);
ab.add_file(&metadata);
if let Some(args) = sess.target.target.options.pre_link_args.get(&flavor) {
cmd.args(args);
}
+ if let Some(ref args) = sess.opts.debugging_opts.pre_link_args {
+ cmd.args(args);
+ }
+ cmd.args(&sess.opts.debugging_opts.pre_link_arg);
let pre_link_objects = if crate_type == config::CrateTypeExecutable {
&sess.target.target.options.pre_link_objects_exe
archive.update_symbols();
for f in archive.src_files() {
- if f.ends_with("bytecode.deflate") ||
- f == sess.cstore.metadata_filename() {
+ if f.ends_with("bytecode.deflate") || f == METADATA_FILENAME {
archive.remove_file(&f);
continue
}
let mut any_objects = false;
for f in archive.src_files() {
- if f.ends_with("bytecode.deflate") ||
- f == sess.cstore.metadata_filename() {
+ if f.ends_with("bytecode.deflate") || f == METADATA_FILENAME {
archive.remove_file(&f);
continue
}
use back::symbol_export::{self, ExportedSymbols};
use llvm::{ContextRef, Linkage, ModuleRef, ValueRef, Vector, get_param};
use llvm;
+use metadata;
use rustc::hir::def_id::LOCAL_CRATE;
use middle::lang_items::StartFnLangItem;
use middle::cstore::EncodedMetadata;
};
unsafe {
llvm::LLVMSetInitializer(llglobal, llconst);
- let section_name =
- tcx.sess.cstore.metadata_section_name(&tcx.sess.target.target);
+ let section_name = metadata::metadata_section_name(&tcx.sess.target.target);
let name = CString::new(section_name).unwrap();
llvm::LLVMSetSection(llglobal, name.as_ptr());
extern crate flate;
extern crate libc;
+extern crate owning_ref;
#[macro_use] extern crate rustc;
extern crate rustc_back;
extern crate rustc_data_structures;
pub use base::trans_crate;
pub use back::symbol_names::provide;
+pub use metadata::LlvmMetadataLoader;
+pub use llvm_util::{init, target_features, print_version, print_passes, print, enable_llvm_debug};
+
pub mod back {
pub use rustc::hir::svh;
mod declare;
mod glue;
mod intrinsic;
+mod llvm_util;
mod machine;
+mod metadata;
mod meth;
mod mir;
mod monomorphize;
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use syntax_pos::symbol::Symbol;
+use back::write::create_target_machine;
+use llvm;
+use rustc::session::Session;
+use rustc::session::config::PrintRequest;
+use libc::{c_int, c_char};
+use std::ffi::CString;
+
+use std::sync::atomic::{AtomicBool, Ordering};
+use std::sync::Once;
+
+pub fn init(sess: &Session) {
+ unsafe {
+ // Before we touch LLVM, make sure that multithreading is enabled.
+ static POISONED: AtomicBool = AtomicBool::new(false);
+ static INIT: Once = Once::new();
+ INIT.call_once(|| {
+ if llvm::LLVMStartMultithreaded() != 1 {
+ // record the failure in a separate bool so that every future
+ // call can still detect it, since the Once body runs only once.
+ POISONED.store(true, Ordering::SeqCst);
+ }
+
+ configure_llvm(sess);
+ });
+
+ if POISONED.load(Ordering::SeqCst) {
+ bug!("couldn't enable multi-threaded LLVM");
+ }
+ }
+}
+
+unsafe fn configure_llvm(sess: &Session) {
+ let mut llvm_c_strs = Vec::new();
+ let mut llvm_args = Vec::new();
+
+ {
+ let mut add = |arg: &str| {
+ let s = CString::new(arg).unwrap();
+ llvm_args.push(s.as_ptr());
+ llvm_c_strs.push(s);
+ };
+ add("rustc"); // fake program name
+ if sess.time_llvm_passes() { add("-time-passes"); }
+ if sess.print_llvm_passes() { add("-debug-pass=Structure"); }
+
+ for arg in &sess.opts.cg.llvm_args {
+ add(&(*arg));
+ }
+ }
+
+ llvm::LLVMInitializePasses();
+
+ llvm::initialize_available_targets();
+
+ llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int,
+ llvm_args.as_ptr());
+}
+
+// WARNING: the features must be known to LLVM or the feature
+// detection code will walk past the end of the feature array,
+// leading to crashes.
+
+const ARM_WHITELIST: &'static [&'static str] = &["neon\0", "vfp2\0", "vfp3\0", "vfp4\0"];
+
+const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
+ "sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
+ "ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
+ "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
+
+const HEXAGON_WHITELIST: &'static [&'static str] = &["hvx\0", "hvx-double\0"];
+
+pub fn target_features(sess: &Session) -> Vec<Symbol> {
+ let target_machine = create_target_machine(sess);
+
+ let whitelist = match &*sess.target.target.arch {
+ "arm" => ARM_WHITELIST,
+ "x86" | "x86_64" => X86_WHITELIST,
+ "hexagon" => HEXAGON_WHITELIST,
+ _ => &[],
+ };
+
+ let mut features = Vec::new();
+ for feat in whitelist {
+ assert_eq!(feat.chars().last(), Some('\0'));
+ if unsafe { llvm::LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {
+ features.push(Symbol::intern(&feat[..feat.len() - 1]));
+ }
+ }
+ features
+}
+
+pub fn print_version() {
+ unsafe {
+ println!("LLVM version: {}.{}",
+ llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor());
+ }
+}
+
+pub fn print_passes() {
+ unsafe { llvm::LLVMRustPrintPasses(); }
+}
+
+pub fn print(req: PrintRequest, sess: &Session) {
+ let tm = create_target_machine(sess);
+ unsafe {
+ match req {
+ PrintRequest::TargetCPUs => llvm::LLVMRustPrintTargetCPUs(tm),
+ PrintRequest::TargetFeatures => llvm::LLVMRustPrintTargetFeatures(tm),
+ _ => bug!("rustc_trans can't handle print request: {:?}", req),
+ }
+ }
+}
+
+pub fn enable_llvm_debug() {
+ unsafe { llvm::LLVMRustSetDebug(1); }
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::util::common;
+use rustc::middle::cstore::MetadataLoader;
+use rustc_back::target::Target;
+use llvm;
+use llvm::{False, ObjectFile, mk_section_iter};
+use llvm::archive_ro::ArchiveRO;
+
+use owning_ref::{ErasedBoxRef, OwningRef};
+use std::path::Path;
+use std::ptr;
+use std::slice;
+
+pub const METADATA_FILENAME: &str = "rust.metadata.bin";
+
+pub struct LlvmMetadataLoader;
+
+impl MetadataLoader for LlvmMetadataLoader {
+ fn get_rlib_metadata(&self, _: &Target, filename: &Path) -> Result<ErasedBoxRef<[u8]>, String> {
+ // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap
+ // internally to read the file. We also avoid even using a memcpy by
+ // just keeping the archive around while the metadata is in use.
+ let archive = ArchiveRO::open(filename)
+ .map(|ar| OwningRef::new(box ar))
+ .ok_or_else(|| {
+ debug!("llvm didn't like `{}`", filename.display());
+ format!("failed to read rlib metadata: '{}'", filename.display())
+ })?;
+ let buf: OwningRef<_, [u8]> = archive
+ .try_map(|ar| {
+ ar.iter()
+ .filter_map(|s| s.ok())
+ .find(|sect| sect.name() == Some(METADATA_FILENAME))
+ .map(|s| s.data())
+ .ok_or_else(|| {
+ debug!("didn't find '{}' in the archive", METADATA_FILENAME);
+ format!("failed to read rlib metadata: '{}'",
+ filename.display())
+ })
+ })?;
+ Ok(buf.erase_owner())
+ }
+
+ fn get_dylib_metadata(&self,
+ target: &Target,
+ filename: &Path)
+ -> Result<ErasedBoxRef<[u8]>, String> {
+ unsafe {
+ let buf = common::path2cstr(filename);
+ let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr());
+ if mb as isize == 0 {
+ return Err(format!("error reading library: '{}'", filename.display()));
+ }
+ let of = ObjectFile::new(mb)
+ .map(|of| OwningRef::new(box of))
+ .ok_or_else(|| format!("provided path not an object file: '{}'",
+ filename.display()))?;
+ let buf = of.try_map(|of| search_meta_section(of, target, filename))?;
+ Ok(buf.erase_owner())
+ }
+ }
+}
+
+fn search_meta_section<'a>(of: &'a ObjectFile,
+ target: &Target,
+ filename: &Path)
+ -> Result<&'a [u8], String> {
+ unsafe {
+ let si = mk_section_iter(of.llof);
+ while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
+ let mut name_buf = ptr::null();
+ let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
+ let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec();
+ let name = String::from_utf8(name).unwrap();
+ debug!("get_metadata_section: name {}", name);
+ if read_metadata_section_name(target) == name {
+ let cbuf = llvm::LLVMGetSectionContents(si.llsi);
+ let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
+ // The buffer is valid while the object file is around
+ let buf: &'a [u8] = slice::from_raw_parts(cbuf as *const u8, csz);
+ return Ok(buf);
+ }
+ llvm::LLVMMoveToNextSection(si.llsi);
+ }
+ }
+ Err(format!("metadata not found: '{}'", filename.display()))
+}
+
+pub fn metadata_section_name(target: &Target) -> &'static str {
+ // Historical note:
+ //
+ // When using link.exe it was seen that the section name `.note.rustc`
+ // was getting shortened to `.note.ru`, and according to the PE and COFF
+ // specification:
+ //
+ // > Executable images do not use a string table and do not support
+ // > section names longer than 8 characters
+ //
+ // https://msdn.microsoft.com/en-us/library/windows/hardware/gg463119.aspx
+ //
+ // As a result, we choose a slightly shorter name! As to why
+ // `.note.rustc` works on MinGW, that's another good question...
+
+ if target.options.is_like_osx {
+ "__DATA,.rustc"
+ } else {
+ ".rustc"
+ }
+}
+
+fn read_metadata_section_name(_target: &Target) -> &'static str {
+ ".rustc"
+}
let bound_list = unsatisfied_predicates.iter()
.map(|p| format!("`{} : {}`", p.self_ty(), p))
.collect::<Vec<_>>()
- .join(", ");
+ .join("\n");
err.note(&format!("the method `{}` exists but the following trait bounds \
- were not satisfied: {}",
+ were not satisfied:\n{}",
item_name,
bound_list));
}
impl_id: DefId,
impl_item: &hir::ImplItem)
{
- let ancestors = trait_def.ancestors(impl_id);
+ let ancestors = trait_def.ancestors(tcx, impl_id);
let kind = match impl_item.node {
hir::ImplItemKind::Const(..) => ty::AssociatedKind::Const,
let mut invalidated_items = Vec::new();
let associated_type_overridden = overridden_associated_type.is_some();
for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
- let is_implemented = trait_def.ancestors(impl_id)
+ let is_implemented = trait_def.ancestors(tcx, impl_id)
.defs(tcx, trait_item.name, trait_item.kind)
.next()
.map(|node_item| !node_item.node.is_from_trait())
return err_info;
}
+ // Here we are considering a case of converting
+ // `S<P0...Pn>` to `S<Q0...Qn>`. As an example, let's imagine a struct `Foo<T, U>`,
+ // which acts like a pointer to `U`, but carries along some extra data of type `T`:
+ //
+ // struct Foo<T, U> {
+ // extra: T,
+ // ptr: *mut U,
+ // }
+ //
+ // We might have an impl that allows (e.g.) `Foo<T, [i32; 3]>` to be unsized
+ // to `Foo<T, [i32]>`. That impl would look like:
+ //
+ // impl<T, U: Unsize<V>, V> CoerceUnsized<Foo<T, V>> for Foo<T, U> {}
+ //
+ // Here `U = [i32; 3]` and `V = [i32]`. At runtime,
+ // when this coercion occurs, we would be changing the
+ // field `ptr` from a thin pointer of type `*mut [i32;
+ // 3]` to a fat pointer of type `*mut [i32]` (with
+ // extra data `3`). **The purpose of this check is to
+ // make sure that we know how to do this conversion.**
+ //
+ // To check if this impl is legal, we would walk down
+ // the fields of `Foo` and consider their types with
+ // both substitutes. We are looking to find that
+ // exactly one (non-phantom) field has changed its
+ // type, which we will expect to be the pointer that
+ // is becoming fat (we could probably generalize this
+ // to multiple thin pointers of the same type becoming
+ // fat, but we don't). In this case:
+ //
+ // - `extra` has type `T` before and type `T` after
+ // - `ptr` has type `*mut U` before and type `*mut V` after
+ //
+ // Since just one field changed, we would then check
+ // that `*mut U: CoerceUnsized<*mut V>` is implemented
+ // (in other words, that we know how to do this
+ // conversion). This will work out because `U:
+ // Unsize<V>`, and we have a builtin rule that `*mut
+ // U` can be coerced to `*mut V` if `U: Unsize<V>`.
let fields = &def_a.struct_variant().fields;
let diff_fields = fields.iter()
.enumerate()
return None;
}
- // Ignore fields that aren't significantly changed
- if let Ok(ok) = infcx.sub_types(false, &cause, b, a) {
+ // Ignore fields that aren't changed; it may
+ // be that we could get away with subtyping or
+ // something more accepting, but we use
+ // equality because we want to be able to
+ // perform this check without computing
+ // variance where possible. (This is because
+ // we may have to evaluate constraint
+ // expressions in the course of execution.)
+ // See e.g. #41936.
+ if let Ok(ok) = infcx.eq_types(false, &cause, b, a) {
if ok.obligations.is_empty() {
return None;
}
}
enforce_trait_manually_implementable(tcx, impl_def_id, trait_ref.def_id);
- let trait_def = tcx.trait_def(trait_ref.def_id);
- trait_def.record_local_impl(tcx, impl_def_id, trait_ref);
}
}
fn coherent_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
(_, def_id): (CrateNum, DefId)) {
- tcx.populate_implementations_for_trait_if_necessary(def_id);
-
let impls = tcx.hir.trait_impls(def_id);
for &impl_id in impls {
check_impl(tcx, impl_id);
let _task =
tcx.dep_graph.in_task(DepNode::CoherenceOverlapCheck(trait_def_id));
- let def = tcx.trait_def(trait_def_id);
-
- // attempt to insert into the specialization graph
- let insert_result = def.add_impl_for_specialization(tcx, impl_def_id);
-
- // insertion failed due to overlap
- if let Err(overlap) = insert_result {
- let mut err = struct_span_err!(tcx.sess,
- tcx.span_of_impl(impl_def_id).unwrap(),
- E0119,
- "conflicting implementations of trait `{}`{}:",
- overlap.trait_desc,
- overlap.self_desc.clone().map_or(String::new(),
- |ty| {
- format!(" for type `{}`", ty)
- }));
-
- match tcx.span_of_impl(overlap.with_impl) {
- Ok(span) => {
- err.span_label(span, "first implementation here");
- err.span_label(tcx.span_of_impl(impl_def_id).unwrap(),
- format!("conflicting implementation{}",
- overlap.self_desc
- .map_or(String::new(),
- |ty| format!(" for `{}`", ty))));
- }
- Err(cname) => {
- err.note(&format!("conflicting implementation in crate `{}`", cname));
- }
- }
+ // Trigger building the specialization graph for the trait of this impl.
+ // This will detect any overlap errors.
+ tcx.specialization_graph_of(trait_def_id);
- err.emit();
- }
// check for overlap with the automatic `impl Trait for Trait`
if let ty::TyDynamic(ref data, ..) = trait_ref.self_ty().sty {
}
let def_path_hash = tcx.def_path_hash(def_id);
- let def = ty::TraitDef::new(def_id, unsafety, paren_sugar, def_path_hash);
-
- if tcx.hir.trait_is_auto(def_id) {
- def.record_has_default_impl();
- }
-
+ let has_default_impl = tcx.hir.trait_is_auto(def_id);
+ let def = ty::TraitDef::new(def_id,
+ unsafety,
+ paren_sugar,
+ has_default_impl,
+ def_path_hash);
tcx.alloc_trait_def(def)
}
```
"##,
-E0119: r##"
-There are conflicting trait implementations for the same type.
-Example of erroneous code:
-
-```compile_fail,E0119
-trait MyTrait {
- fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
- fn get(&self) -> usize { 0 }
-}
-
-struct Foo {
- value: usize
-}
-
-impl MyTrait for Foo { // error: conflicting implementations of trait
- // `MyTrait` for type `Foo`
- fn get(&self) -> usize { self.value }
-}
-```
-
-When looking for the implementation for the trait, the compiler finds
-both the `impl<T> MyTrait for T` where T is all types and the `impl
-MyTrait for Foo`. Since a trait cannot be implemented multiple times,
-this is an error. So, when you write:
-
-```
-trait MyTrait {
- fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
- fn get(&self) -> usize { 0 }
-}
-```
-
-This makes the trait implemented on all types in the scope. So if you
-try to implement it on another one after that, the implementations will
-conflict. Example:
-
-```
-trait MyTrait {
- fn get(&self) -> usize;
-}
-
-impl<T> MyTrait for T {
- fn get(&self) -> usize { 0 }
-}
-
-struct Foo;
-
-fn main() {
- let f = Foo;
-
- f.get(); // the trait is implemented so we can use it
-}
-```
-"##,
-
E0120: r##"
An attempt was made to implement Drop on a trait, which is not allowed: only
structs and enums can implement Drop. An example causing this error:
use rustc::hir::map as hir_map;
use rustc::lint;
use rustc::util::nodemap::FxHashMap;
+use rustc_trans;
use rustc_trans::back::link;
use rustc_resolve as resolve;
use rustc_metadata::cstore::CStore;
let dep_graph = DepGraph::new(false);
let _ignore = dep_graph.in_ignore();
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let mut sess = session::build_session_(
sessopts, &dep_graph, cpath, diagnostic_handler, codemap, cstore.clone()
);
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs));
let quot = if f.alternate() { "\"" } else { "&quot;" };
match self.0 {
Abi::Rust => Ok(()),
- Abi::C => write!(f, "extern "),
abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()),
}
}
top: 0;
}
+h3 > .collapse-toggle, h4 > .collapse-toggle {
+ font-size: 0.8em;
+ top: 5px;
+}
+
.toggle-wrapper > .collapse-toggle {
left: -24px;
margin-top: 0px;
use rustc_driver::driver::phase_2_configure_and_expand;
use rustc_metadata::cstore::CStore;
use rustc_resolve::MakeGlobMap;
+use rustc_trans;
use rustc_trans::back::link;
use syntax::ast;
use syntax::codemap::CodeMap;
let dep_graph = DepGraph::new(false);
let _ignore = dep_graph.in_ignore();
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let mut sess = session::build_session_(
sessopts, &dep_graph, Some(input_path.clone()), handler, codemap.clone(), cstore.clone(),
);
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
sess.parse_sess.config =
config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
let diagnostic_handler = errors::Handler::with_emitter(true, false, box emitter);
let dep_graph = DepGraph::new(false);
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, box rustc_trans::LlvmMetadataLoader));
let mut sess = session::build_session_(
sessopts, &dep_graph, None, diagnostic_handler, codemap, cstore.clone(),
);
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let outdir = Mutex::new(TempDir::new("rustdoctest").ok().expect("rustdoc needs a tempdir"));
}
/// Describes the result of a process after it has terminated.
+///
+/// This `struct` is used to represent the exit status of a child process.
+/// Child processes are created via the [`Command`] struct and their exit
+/// status is exposed through the [`status`] method.
+///
+/// [`Command`]: struct.Command.html
+/// [`status`]: struct.Command.html#method.status
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
#[stable(feature = "process", since = "1.0.0")]
pub struct ExitStatus(imp::ExitStatus);
/// On Unix, this will return `None` if the process was terminated
/// by a signal; `std::os::unix` provides an extension trait for
/// extracting the signal and other details from the `ExitStatus`.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::process::Command;
+ ///
+ /// let status = Command::new("mkdir")
+ /// .arg("projects")
+ /// .status()
+ /// .expect("failed to execute mkdir");
+ ///
+ /// match status.code() {
+ /// Some(code) => println!("Exited with status code: {}", code),
+ /// None => println!("Process terminated by signal")
+ /// }
+ /// ```
#[stable(feature = "process", since = "1.0.0")]
pub fn code(&self) -> Option<i32> {
self.0.code()
/// Creates an `OsString` from a potentially ill-formed UTF-16 slice of
/// 16-bit code units.
///
- /// This is lossless: calling `.encode_wide()` on the resulting string
+ /// This is lossless: calling [`encode_wide`] on the resulting string
/// will always return the original code units.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ffi::OsString;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// // UTF-16 encoding for "Unicode".
+ /// let source = [0x0055, 0x006E, 0x0069, 0x0063, 0x006F, 0x0064, 0x0065];
+ ///
+ /// let string = OsString::from_wide(&source[..]);
+ /// ```
+ ///
+ /// [`encode_wide`]: ./trait.OsStrExt.html#tymethod.encode_wide
#[stable(feature = "rust1", since = "1.0.0")]
fn from_wide(wide: &[u16]) -> Self;
}
/// Windows-specific extensions to `OsStr`.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait OsStrExt {
- /// Re-encodes an `OsStr` as a wide character sequence,
- /// i.e. potentially ill-formed UTF-16.
+ /// Re-encodes an `OsStr` as a wide character sequence, i.e. potentially
+ /// ill-formed UTF-16.
+ ///
+ /// This is lossless: calling [`OsString::from_wide`] and then
+ /// `encode_wide` on the result will yield the original code units.
+ /// Note that the encoding does not add a final null terminator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ffi::OsString;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// // UTF-16 encoding for "Unicode".
+ /// let source = [0x0055, 0x006E, 0x0069, 0x0063, 0x006F, 0x0064, 0x0065];
+ ///
+ /// let string = OsString::from_wide(&source[..]);
+ ///
+ /// let result: Vec<u16> = string.encode_wide().collect();
+ /// assert_eq!(&source[..], &result[..]);
+ /// ```
///
- /// This is lossless. Note that the encoding does not include a final
- /// null.
+ /// [`OsString::from_wide`]: ./trait.OsStringExt.html#tymethod.from_wide
#[stable(feature = "rust1", since = "1.0.0")]
fn encode_wide(&self) -> EncodeWide;
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Windows-specific extensions for the primitives in `std::fs`
+//! Windows-specific extensions for the primitives in the `std::fs` module.
#![stable(feature = "rust1", since = "1.0.0")]
use sys;
use sys_common::{AsInnerMut, AsInner};
-/// Windows-specific extensions to `File`
+/// Windows-specific extensions to [`File`].
+///
+/// [`File`]: ../../../fs/struct.File.html
#[stable(feature = "file_offset", since = "1.15.0")]
pub trait FileExt {
/// Seeks to a given position and reads a number of bytes.
/// Note that similar to `File::read`, it is not an error to return with a
/// short read. When returning from such a short read, the file pointer is
/// still updated.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs::File;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let mut file = File::open("foo.txt")?;
+ /// let mut buffer = [0; 10];
+ ///
+ /// // Read 10 bytes, starting 72 bytes from the
+ /// // start of the file.
+ /// file.seek_read(&mut buffer[..], 72)?;
+ /// # Ok(())
+ /// # }
+ /// ```
#[stable(feature = "file_offset", since = "1.15.0")]
fn seek_read(&self, buf: &mut [u8], offset: u64) -> io::Result<usize>;
/// Note that similar to `File::write`, it is not an error to return a
/// short write. When returning from such a short write, the file pointer
/// is still updated.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::fs::File;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> std::io::Result<()> {
+ /// let mut buffer = File::create("foo.txt")?;
+ ///
+ /// // Write a byte string starting 72 bytes from
+ /// // the start of the file.
+ /// buffer.seek_write(b"some bytes", 72)?;
+ /// # Ok(())
+ /// # }
+ /// ```
#[stable(feature = "file_offset", since = "1.15.0")]
fn seek_write(&self, buf: &[u8], offset: u64) -> io::Result<usize>;
}
}
}
-/// Windows-specific extensions to `OpenOptions`
+/// Windows-specific extensions to [`OpenOptions`].
+///
+/// [`OpenOptions`]: ../../../fs/struct.OpenOptions.html
#[stable(feature = "open_options_ext", since = "1.10.0")]
pub trait OpenOptionsExt {
- /// Overrides the `dwDesiredAccess` argument to the call to `CreateFile`
+ /// Overrides the `dwDesiredAccess` argument to the call to [`CreateFile`]
/// with the specified value.
///
/// This will override the `read`, `write`, and `append` flags on the
/// `OpenOptions` structure. This method provides fine-grained control over
/// the permissions to read, write and append data, attributes (like hidden
- /// and system) and extended attributes.
+ /// and system), and extended attributes.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
- /// use std::os::windows::fs::OpenOptionsExt;
+ /// use std::os::windows::prelude::*;
///
/// // Open without read and write permission, for example if you only need
- /// // to call `stat()` on the file
+ /// // to call `stat` on the file
/// let file = OpenOptions::new().access_mode(0).open("foo.txt");
/// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn access_mode(&mut self, access: u32) -> &mut Self;
- /// Overrides the `dwShareMode` argument to the call to `CreateFile` with
+ /// Overrides the `dwShareMode` argument to the call to [`CreateFile`] with
/// the specified value.
///
/// By default `share_mode` is set to
- /// `FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE`. Specifying
- /// less permissions denies others to read from, write to and/or delete the
- /// file while it is open.
+ /// `FILE_SHARE_READ | FILE_SHARE_WRITE | FILE_SHARE_DELETE`. This allows
+    /// other processes to read, write, and delete/rename the same file
+ /// while it is open. Removing any of the flags will prevent other
+ /// processes from performing the corresponding operation until the file
+ /// handle is closed.
///
/// # Examples
///
/// ```no_run
/// use std::fs::OpenOptions;
- /// use std::os::windows::fs::OpenOptionsExt;
+ /// use std::os::windows::prelude::*;
///
/// // Do not allow others to read or modify this file while we have it open
- /// // for writing
- /// let file = OpenOptions::new().write(true)
- /// .share_mode(0)
- /// .open("foo.txt");
+ /// // for writing.
+ /// let file = OpenOptions::new()
+ /// .write(true)
+ /// .share_mode(0)
+ /// .open("foo.txt");
/// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn share_mode(&mut self, val: u32) -> &mut Self;
/// Sets extra flags for the `dwFileFlags` argument to the call to
- /// `CreateFile2` (or combines it with `attributes` and `security_qos_flags`
- /// to set the `dwFlagsAndAttributes` for `CreateFile`).
+ /// [`CreateFile2`] to the specified value (or combines it with
+ /// `attributes` and `security_qos_flags` to set the `dwFlagsAndAttributes`
+ /// for [`CreateFile`]).
///
- /// Custom flags can only set flags, not remove flags set by Rusts options.
- /// This options overwrites any previously set custom flags.
+ /// Custom flags can only set flags, not remove flags set by Rust's options.
+ /// This option overwrites any previously set custom flags.
///
/// # Examples
///
- /// ```rust,ignore
+ /// ```ignore
/// extern crate winapi;
+ ///
/// use std::fs::OpenOptions;
- /// use std::os::windows::fs::OpenOptionsExt;
- ///
- /// let mut options = OpenOptions::new();
- /// options.create(true).write(true);
- /// if cfg!(windows) {
- /// options.custom_flags(winapi::FILE_FLAG_DELETE_ON_CLOSE);
- /// }
- /// let file = options.open("foo.txt");
+ /// use std::os::windows::prelude::*;
+ ///
+ /// let file = OpenOptions::new()
+ /// .create(true)
+ /// .write(true)
+ /// .custom_flags(winapi::FILE_FLAG_DELETE_ON_CLOSE)
+ /// .open("foo.txt");
/// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+ /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn custom_flags(&mut self, flags: u32) -> &mut Self;
- /// Sets the `dwFileAttributes` argument to the call to `CreateFile2` to
+ /// Sets the `dwFileAttributes` argument to the call to [`CreateFile2`] to
/// the specified value (or combines it with `custom_flags` and
/// `security_qos_flags` to set the `dwFlagsAndAttributes` for
- /// `CreateFile`).
+ /// [`CreateFile`]).
///
/// If a _new_ file is created because it does not yet exist and
/// `.create(true)` or `.create_new(true)` are specified, the new file is
///
/// # Examples
///
- /// ```rust,ignore
+ /// ```ignore
/// extern crate winapi;
+ ///
/// use std::fs::OpenOptions;
- /// use std::os::windows::fs::OpenOptionsExt;
+ /// use std::os::windows::prelude::*;
///
- /// let file = OpenOptions::new().write(true).create(true)
- /// .attributes(winapi::FILE_ATTRIBUTE_HIDDEN)
- /// .open("foo.txt");
+ /// let file = OpenOptions::new()
+ /// .write(true)
+ /// .create(true)
+ /// .attributes(winapi::FILE_ATTRIBUTE_HIDDEN)
+ /// .open("foo.txt");
/// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+ /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn attributes(&mut self, val: u32) -> &mut Self;
- /// Sets the `dwSecurityQosFlags` argument to the call to `CreateFile2` to
+ /// Sets the `dwSecurityQosFlags` argument to the call to [`CreateFile2`] to
/// the specified value (or combines it with `custom_flags` and `attributes`
- /// to set the `dwFlagsAndAttributes` for `CreateFile`).
+ /// to set the `dwFlagsAndAttributes` for [`CreateFile`]).
+ ///
+ /// By default, `security_qos_flags` is set to `SECURITY_ANONYMOUS`. For
+ /// information about possible values, see [Impersonation Levels] on the
+ /// Windows Dev Center site.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::fs::OpenOptions;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// let file = OpenOptions::new()
+ /// .write(true)
+ /// .create(true)
+ ///
+ /// // Sets the flag value to `SecurityIdentification`.
+ /// .security_qos_flags(1)
+ ///
+ /// .open("foo.txt");
+ /// ```
+ ///
+ /// [`CreateFile`]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa363858.aspx
+ /// [`CreateFile2`]: https://msdn.microsoft.com/en-us/library/windows/desktop/hh449422.aspx
+ /// [Impersonation Levels]:
+ /// https://msdn.microsoft.com/en-us/library/windows/desktop/aa379572.aspx
#[stable(feature = "open_options_ext", since = "1.10.0")]
fn security_qos_flags(&mut self, flags: u32) -> &mut OpenOptions;
}
}
}
-/// Extension methods for `fs::Metadata` to access the raw fields contained
+/// Extension methods for [`fs::Metadata`] to access the raw fields contained
/// within.
+///
+/// The data members that this trait exposes correspond to the members
+/// of the [`BY_HANDLE_FILE_INFORMATION`] structure.
+///
+/// [`fs::Metadata`]: ../../../fs/struct.Metadata.html
+/// [`BY_HANDLE_FILE_INFORMATION`]:
+/// https://msdn.microsoft.com/en-us/library/windows/desktop/aa363788.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
pub trait MetadataExt {
/// Returns the value of the `dwFileAttributes` field of this metadata.
///
/// This field contains the file system attribute information for a file
- /// or directory.
+ /// or directory. For possible values and their descriptions, see
+ /// [File Attribute Constants] in the Windows Dev Center.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let attributes = metadata.file_attributes();
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// [File Attribute Constants]:
+ /// https://msdn.microsoft.com/en-us/library/windows/desktop/gg258117.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn file_attributes(&self) -> u32;
/// Returns the value of the `ftCreationTime` field of this metadata.
///
- /// The returned 64-bit value represents the number of 100-nanosecond
- /// intervals since January 1, 1601 (UTC).
+ /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+ /// which represents the number of 100-nanosecond intervals since
+ /// January 1, 1601 (UTC). The struct is automatically
+ /// converted to a `u64` value, as that is the recommended way
+ /// to use it.
+ ///
+ /// If the underlying filesystem does not support creation time, the
+ /// returned value is 0.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let creation_time = metadata.creation_time();
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn creation_time(&self) -> u64;
/// Returns the value of the `ftLastAccessTime` field of this metadata.
///
- /// The returned 64-bit value represents the number of 100-nanosecond
- /// intervals since January 1, 1601 (UTC).
+ /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+ /// which represents the number of 100-nanosecond intervals since
+ /// January 1, 1601 (UTC). The struct is automatically
+ /// converted to a `u64` value, as that is the recommended way
+ /// to use it.
+ ///
+ /// For a file, the value specifies the last time that a file was read
+ /// from or written to. For a directory, the value specifies when
+ /// the directory was created. For both files and directories, the
+ /// specified date is correct, but the time of day is always set to
+ /// midnight.
+ ///
+ /// If the underlying filesystem does not support last access time, the
+ /// returned value is 0.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let last_access_time = metadata.last_access_time();
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn last_access_time(&self) -> u64;
/// Returns the value of the `ftLastWriteTime` field of this metadata.
///
- /// The returned 64-bit value represents the number of 100-nanosecond
- /// intervals since January 1, 1601 (UTC).
+ /// The returned 64-bit value is equivalent to a [`FILETIME`] struct,
+ /// which represents the number of 100-nanosecond intervals since
+ /// January 1, 1601 (UTC). The struct is automatically
+ /// converted to a `u64` value, as that is the recommended way
+ /// to use it.
+ ///
+ /// For a file, the value specifies the last time that a file was written
+ /// to. For a directory, the structure specifies when the directory was
+ /// created.
+ ///
+    /// If the underlying filesystem does not support the last write time,
+    /// the returned value is 0.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let last_write_time = metadata.last_write_time();
+ /// # Ok(())
+ /// # }
+ /// ```
+ ///
+ /// [`FILETIME`]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms724284.aspx
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn last_write_time(&self) -> u64;
/// metadata.
///
/// The returned value does not have meaning for directories.
+ ///
+ /// # Examples
+ ///
+ /// ```no_run
+ /// use std::io;
+ /// use std::fs;
+ /// use std::os::windows::prelude::*;
+ ///
+ /// # fn foo() -> io::Result<()> {
+ /// let metadata = fs::metadata("foo.txt")?;
+ /// let file_size = metadata.file_size();
+ /// # Ok(())
+ /// # }
+ /// ```
#[stable(feature = "metadata_ext", since = "1.1.0")]
fn file_size(&self) -> u64;
}
///
/// # Examples
///
-/// ```ignore
+/// ```no_run
/// use std::os::windows::fs;
///
/// # fn foo() -> std::io::Result<()> {
///
/// # Examples
///
-/// ```ignore
+/// ```no_run
/// use std::os::windows::fs;
///
/// # fn foo() -> std::io::Result<()> {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-//! Experimental extensions to `std` for Windows.
+//! Platform-specific extensions to `std` for Windows.
//!
-//! For now, this module is limited to extracting handles, file
-//! descriptors, and sockets, but its functionality will grow over
-//! time.
+//! Provides access to platform-level information for Windows, and exposes
+//! Windows-specific idioms that would otherwise be inappropriate as part
+//! of the core `std` library. These extensions allow developers to use
+//! `std` types and idioms with Windows in a way that the normal
+//! platform-agnostic idioms would not normally support.
#![stable(feature = "rust1", since = "1.0.0")]
}
}
+/// Generates a wide character sequence for potentially ill-formed UTF-16.
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct EncodeWide<'a> {
/// A thread local storage key which owns its contents.
///
/// This key uses the fastest possible implementation available to it for the
-/// target platform. It is instantiated with the `thread_local!` macro and the
-/// primary method is the `with` method.
+/// target platform. It is instantiated with the [`thread_local!`] macro and the
+/// primary method is the [`with`] method.
///
-/// The `with` method yields a reference to the contained value which cannot be
+/// The [`with`] method yields a reference to the contained value which cannot be
/// sent across threads or escape the given closure.
///
/// # Initialization and Destruction
///
-/// Initialization is dynamically performed on the first call to `with()`
-/// within a thread, and values that implement `Drop` get destructed when a
+/// Initialization is dynamically performed on the first call to [`with`]
+/// within a thread, and values that implement [`Drop`] get destructed when a
/// thread exits. Some caveats apply, which are explained below.
///
/// # Examples
/// 3. On macOS, initializing TLS during destruction of other TLS slots can
/// sometimes cancel *all* destructors for the current thread, whether or not
/// the slots have already had their destructors run or not.
+///
+/// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+/// [`thread_local!`]: ../../std/macro.thread_local.html
+/// [`Drop`]: ../../std/ops/trait.Drop.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct LocalKey<T: 'static> {
// This outer `LocalKey<T>` type is what's going to be stored in statics,
}
}
-/// Declare a new thread local storage key of type `std::thread::LocalKey`.
+/// Declare a new thread local storage key of type [`std::thread::LocalKey`].
///
/// # Syntax
///
/// # fn main() {}
/// ```
///
-/// See [LocalKey documentation](thread/struct.LocalKey.html) for more
+/// See [LocalKey documentation][`std::thread::LocalKey`] for more
/// information.
+///
+/// [`std::thread::LocalKey`]: ../std/thread/struct.LocalKey.html
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
#[allow_internal_unstable]
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum LocalKeyState {
/// All keys are in this state whenever a thread starts. Keys will
- /// transition to the `Valid` state once the first call to `with` happens
+ /// transition to the `Valid` state once the first call to [`with`] happens
/// and the initialization expression succeeds.
///
/// Keys in the `Uninitialized` state will yield a reference to the closure
- /// passed to `with` so long as the initialization routine does not panic.
+ /// passed to [`with`] so long as the initialization routine does not panic.
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Uninitialized,
/// Once a key has been accessed successfully, it will enter the `Valid`
/// `Destroyed` state.
///
/// Keys in the `Valid` state will be guaranteed to yield a reference to the
- /// closure passed to `with`.
+ /// closure passed to [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Valid,
/// When a thread exits, the destructors for keys will be run (if
/// destructor has run, a key is in the `Destroyed` state.
///
/// Keys in the `Destroyed` states will trigger a panic when accessed via
- /// `with`.
+ /// [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
Destroyed,
}
/// Query the current state of this key.
///
/// A key is initially in the `Uninitialized` state whenever a thread
- /// starts. It will remain in this state up until the first call to `with`
+ /// starts. It will remain in this state up until the first call to [`with`]
/// within a thread has run the initialization expression successfully.
///
/// Once the initialization expression succeeds, the key transitions to the
- /// `Valid` state which will guarantee that future calls to `with` will
+ /// `Valid` state which will guarantee that future calls to [`with`] will
/// succeed within the thread.
///
/// When a thread exits, each key will be destroyed in turn, and as keys are
/// destroyed they will enter the `Destroyed` state just before the
/// destructor starts to run. Keys may remain in the `Destroyed` state after
/// destruction has completed. Keys without destructors (e.g. with types
- /// that are `Copy`), may never enter the `Destroyed` state.
+ /// that are [`Copy`]), may never enter the `Destroyed` state.
///
/// Keys in the `Uninitialized` state can be accessed so long as the
/// initialization does not panic. Keys in the `Valid` state are guaranteed
/// to be able to be accessed. Keys in the `Destroyed` state will panic on
- /// any call to `with`.
+ /// any call to [`with`].
+ ///
+ /// [`with`]: ../../std/thread/struct.LocalKey.html#method.with
+ /// [`Copy`]: ../../std/marker/trait.Copy.html
#[unstable(feature = "thread_local_state",
reason = "state querying was recently added",
issue = "27716")]
// Builder
////////////////////////////////////////////////////////////////////////////////
-/// Thread configuration. Provides detailed control over the properties
-/// and behavior of new threads.
+/// Thread factory, which can be used in order to configure the properties of
+/// a new thread.
+///
+/// Methods can be chained on it in order to configure it.
+///
+/// The two configurations available are:
+///
+/// - [`name`]: allows giving a name to the thread, which is currently
+/// only used in `panic` messages.
+/// - [`stack_size`]: specifies the desired stack size. Note that this can
+///   be overridden by the OS.
+///
+/// If the [`stack_size`] field is not specified, the stack size
+/// will be the `RUST_MIN_STACK` environment variable. If it is
+/// not specified either, a sensible default will be set.
+///
+/// If the [`name`] field is not specified, the thread will not be named.
+///
+/// The [`spawn`] method will take ownership of the builder and create an
+/// [`io::Result`] to the thread handle with the given configuration.
+///
+/// The [`thread::spawn`] free function uses a `Builder` with default
+/// configuration and [`unwrap`]s its return value.
+///
+/// You may want to use [`spawn`] instead of [`thread::spawn`], when you want
+/// to recover from a failure to launch a thread; indeed, the free function will
+/// panic where the `Builder` method will return an [`io::Result`].
///
/// # Examples
///
///
/// handler.join().unwrap();
/// ```
+///
+/// [`thread::spawn`]: ../../std/thread/fn.spawn.html
+/// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
+/// [`name`]: ../../std/thread/struct.Builder.html#method.name
+/// [`spawn`]: ../../std/thread/struct.Builder.html#method.spawn
+/// [`io::Result`]: ../../std/io/type.Result.html
+/// [`unwrap`]: ../../std/result/enum.Result.html#method.unwrap
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct Builder {
/// Generates the base configuration for spawning a thread, from which
/// configuration methods can be chained.
///
- /// If the [`stack_size`] field is not specified, the stack size
- /// will be the `RUST_MIN_STACK` environment variable. If it is
- /// not specified either, a sensible default will be set (2MB as
- /// of the writting of this doc).
- ///
/// # Examples
///
/// ```
///
/// handler.join().unwrap();
/// ```
- ///
- /// [`stack_size`]: ../../std/thread/struct.Builder.html#method.stack_size
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> Builder {
Builder {
self
}
- /// Spawns a new thread, and returns a join handle for it.
+ /// Spawns a new thread by taking ownership of the `Builder`, and returns an
+ /// [`io::Result`] to its [`JoinHandle`].
///
- /// The child thread may outlive the parent (unless the parent thread
+ /// The spawned thread may outlive the caller (unless the caller thread
/// is the main thread; the whole process is terminated when the main
/// thread finishes). The join handle can be used to block on
/// termination of the child thread, including recovering its panics.
///
/// [`spawn`]: ../../std/thread/fn.spawn.html
/// [`io::Result`]: ../../std/io/type.Result.html
+ /// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
///
/// # Examples
///
/// Cooperatively gives up a timeslice to the OS scheduler.
///
+/// This is used when the programmer knows that the thread will have nothing
+/// to do for some time, and thus avoid wasting computing time.
+///
+/// For example when polling on a resource, it is common to check that it is
+/// available, and if not to yield in order to avoid busy waiting.
+///
+/// Thus the pattern of `yield`ing after a failed poll is rather common when
+/// implementing low-level shared resources or synchronization primitives.
+///
+/// However, programmers will usually prefer to use [`channel`]s, [`Condvar`]s,
+/// [`Mutex`]es or [`join`] for their synchronization routines, as they avoid
+/// thinking about thread scheduling.
+///
+/// Note that [`channel`]s for example are implemented using this primitive.
+/// Indeed when you call `send` or `recv`, which are blocking, they will yield
+/// if the channel is not available.
+///
/// # Examples
///
/// ```
///
/// thread::yield_now();
/// ```
+///
+/// [`channel`]: ../../std/sync/mpsc/index.html
+/// [`spawn`]: ../../std/thread/fn.spawn.html
+/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
+/// [`Mutex`]: ../../std/sync/struct.Mutex.html
+/// [`Condvar`]: ../../std/sync/struct.Condvar.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn yield_now() {
imp::Thread::yield_now()
StmtKind::Mac(mac) => StmtKind::Mac(mac.map(|(mac, _style, attrs)| {
(mac, MacStmtStyle::Semicolon, attrs)
})),
- node @ _ => node,
+ node => node,
};
self
}
pub fn is_unsuffixed(&self) -> bool {
match *self {
// unsuffixed variants
- LitKind::Str(..) => true,
- LitKind::ByteStr(..) => true,
- LitKind::Byte(..) => true,
- LitKind::Char(..) => true,
- LitKind::Int(_, LitIntType::Unsuffixed) => true,
- LitKind::FloatUnsuffixed(..) => true,
+ LitKind::Str(..) |
+ LitKind::ByteStr(..) |
+ LitKind::Byte(..) |
+ LitKind::Char(..) |
+ LitKind::Int(_, LitIntType::Unsuffixed) |
+ LitKind::FloatUnsuffixed(..) |
LitKind::Bool(..) => true,
// suffixed variants
- LitKind::Int(_, LitIntType::Signed(..)) => false,
- LitKind::Int(_, LitIntType::Unsigned(..)) => false,
+ LitKind::Int(_, LitIntType::Signed(..)) |
+ LitKind::Int(_, LitIntType::Unsigned(..)) |
LitKind::Float(..) => false,
}
}
/// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem.
pub fn meta_item(&self) -> Option<&MetaItem> {
match self.node {
- NestedMetaItemKind::MetaItem(ref item) => Some(&item),
+ NestedMetaItemKind::MetaItem(ref item) => Some(item),
_ => None
}
}
/// Returns the Lit if self is a NestedMetaItemKind::Literal.
pub fn literal(&self) -> Option<&Lit> {
match self.node {
- NestedMetaItemKind::Literal(ref lit) => Some(&lit),
+ NestedMetaItemKind::Literal(ref lit) => Some(lit),
_ => None
}
}
match self.node {
MetaItemKind::NameValue(ref v) => {
match v.node {
- LitKind::Str(ref s, _) => Some((*s).clone()),
+ LitKind::Str(ref s, _) => Some(*s),
_ => None,
}
},
Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
}
LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None),
- LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value {
- true => "true",
- false => "false",
+ LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(if value {
+ "true"
+ } else {
+ "false"
}))),
}
}
impl HasAttrs for Vec<Attribute> {
fn attrs(&self) -> &[Attribute] {
- &self
+ self
}
fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
f(self)
impl HasAttrs for ThinVec<Attribute> {
fn attrs(&self) -> &[Attribute] {
- &self
+ self
}
fn map_attrs<F: FnOnce(Vec<Attribute>) -> Vec<Attribute>>(self, f: F) -> Self {
f(self.into()).into()
match self.span_to_snippet(sp) {
Ok(snippet) => {
let snippet = snippet.split(c).nth(0).unwrap_or("").trim_right();
- if snippet.len() > 0 && !snippet.contains('\n') {
+ if !snippet.is_empty() && !snippet.contains('\n') {
Span { hi: BytePos(sp.lo.0 + snippet.len() as u32), ..sp }
} else {
sp
pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
for fm in self.files.borrow().iter() {
if filename == fm.name {
- (self.dep_tracking_callback.borrow())(&fm);
+ (self.dep_tracking_callback.borrow())(fm);
return Some(fm.clone());
}
}
return false;
}
- let mis = if !is_cfg(&attr) {
+ let mis = if !is_cfg(attr) {
return true;
} else if let Some(mis) = attr.meta_item_list() {
mis
// flag the offending attributes
for attr in attrs.iter() {
if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
- let mut err = feature_err(&self.sess,
+ let mut err = feature_err(self.sess,
"stmt_expr_attributes",
attr.span,
GateIssue::Language,
pub fn configure_struct_expr_field(&mut self, field: ast::Field) -> Option<ast::Field> {
if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
if !field.attrs.is_empty() {
- let mut err = feature_err(&self.sess,
+ let mut err = feature_err(self.sess,
"struct_field_attributes",
field.span,
GateIssue::Language,
for attr in attrs.iter() {
if !self.features.map(|features| features.struct_field_attributes).unwrap_or(true) {
let mut err = feature_err(
- &self.sess,
+ self.sess,
"struct_field_attributes",
attr.span,
GateIssue::Language,
// URLs can be unavoidably longer than the line limit, so we allow them.
// Allowed format is: `[name]: https://www.rust-lang.org/`
- let is_url = |l: &str| l.starts_with('[') && l.contains("]:") && l.contains("http");
+ let is_url = |l: &str| l.starts_with("[") && l.contains("]:") && l.contains("http");
if msg.lines().any(|line| line.len() > MAX_DESCRIPTION_WIDTH && !is_url(line)) {
ecx.span_err(span, &format!(
if let Err(e) = output_metadata(ecx,
&target_triple,
&crate_name.name.as_str(),
- &diagnostics) {
+ diagnostics) {
ecx.span_bug(span, &format!(
"error writing metadata for triple `{}` and crate `{}`, error: {}, \
cause: {:?}",
MacEager::items(SmallVector::many(vec![
P(ast::Item {
- ident: name.clone(),
+ ident: *name,
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Const(
///
/// The `bool` dictates whether the contents of the macro can
/// directly use `#[unstable]` things (true == yes).
- NormalTT(Box<TTMacroExpander>, Option<Span>, bool),
+ NormalTT(Box<TTMacroExpander>, Option<(ast::NodeId, Span)>, bool),
/// A function-like syntax extension that has an extra ident before
/// the block.
-> Result<Option<Rc<SyntaxExtension>>, Determinacy>;
fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
-> Result<Rc<SyntaxExtension>, Determinacy>;
+ fn check_unused_macros(&self);
}
#[derive(Copy, Clone, Debug)]
_force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
Err(Determinacy::Determined)
}
+ fn check_unused_macros(&self) {}
}
#[derive(Clone)]
}
/// One of these is made during expansion and incrementally updated as we go;
-/// when a macro expansion occurs, the resulting nodes have the backtrace()
-/// -> expn_info of their expansion context stored into their span.
+/// when a macro expansion occurs, the resulting nodes have the `backtrace()
+/// -> expn_info` of their expansion context stored into their span.
pub struct ExtCtxt<'a> {
pub parse_sess: &'a parse::ParseSess,
pub ecfg: expand::ExpansionConfig<'a>,
/// Returns span for the macro which originally caused the current expansion to happen.
///
/// Stops backtracing at include! boundary.
- pub fn expansion_cause(&self) -> Span {
+ pub fn expansion_cause(&self) -> Option<Span> {
let mut ctxt = self.backtrace();
let mut last_macro = None;
loop {
}
ctxt = info.call_site.ctxt;
last_macro = Some(info.call_site);
- return Some(());
+ Some(())
}).is_none() {
break
}
}
- last_macro.expect("missing expansion backtrace")
+ last_macro
}
pub fn struct_span_warn(&self,
}
pub fn trace_macros_diag(&self) {
for (sp, notes) in self.expansions.iter() {
- let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, &"trace_macro");
+ let mut db = self.parse_sess.span_diagnostic.span_note_diag(*sp, "trace_macro");
for note in notes {
- db.note(¬e);
+ db.note(note);
}
db.emit();
}
v.push(self.ident_of(s));
}
v.extend(components.iter().map(|s| self.ident_of(s)));
- return v
+ v
}
pub fn name_of(&self, st: &str) -> ast::Name {
Symbol::intern(st)
}
+
+ pub fn check_unused_macros(&self) {
+ self.resolver.check_unused_macros();
+ }
}
/// Extract a string literal from the macro expanded version of `expr`,
match *ext {
MultiModifier(ref mac) => {
- let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
let item = mac.expand(self.cx, attr.span, &meta, item);
kind.expect_from_annotatables(item)
}
MultiDecorator(ref mac) => {
let mut items = Vec::new();
- let meta = panictry!(attr.parse_meta(&self.cx.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.cx.parse_sess));
mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item));
items.push(item);
kind.expect_from_annotatables(items)
}
SyntaxExtension::AttrProcMacro(ref mac) => {
- let item_toks = stream_for_item(&item, &self.cx.parse_sess);
+ let item_toks = stream_for_item(&item, self.cx.parse_sess);
let span = Span { ctxt: self.cx.backtrace(), ..attr.span };
let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_toks);
}
_ => {
let msg = &format!("macro `{}` may not be used in attributes", attr.path);
- self.cx.span_err(attr.span, &msg);
+ self.cx.span_err(attr.span, msg);
kind.dummy(attr.span)
}
}
};
let path = &mac.node.path;
- let ident = ident.unwrap_or(keywords::Invalid.ident());
+ let ident = ident.unwrap_or_else(|| keywords::Invalid.ident());
let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
let opt_expanded = match *ext {
NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
call_site: span,
callee: NameAndSpan {
format: MacroBang(Symbol::intern(&format!("{}", path))),
- span: exp_span,
+ span: exp_span.map(|(_, s)| s),
allow_internal_unstable: allow_internal_unstable,
},
});
}
_ => {
let msg = &format!("macro `{}` may not be used for derive attributes", attr.path);
- self.cx.span_err(span, &msg);
+ self.cx.span_err(span, msg);
kind.dummy(span)
}
}
fn check_attributes(&mut self, attrs: &[ast::Attribute]) {
let features = self.cx.ecfg.features.unwrap();
for attr in attrs.iter() {
- feature_gate::check_attribute(&attr, &self.cx.parse_sess, features);
+ feature_gate::check_attribute(attr, self.cx.parse_sess, features);
}
}
}
pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
- for i in 0 .. attrs.len() {
- if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
- return Some(attrs.remove(i));
- }
- }
-
- None
+ attrs.iter()
+ .position(|a| !attr::is_known(a) && !is_builtin_attr(a))
+ .map(|i| attrs.remove(i))
}
// These are pretty nasty. Ideally, we would keep the tokens around, linked from
let result = noop_fold_item(item, self);
self.cx.current_expansion.module = orig_module;
self.cx.current_expansion.directory_ownership = orig_directory_ownership;
- return result;
+ result
}
// Ensure that test functions are accessible from the test harness.
ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => {
///
/// This is registered as a set of expression syntax extension called quote!
/// that lifts its argument token-tree to an AST representing the
-/// construction of the same token tree, with token::SubstNt interpreted
+/// construction of the same token tree, with `token::SubstNt` interpreted
/// as antiquotes (splices).
pub mod rt {
result = results.pop().unwrap();
result.push(tree);
}
- tree @ _ => result.push(tree),
+ tree => result.push(tree),
}
}
result
#[allow(non_upper_case_globals)]
fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
macro_rules! mk_lit {
- ($name: expr, $suffix: expr, $($args: expr),*) => {{
- let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
+ ($name: expr, $suffix: expr, $content: expr $(, $count: expr)*) => {{
+ let name = mk_name(cx, sp, ast::Ident::with_empty_ctxt($content));
+ let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![
+ name $(, cx.expr_usize(sp, $count))*
+ ]);
let suffix = match $suffix {
Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))),
None => cx.expr_none(sp)
cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix])
}}
}
- match *tok {
+
+ let name = match *tok {
token::BinOp(binop) => {
return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec![mk_binop(cx, sp, binop)]);
}
vec![mk_delim(cx, sp, delim)]);
}
- token::Literal(token::Byte(i), suf) => {
- let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
- return mk_lit!("Byte", suf, e_byte);
- }
-
- token::Literal(token::Char(i), suf) => {
- let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
- return mk_lit!("Char", suf, e_char);
- }
-
- token::Literal(token::Integer(i), suf) => {
- let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
- return mk_lit!("Integer", suf, e_int);
- }
-
- token::Literal(token::Float(fident), suf) => {
- let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident));
- return mk_lit!("Float", suf, e_fident);
- }
-
- token::Literal(token::Str_(ident), suf) => {
- return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))
- }
-
- token::Literal(token::StrRaw(ident, n), suf) => {
- return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)),
- cx.expr_usize(sp, n))
- }
+ token::Literal(token::Byte(i), suf) => return mk_lit!("Byte", suf, i),
+ token::Literal(token::Char(i), suf) => return mk_lit!("Char", suf, i),
+ token::Literal(token::Integer(i), suf) => return mk_lit!("Integer", suf, i),
+ token::Literal(token::Float(i), suf) => return mk_lit!("Float", suf, i),
+ token::Literal(token::Str_(i), suf) => return mk_lit!("Str_", suf, i),
+ token::Literal(token::StrRaw(i, n), suf) => return mk_lit!("StrRaw", suf, i, n),
+ token::Literal(token::ByteStr(i), suf) => return mk_lit!("ByteStr", suf, i),
+ token::Literal(token::ByteStrRaw(i, n), suf) => return mk_lit!("ByteStrRaw", suf, i, n),
token::Ident(ident) => {
return cx.expr_call(sp,
token::Interpolated(_) => panic!("quote! with interpolated token"),
- _ => ()
- }
-
- let name = match *tok {
token::Eq => "Eq",
token::Lt => "Lt",
token::Le => "Le",
token::At => "At",
token::Dot => "Dot",
token::DotDot => "DotDot",
+ token::DotDotDot => "DotDotDot",
token::Comma => "Comma",
token::Semi => "Semi",
token::Colon => "Colon",
token::Question => "Question",
token::Underscore => "Underscore",
token::Eof => "Eof",
- _ => panic!("unhandled token in quote!"),
+
+ token::Whitespace | token::SubstNt(_) | token::Comment | token::Shebang(_) => {
+ panic!("unhandled token in quote!");
+ }
};
mk_token_path(cx, sp, name)
}
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "line!");
- let topmost = cx.expansion_cause();
+ let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.codemap().lookup_char_pos(topmost.lo);
base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "column!");
- let topmost = cx.expansion_cause();
+ let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.codemap().lookup_char_pos(topmost.lo);
base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32))
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "file!");
- let topmost = cx.expansion_cause();
+ let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.codemap().lookup_char_pos(topmost.lo);
base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name)))
}
cx.span_err(sp,
&format!("{} wasn't a utf-8 file",
file.display()));
- return DummyResult::expr(sp);
+ DummyResult::expr(sp)
}
}
}
Err(e) => {
cx.span_err(sp,
&format!("couldn't read {}: {}", file.display(), e));
- return DummyResult::expr(sp);
+ DummyResult::expr(sp)
}
Ok(..) => {
// Add this input file to the code map to make it available as
//! repetitions indicated by Kleene stars. It only advances or calls out to the
//! real Rust parser when no `cur_eis` items remain
//!
-//! Example: Start parsing `a a a a b` against [· a $( a )* a b].
+//! Example:
//!
-//! Remaining input: `a a a a b`
+//! ```text, ignore
+//! Start parsing a a a a b against [· a $( a )* a b].
+//!
+//! Remaining input: a a a a b
//! next_eis: [· a $( a )* a b]
//!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
//!
-//! Remaining input: `a a a b`
+//! Remaining input: a a a b
//! cur: [a · $( a )* a b]
//! Descend/Skip (first item).
//! next: [a $( · a )* a b] [a $( a )* · a b].
//!
-//! - - - Advance over an `a`. - - -
+//! - - - Advance over an a. - - -
//!
-//! Remaining input: `a a b`
+//! Remaining input: a a b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
-//! Remaining input: `a b`
+//! Remaining input: a b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
-//! - - - Advance over an `a`. - - - (this looks exactly like the last step)
+//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
-//! Remaining input: `b`
+//! Remaining input: b
//! cur: [a $( a · )* a b] next: [a $( a )* a · b]
//! Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b]
//!
-//! - - - Advance over a `b`. - - -
+//! - - - Advance over a b. - - -
//!
-//! Remaining input: ``
+//! Remaining input: ''
//! eof: [a $( a )* a b ·]
+//! ```
pub use self::NamedMatch::*;
pub use self::ParseResult::*;
})
}
-/// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL:
+/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
/// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the NamedMatch have the same nonterminal type
-/// (expr, item, etc). Each leaf in a single NamedMatch corresponds to a
-/// single token::MATCH_NONTERMINAL in the TokenTree that produced it.
+/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
+/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
///
-/// The in-memory structure of a particular NamedMatch represents the match
+/// The in-memory structure of a particular `NamedMatch` represents the match
/// that occurred when a particular subset of a matcher was applied to a
/// particular token tree.
///
-/// The width of each MatchedSeq in the NamedMatch, and the identity of the
-/// `MatchedNonterminal`s, will depend on the token tree it was applied to:
-/// each MatchedSeq corresponds to a single TTSeq in the originating
-/// token tree. The depth of the NamedMatch structure will therefore depend
+/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
+/// the `MatchedNonterminal`s, will depend on the token tree it was applied
+/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
+/// token tree. The depth of the `NamedMatch` structure will therefore depend
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
fn token_name_eq(t1 : &Token, t2 : &Token) -> bool {
- match (t1,t2) {
- (&token::Ident(id1),&token::Ident(id2))
- | (&token::Lifetime(id1),&token::Lifetime(id2)) =>
- id1.name == id2.name,
- _ => *t1 == *t2
+ if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) {
+ id1.name == id2.name
+ } else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
+ id1.name == id2.name
+ } else {
+ *t1 == *t2
}
}
// Check if we need a separator
if idx == len && ei.sep.is_some() {
// We have a separator, and it is the current token.
- if ei.sep.as_ref().map(|ref sep| token_name_eq(&token, sep)).unwrap_or(false) {
+ if ei.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) {
ei.idx += 1;
next_eis.push(ei);
}
cur_eis.push(ei);
}
TokenTree::Token(_, ref t) => {
- if token_name_eq(t, &token) {
+ if token_name_eq(t, token) {
ei.idx += 1;
next_eis.push(ei);
}
}
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
- match name {
- "tt" => {
- return token::NtTT(p.parse_token_tree());
- }
- _ => {}
+ if name == "tt" {
+ return token::NtTT(p.parse_token_tree());
}
// check at the beginning and the parser checks after each bump
p.process_potential_macro_variable();
-> Box<MacResult+'cx> {
if cx.trace_macros() {
let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
- let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert(vec![]);
+ let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
values.push(format!("expands to `{}! {{ {} }}`", name, arg));
}
let mut valid = true;
// Extract the arguments:
- let lhses = match **argument_map.get(&lhs_nm).unwrap() {
+ let lhses = match *argument_map[&lhs_nm] {
MatchedSeq(ref s, _) => {
s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m {
_ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
};
- let rhses = match **argument_map.get(&rhs_nm).unwrap() {
+ let rhses = match *argument_map[&rhs_nm] {
MatchedSeq(ref s, _) => {
s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m {
valid: valid,
});
- NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable"))
+ NormalTT(exp,
+ Some((def.id, def.span)),
+ attr::contains_name(&def.attrs, "allow_internal_unstable"))
}
fn check_lhs_nt_follows(sess: &ParseSess,
lhs: "ed::TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
- match lhs {
- "ed::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts),
- _ => {
- let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
- sess.span_diagnostic.span_err(lhs.span(), msg);
- false
- }
+ if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+ check_matcher(sess, features, &tts.tts)
+ } else {
+ let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
+ sess.span_diagnostic.span_err(lhs.span(), msg);
+ false
}
// we don't abort on errors on rejection, the driver will do that for us
// after parsing/expansion. we can report every error in every macro this way.
return false;
},
TokenTree::Sequence(span, ref seq) => {
- if seq.separator.is_none() {
- if seq.tts.iter().all(|seq_tt| {
- match *seq_tt {
- TokenTree::Sequence(_, ref sub_seq) =>
- sub_seq.op == quoted::KleeneOp::ZeroOrMore,
- _ => false,
- }
- }) {
- sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
- return false;
+ if seq.separator.is_none() && seq.tts.iter().all(|seq_tt| {
+ match *seq_tt {
+ TokenTree::Sequence(_, ref sub_seq) =>
+ sub_seq.op == quoted::KleeneOp::ZeroOrMore,
+ _ => false,
}
+ }) {
+ sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+ return false;
}
if !check_lhs_no_empty_seq(sess, &seq.tts) {
return false;
}
}
- return first;
+ first
}
}
// we only exit the loop if `tts` was empty or if every
// element of `tts` matches the empty sequence.
assert!(first.maybe_empty);
- return first;
+ first
}
}
let build_suffix_first = || {
let mut s = first_sets.first(suffix);
if s.maybe_empty { s.add_all(follow); }
- return s;
+ s
};
// (we build `suffix_first` on demand below; you can tell
match *tt {
quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
- _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
+ _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+ in follow set checker"),
}
}
}
}
+ pub fn is_empty(&self) -> bool {
+ match *self {
+ TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+ token::NoDelim => delimed.tts.is_empty(),
+ _ => false,
+ },
+ TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
+ _ => true,
+ }
+ }
+
pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
}
_ => end_sp,
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
};
sess.missing_fragment_specifiers.borrow_mut().insert(span);
result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
Some(op) => return (Some(tok), op),
None => span,
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
}
},
- tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+ tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
};
sess.span_diagnostic.span_err(span, "expected `*` or `+`");
&repeats) {
LockstepIterSize::Unconstrained => {
panic!(sp_diag.span_fatal(
- sp.clone(), /* blame macro writer */
+ sp, /* blame macro writer */
"attempted to repeat an expression \
containing no syntax \
variables matched as repeating at this depth"));
}
LockstepIterSize::Contradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
- panic!(sp_diag.span_fatal(sp.clone(), &msg[..]));
+ panic!(sp_diag.span_fatal(sp, &msg[..]));
}
LockstepIterSize::Constraint(len, _) => {
if len == 0 {
if seq.op == quoted::KleeneOp::OneOrMore {
// FIXME #2887 blame invoker
- panic!(sp_diag.span_fatal(sp.clone(),
+ panic!(sp_diag.span_fatal(sp,
"this must repeat at least once"));
}
} else {
impl ::std::fmt::Debug for AttributeGate {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
match *self {
- Gated(ref stab, ref name, ref expl, _) =>
+ Gated(ref stab, name, expl, _) =>
write!(fmt, "Gated({:?}, {}, {})", stab, name, expl),
Ungated => write!(fmt, "Ungated")
}
];
// cfg(...)'s that are feature gated
-const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] = &[
+const GATED_CFGS: &[(&str, &str, fn(&Features) -> bool)] = &[
// (name in cfg, feature, function to check if the feature is enabled)
("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)),
("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)),
let name = unwrap_or!(attr.name(), return).as_str();
for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
if name == n {
- if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
+ if let Gated(_, name, desc, ref has_feature) = *gateage {
gate_feature_fn!(self, has_feature, attr.span, name, desc);
}
debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage);
NameValue(ref lit) => !lit.node.is_str(),
List(ref list) => list.iter().any(|li| {
match li.node {
- MetaItem(ref mi) => contains_novel_literal(&mi),
+ MetaItem(ref mi) => contains_novel_literal(mi),
Literal(_) => true,
}
}),
return
}
- let meta = panictry!(attr.parse_meta(&self.context.parse_sess));
+ let meta = panictry!(attr.parse_meta(self.context.parse_sess));
if contains_novel_literal(&meta) {
gate_feature_post!(&self, attr_literals, attr.span,
"non-string literals in attributes, or string \
}
ast::ItemKind::Impl(_, polarity, defaultness, _, _, _, _) => {
- match polarity {
- ast::ImplPolarity::Negative => {
- gate_feature_post!(&self, optin_builtin_traits,
- i.span,
- "negative trait bounds are not yet fully implemented; \
- use marker types for now");
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ gate_feature_post!(&self, optin_builtin_traits,
+ i.span,
+ "negative trait bounds are not yet fully implemented; \
+ use marker types for now");
}
if let ast::Defaultness::Default = defaultness {
fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) {
if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty {
- match output_ty.node {
- ast::TyKind::Never => return,
- _ => (),
- };
- self.visit_ty(output_ty)
+ if output_ty.node != ast::TyKind::Never {
+ self.visit_ty(output_ty)
+ }
}
}
span: Span,
_node_id: NodeId) {
// check for const fn declarations
- match fn_kind {
- FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) => {
- gate_feature_post!(&self, const_fn, span, "const fn is unstable");
- }
- _ => {
- // stability of const fn methods are covered in
- // visit_trait_item and visit_impl_item below; this is
- // because default methods don't pass through this
- // point.
- }
+ if let FnKind::ItemFn(_, _, _, Spanned { node: ast::Constness::Const, .. }, _, _, _) =
+ fn_kind {
+ gate_feature_post!(&self, const_fn, span, "const fn is unstable");
}
+ // stability of const fn methods are covered in
+ // visit_trait_item and visit_impl_item below; this is
+ // because default methods don't pass through this
+ // point.
match fn_kind {
FnKind::ItemFn(_, _, _, _, abi, _, _) |
})
.collect()
})
- .unwrap_or(vec![])
+ .unwrap_or_else(|_| vec![])
}
}
_ => break,
}
}
- return Ok(attrs);
+ Ok(attrs)
}
/// Matches `attribute = # ! [ meta_item ]`
}
let attr = self.parse_attribute(true)?;
- assert!(attr.style == ast::AttrStyle::Inner);
+ assert_eq!(attr.style, ast::AttrStyle::Inner);
attrs.push(attr);
}
token::DocComment(s) => {
}
/// this statement requires a semicolon after it.
-/// note that in one case (stmt_semi), we've already
+/// note that in one case (`stmt_semi`), we've already
/// seen the semicolon, and thus don't need another.
pub fn stmt_ends_with_semi(stmt: &ast::StmtKind) -> bool {
match *stmt {
ast::StmtKind::Local(_) => true,
- ast::StmtKind::Item(_) => false,
ast::StmtKind::Expr(ref e) => expr_requires_semi_to_be_stmt(e),
- ast::StmtKind::Semi(..) => false,
+ ast::StmtKind::Item(_) |
+ ast::StmtKind::Semi(..) |
ast::StmtKind::Mac(..) => false,
}
}
use parse::token;
-/// SeqSep : a sequence separator (token)
+/// `SeqSep` : a sequence separator (token)
/// and whether a trailing separator is allowed.
pub struct SeqSep {
pub sep: Option<token::Token>,
while j > i && lines[j - 1].trim().is_empty() {
j -= 1;
}
- lines[i..j].iter().cloned().collect()
+ lines[i..j].to_vec()
}
/// remove a "[ \t]*\*" block from each line, if possible
impl<'a> StringReader<'a> {
/// For comments.rs, which hackily pokes into next_pos and ch
- pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
+ pub fn new_raw(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
let mut sr = StringReader::new_raw_internal(sess, filemap);
sr.bump();
sr
pub fn new(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {
let mut sr = StringReader::new_raw(sess, filemap);
- if let Err(_) = sr.advance_token() {
+ if sr.advance_token().is_err() {
sr.emit_fatal_errors();
panic!(FatalError);
}
sr.bump();
- if let Err(_) = sr.advance_token() {
+ if sr.advance_token().is_err() {
sr.emit_fatal_errors();
panic!(FatalError);
}
self.bump();
}
- return if doc_comment {
+ if doc_comment {
self.with_str_from(start_bpos, |string| {
// comments with only more "/"s are not doc comments
let tok = if is_doc_comment(string) {
tok: token::Comment,
sp: mk_sp(start_bpos, self.pos),
})
- };
+ }
}
Some('*') => {
self.bump();
}
let pos = self.pos;
self.check_float_base(start_bpos, pos, base);
- return token::Float(self.name_from(start_bpos));
+ token::Float(self.name_from(start_bpos))
} else {
// it might be a float if it has an exponent
if self.ch_is('e') || self.ch_is('E') {
return token::Float(self.name_from(start_bpos));
}
// but we certainly have an integer!
- return token::Integer(self.name_from(start_bpos));
+ token::Integer(self.name_from(start_bpos))
}
}
self.bump();
if self.ch_is('=') {
self.bump();
- return token::BinOpEq(op);
+ token::BinOpEq(op)
} else {
- return token::BinOp(op);
+ token::BinOp(op)
}
}
// One-byte tokens.
';' => {
self.bump();
- return Ok(token::Semi);
+ Ok(token::Semi)
}
',' => {
self.bump();
- return Ok(token::Comma);
+ Ok(token::Comma)
}
'.' => {
self.bump();
- return if self.ch_is('.') {
+ if self.ch_is('.') {
self.bump();
if self.ch_is('.') {
self.bump();
}
} else {
Ok(token::Dot)
- };
+ }
}
'(' => {
self.bump();
- return Ok(token::OpenDelim(token::Paren));
+ Ok(token::OpenDelim(token::Paren))
}
')' => {
self.bump();
- return Ok(token::CloseDelim(token::Paren));
+ Ok(token::CloseDelim(token::Paren))
}
'{' => {
self.bump();
- return Ok(token::OpenDelim(token::Brace));
+ Ok(token::OpenDelim(token::Brace))
}
'}' => {
self.bump();
- return Ok(token::CloseDelim(token::Brace));
+ Ok(token::CloseDelim(token::Brace))
}
'[' => {
self.bump();
- return Ok(token::OpenDelim(token::Bracket));
+ Ok(token::OpenDelim(token::Bracket))
}
']' => {
self.bump();
- return Ok(token::CloseDelim(token::Bracket));
+ Ok(token::CloseDelim(token::Bracket))
}
'@' => {
self.bump();
- return Ok(token::At);
+ Ok(token::At)
}
'#' => {
self.bump();
- return Ok(token::Pound);
+ Ok(token::Pound)
}
'~' => {
self.bump();
- return Ok(token::Tilde);
+ Ok(token::Tilde)
}
'?' => {
self.bump();
- return Ok(token::Question);
+ Ok(token::Question)
}
':' => {
self.bump();
if self.ch_is(':') {
self.bump();
- return Ok(token::ModSep);
+ Ok(token::ModSep)
} else {
- return Ok(token::Colon);
+ Ok(token::Colon)
}
}
'$' => {
self.bump();
- return Ok(token::Dollar);
+ Ok(token::Dollar)
}
// Multi-byte tokens.
self.bump();
if self.ch_is('=') {
self.bump();
- return Ok(token::EqEq);
+ Ok(token::EqEq)
} else if self.ch_is('>') {
self.bump();
- return Ok(token::FatArrow);
+ Ok(token::FatArrow)
} else {
- return Ok(token::Eq);
+ Ok(token::Eq)
}
}
'!' => {
self.bump();
if self.ch_is('=') {
self.bump();
- return Ok(token::Ne);
+ Ok(token::Ne)
} else {
- return Ok(token::Not);
+ Ok(token::Not)
}
}
'<' => {
match self.ch.unwrap_or('\x00') {
'=' => {
self.bump();
- return Ok(token::Le);
+ Ok(token::Le)
}
'<' => {
- return Ok(self.binop(token::Shl));
+ Ok(self.binop(token::Shl))
}
'-' => {
self.bump();
match self.ch.unwrap_or('\x00') {
_ => {
- return Ok(token::LArrow);
+ Ok(token::LArrow)
}
}
}
_ => {
- return Ok(token::Lt);
+ Ok(token::Lt)
}
}
}
match self.ch.unwrap_or('\x00') {
'=' => {
self.bump();
- return Ok(token::Ge);
+ Ok(token::Ge)
}
'>' => {
- return Ok(self.binop(token::Shr));
+ Ok(self.binop(token::Shr))
}
_ => {
- return Ok(token::Gt);
+ Ok(token::Gt)
}
}
}
};
self.bump(); // advance ch past token
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::Char(id), suffix));
+ Ok(token::Literal(token::Char(id), suffix))
}
'b' => {
self.bump();
_ => unreachable!(), // Should have been a token::Ident above.
};
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(lit, suffix));
+ Ok(token::Literal(lit, suffix))
}
'"' => {
let start_bpos = self.pos;
};
self.bump();
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::Str_(id), suffix));
+ Ok(token::Literal(token::Str_(id), suffix))
}
'r' => {
let start_bpos = self.pos;
Symbol::intern("??")
};
let suffix = self.scan_optional_raw_name();
- return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
+ Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
}
'-' => {
if self.nextch_is('>') {
self.bump();
self.bump();
- return Ok(token::RArrow);
+ Ok(token::RArrow)
} else {
- return Ok(self.binop(token::Minus));
+ Ok(self.binop(token::Minus))
}
}
'&' => {
if self.nextch_is('&') {
self.bump();
self.bump();
- return Ok(token::AndAnd);
+ Ok(token::AndAnd)
} else {
- return Ok(self.binop(token::And));
+ Ok(self.binop(token::And))
}
}
'|' => {
Some('|') => {
self.bump();
self.bump();
- return Ok(token::OrOr);
+ Ok(token::OrOr)
}
_ => {
- return Ok(self.binop(token::Or));
+ Ok(self.binop(token::Or))
}
}
}
'+' => {
- return Ok(self.binop(token::Plus));
+ Ok(self.binop(token::Plus))
}
'*' => {
- return Ok(self.binop(token::Star));
+ Ok(self.binop(token::Star))
}
'/' => {
- return Ok(self.binop(token::Slash));
+ Ok(self.binop(token::Slash))
}
'^' => {
- return Ok(self.binop(token::Caret));
+ Ok(self.binop(token::Caret))
}
'%' => {
- return Ok(self.binop(token::Percent));
+ Ok(self.binop(token::Percent))
}
c => {
let last_bpos = self.pos;
bpos,
"unknown start of token",
c);
- unicode_chars::check_for_substitution(&self, c, &mut err);
+ unicode_chars::check_for_substitution(self, c, &mut err);
self.fatal_errs.push(err);
Err(())
}
if self.ch_is('\n') {
self.bump();
}
- return val;
+ val
}
fn read_one_line_comment(&mut self) -> String {
let val = self.read_to_eol();
assert!((val.as_bytes()[0] == b'/' && val.as_bytes()[1] == b'/') ||
(val.as_bytes()[0] == b'#' && val.as_bytes()[1] == b'!'));
- return val;
+ val
}
fn consume_non_eol_whitespace(&mut self) {
Symbol::intern("?")
};
self.bump(); // advance ch past token
- return token::Byte(id);
+ token::Byte(id)
}
fn scan_byte_escape(&mut self, delim: char, below_0x7f_only: bool) -> bool {
Symbol::intern("??")
};
self.bump();
- return token::ByteStr(id);
+ token::ByteStr(id)
}
fn scan_raw_byte_string(&mut self) -> token::Lit {
self.bump();
}
self.bump();
- return token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
- hash_count);
+ token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos),
+ hash_count)
}
}
}
fn is_dec_digit(c: Option<char>) -> bool {
- return in_range(c, '0', '9');
+ in_range(c, '0', '9')
}
pub fn is_doc_comment(s: &str) -> bool {
parser.parse_inner_attributes()
}
-pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, ast::Crate> {
+pub fn parse_crate_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<ast::Crate> {
new_parser_from_source_str(sess, name, source).parse_crate_mod()
}
-pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Vec<ast::Attribute>> {
+pub fn parse_crate_attrs_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Vec<ast::Attribute>> {
new_parser_from_source_str(sess, name, source).parse_inner_attributes()
}
-pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, P<ast::Expr>> {
+pub fn parse_expr_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<P<ast::Expr>> {
new_parser_from_source_str(sess, name, source).parse_expr()
}
///
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and`Err`
/// when a syntax error occurred.
-pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Option<P<ast::Item>>> {
+pub fn parse_item_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Option<P<ast::Item>>> {
new_parser_from_source_str(sess, name, source).parse_item()
}
-pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, ast::MetaItem> {
+pub fn parse_meta_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<ast::MetaItem> {
new_parser_from_source_str(sess, name, source).parse_meta_item()
}
-pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> PResult<'a, Option<ast::Stmt>> {
+pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess)
+ -> PResult<Option<ast::Stmt>> {
new_parser_from_source_str(sess, name, source).parse_stmt()
}
-pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
-> TokenStream {
filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
}
// Create a new parser from a source string
-pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
- -> Parser<'a> {
+pub fn new_parser_from_source_str(sess: &ParseSess, name: String, source: String)
+ -> Parser {
filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
}
}
/// Given a filemap and config, return a parser
-pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
+pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
let end_pos = filemap.end_pos;
let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser {
stream_to_parser(sess, tts.into_iter().collect())
}
panictry!(srdr.parse_all_token_trees())
}
-/// Given stream and the ParseSess, produce a parser
-pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+/// Given stream and the `ParseSess`, produce a parser
+pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser {
Parser::new(sess, stream, None, false)
}
(c, 4)
}
'u' => {
- assert!(lit.as_bytes()[2] == b'{');
+ assert_eq!(lit.as_bytes()[2], b'{');
let idx = lit.find('}').unwrap();
let v = u32::from_str_radix(&lit[3..idx], 16).unwrap();
let c = char::from_u32(v).unwrap();
}
let mut chars = lit.char_indices().peekable();
- loop {
- match chars.next() {
- Some((i, c)) => {
- match c {
- '\\' => {
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
-
- if ch == '\n' {
- eat(&mut chars);
- } else if ch == '\r' {
- chars.next();
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
-
- if ch != '\n' {
- panic!("lexer accepted bare CR");
- }
- eat(&mut chars);
- } else {
- // otherwise, a normal escape
- let (c, n) = char_lit(&lit[i..]);
- for _ in 0..n - 1 { // we don't need to move past the first \
- chars.next();
- }
- res.push(c);
- }
- },
- '\r' => {
- let ch = chars.peek().unwrap_or_else(|| {
- panic!("{}", error(i))
- }).1;
+ while let Some((i, c)) = chars.next() {
+ match c {
+ '\\' => {
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
+
+ if ch == '\n' {
+ eat(&mut chars);
+ } else if ch == '\r' {
+ chars.next();
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
- if ch != '\n' {
- panic!("lexer accepted bare CR");
- }
+ if ch != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ eat(&mut chars);
+ } else {
+ // otherwise, a normal escape
+ let (c, n) = char_lit(&lit[i..]);
+ for _ in 0..n - 1 { // we don't need to move past the first \
chars.next();
- res.push('\n');
}
- c => res.push(c),
+ res.push(c);
}
},
- None => break
+ '\r' => {
+ let ch = chars.peek().unwrap_or_else(|| {
+ panic!("{}", error(i))
+ }).1;
+
+ if ch != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ chars.next();
+ res.push('\n');
+ }
+ c => res.push(c),
}
}
debug!("raw_str_lit: given {}", escape_default(lit));
let mut res = String::with_capacity(lit.len());
- // FIXME #8372: This could be a for-loop if it didn't borrow the iterator
let mut chars = lit.chars().peekable();
- loop {
- match chars.next() {
- Some(c) => {
- if c == '\r' {
- if *chars.peek().unwrap() != '\n' {
- panic!("lexer accepted bare CR");
- }
- chars.next();
- res.push('\n');
- } else {
- res.push(c);
- }
- },
- None => break
+ while let Some(c) = chars.next() {
+ if c == '\r' {
+ if *chars.peek().unwrap() != '\n' {
+ panic!("lexer accepted bare CR");
+ }
+ chars.next();
+ res.push('\n');
+ } else {
+ res.push(c);
}
}
if lit.len() == 1 {
(lit.as_bytes()[0], 1)
} else {
- assert!(lit.as_bytes()[0] == b'\\', err(0));
+ assert_eq!(lit.as_bytes()[0], b'\\', "{}", err(0));
let b = match lit.as_bytes()[1] {
b'"' => b'"',
b'n' => b'\n',
}
}
};
- return (b, 2);
+ (b, 2)
}
}
let error = |i| format!("lexer should have rejected {} at {}", lit, i);
/// Eat everything up to a non-whitespace
- fn eat<'a, I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
+ fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
loop {
match it.peek().map(|x| x.1) {
Some(b' ') | Some(b'\n') | Some(b'\r') | Some(b'\t') => {
if let Some(err) = err {
err!(diag, |span, diag| diag.span_err(span, err));
}
- return filtered_float_lit(Symbol::intern(&s), Some(suf), diag)
+ return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
}
}
if !self.obsolete_set.contains(&kind) &&
(error || self.sess.span_diagnostic.can_emit_warnings) {
- err.note(&format!("{}", desc));
+ err.note(desc);
self.obsolete_set.insert(kind);
}
err.emit();
fn next_desugared(&mut self) -> TokenAndSpan {
let (sp, name) = match self.next() {
TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
- tok @ _ => return tok,
+ tok => return tok,
};
let stripped = strip_doc_comment_decoration(&name.as_str());
}
impl Error {
- pub fn span_err<'a>(self, sp: Span, handler: &'a errors::Handler) -> DiagnosticBuilder<'a> {
+ pub fn span_err(self, sp: Span, handler: &errors::Handler) -> DiagnosticBuilder {
match self {
Error::FileNotFoundForModule { ref mod_name,
ref default_path,
}
fn next_tok(&mut self) -> TokenAndSpan {
- let mut next = match self.desugar_doc_comments {
- true => self.token_cursor.next_desugared(),
- false => self.token_cursor.next(),
+ let mut next = if self.desugar_doc_comments {
+ self.token_cursor.next_desugared()
+ } else {
+ self.token_cursor.next()
};
if next.sp == syntax_pos::DUMMY_SP {
next.sp = self.prev_span;
// This might be a sign we need a connect method on Iterator.
let b = i.next()
.map_or("".to_string(), |t| t.to_string());
- i.enumerate().fold(b, |mut b, (i, ref a)| {
+ i.enumerate().fold(b, |mut b, (i, a)| {
if tokens.len() > 2 && i == tokens.len() - 2 {
b.push_str(", or ");
} else if tokens.len() == 2 && i == tokens.len() - 2 {
token::CloseDelim(..) | token::Eof => break,
_ => {}
};
- match sep.sep {
- Some(ref t) => {
- if first {
- first = false;
- } else {
- if let Err(e) = self.expect(t) {
- fe(e);
- break;
- }
+ if let Some(ref t) = sep.sep {
+ if first {
+ first = false;
+ } else {
+ if let Err(e) = self.expect(t) {
+ fe(e);
+ break;
}
}
- _ => ()
}
if sep.trailing_sep_allowed && kets.iter().any(|k| self.check(k)) {
break;
let sum_span = ty.span.to(self.prev_span);
let mut err = struct_span_err!(self.sess.span_diagnostic, sum_span, E0178,
- "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(&ty));
+ "expected a path on the left-hand side of `+`, not `{}`", pprust::ty_to_string(ty));
match ty.node {
TyKind::Rptr(ref lifetime, ref mut_ty) => {
pub fn is_named_argument(&mut self) -> bool {
let offset = match self.token {
- token::BinOp(token::And) => 1,
+ token::BinOp(token::And) |
token::AndAnd => 1,
_ if self.token.is_keyword(keywords::Mut) => 1,
_ => 0
let attrs = self.parse_outer_attributes()?;
let pats = self.parse_pats()?;
- let mut guard = None;
- if self.eat_keyword(keywords::If) {
- guard = Some(self.parse_expr()?);
- }
+ let guard = if self.eat_keyword(keywords::If) {
+ Some(self.parse_expr()?)
+ } else {
+ None
+ };
self.expect(&token::FatArrow)?;
let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR, None)?;
let lo = self.span;
let pat = self.parse_pat()?;
- let mut ty = None;
- if self.eat(&token::Colon) {
- ty = Some(self.parse_ty()?);
- }
+ let ty = if self.eat(&token::Colon) {
+ Some(self.parse_ty()?)
+ } else {
+ None
+ };
let init = self.parse_initializer()?;
Ok(P(ast::Local {
ty: ty,
},
None => {
let unused_attrs = |attrs: &[_], s: &mut Self| {
- if attrs.len() > 0 {
+ if !attrs.is_empty() {
if s.prev_token_kind == PrevTokenKind::DocComment {
s.span_fatal_err(s.prev_span, Error::UselessDocComment).emit();
} else {
self.expect(&token::Not)?;
}
- self.complain_if_pub_macro(&vis, prev_span);
+ self.complain_if_pub_macro(vis, prev_span);
// eat a matched-delimiter token tree:
*at_end = true;
}
}
} else {
- match polarity {
- ast::ImplPolarity::Negative => {
- // This is a negated type implementation
- // `impl !MyType {}`, which is not allowed.
- self.span_err(neg_span, "inherent implementation can't be negated");
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ // This is a negated type implementation
+ // `impl !MyType {}`, which is not allowed.
+ self.span_err(neg_span, "inherent implementation can't be negated");
}
None
};
let path_span = self.prev_span;
let help_msg = format!("make this visible only to module `{}` with `in`:", path);
self.expect(&token::CloseDelim(token::Paren))?; // `)`
- let mut err = self.span_fatal_help(path_span, &msg, &suggestion);
+ let mut err = self.span_fatal_help(path_span, msg, suggestion);
err.span_suggestion(path_span, &help_msg, format!("in {}", path));
err.emit(); // emit diagnostic, but continue with public visibility
}
pub fn len(self) -> usize {
if self == NoDelim { 0 } else { 1 }
}
+
+ pub fn is_empty(self) -> bool {
+ self == NoDelim
+ }
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub fn can_begin_expr(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_expr(ident), // value name or keyword
- OpenDelim(..) => true, // tuple, array or block
- Literal(..) => true, // literal
- Not => true, // operator not
- BinOp(Minus) => true, // unary minus
- BinOp(Star) => true, // dereference
- BinOp(Or) | OrOr => true, // closure
- BinOp(And) => true, // reference
- AndAnd => true, // double reference
- DotDot | DotDotDot => true, // range notation
- Lt | BinOp(Shl) => true, // associated path
- ModSep => true, // global path
+ OpenDelim(..) | // tuple, array or block
+ Literal(..) | // literal
+ Not | // operator not
+ BinOp(Minus) | // unary minus
+ BinOp(Star) | // dereference
+ BinOp(Or) | OrOr | // closure
+ BinOp(And) | // reference
+ AndAnd | // double reference
+ DotDot | DotDotDot | // range notation
+ Lt | BinOp(Shl) | // associated path
+ ModSep | // global path
Pound => true, // expression attributes
Interpolated(ref nt) => match **nt {
NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
pub fn can_begin_type(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_type(ident), // type name or keyword
- OpenDelim(Paren) => true, // tuple
- OpenDelim(Bracket) => true, // array
- Underscore => true, // placeholder
- Not => true, // never
- BinOp(Star) => true, // raw pointer
- BinOp(And) => true, // reference
- AndAnd => true, // double reference
- Question => true, // maybe bound in trait object
- Lifetime(..) => true, // lifetime bound in trait object
- Lt | BinOp(Shl) => true, // associated path
+ OpenDelim(Paren) | // tuple
+ OpenDelim(Bracket) | // array
+ Underscore | // placeholder
+ Not | // never
+ BinOp(Star) | // raw pointer
+ BinOp(And) | // reference
+ AndAnd | // double reference
+ Question | // maybe bound in trait object
+ Lifetime(..) | // lifetime bound in trait object
+ Lt | BinOp(Shl) | // associated path
ModSep => true, // global path
Interpolated(ref nt) => match **nt {
NtIdent(..) | NtTy(..) | NtPath(..) => true,
//! between using 'left' and 'right' terms to denote the wrapped-to-ring-buffer
//! and point-in-infinite-stream senses freely.
//!
-//! There is a parallel ring buffer, 'size', that holds the calculated size of
+//! There is a parallel ring buffer, `size`, that holds the calculated size of
//! each token. Why calculated? Because for Begin/End pairs, the "size"
//! includes everything between the pair. That is, the "size" of Begin is
//! actually the sum of the sizes of everything between Begin and the paired
-//! End that follows. Since that is arbitrarily far in the future, 'size' is
+//! End that follows. Since that is arbitrarily far in the future, `size` is
//! being rewritten regularly while the printer runs; in fact most of the
-//! machinery is here to work out 'size' entries on the fly (and give up when
+//! machinery is here to work out `size` entries on the fly (and give up when
//! they're so obviously over-long that "infinity" is a good enough
//! approximation for purposes of line breaking).
//!
//! The "input side" of the printer is managed as an abstract process called
-//! SCAN, which uses 'scan_stack', to manage calculating 'size'. SCAN is, in
+//! SCAN, which uses `scan_stack`, to manage calculating `size`. SCAN is, in
//! other words, the process of calculating 'size' entries.
//!
//! The "output side" of the printer is managed by an abstract process called
-//! PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to
+//! PRINT, which uses `print_stack`, `margin` and `space` to figure out what to
//! do with each token/size pair it consumes as it goes. It's trying to consume
//! the entire buffered window, but can't output anything until the size is >=
//! 0 (sizes are set to negative while they're pending calculation).
pub fn advance_right(&mut self) {
self.right += 1;
self.right %= self.buf_len;
- assert!(self.right != self.left);
+ assert_ne!(self.right, self.left);
}
pub fn advance_left(&mut self) -> io::Result<()> {
debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right,
token::CloseDelim(token::Bracket) => "]".to_string(),
token::OpenDelim(token::Brace) => "{".to_string(),
token::CloseDelim(token::Brace) => "}".to_string(),
- token::OpenDelim(token::NoDelim) => " ".to_string(),
+ token::OpenDelim(token::NoDelim) |
token::CloseDelim(token::NoDelim) => " ".to_string(),
token::Pound => "#".to_string(),
token::Dollar => "$".to_string(),
let mut out = match lit {
token::Byte(b) => format!("b'{}'", b),
token::Char(c) => format!("'{}'", c),
- token::Float(c) => c.to_string(),
+ token::Float(c) |
token::Integer(c) => c.to_string(),
token::Str_(s) => format!("\"{}\"", s),
token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Interpolated(ref nt) => match **nt {
- token::NtExpr(ref e) => expr_to_string(&e),
- token::NtMeta(ref e) => meta_item_to_string(&e),
- token::NtTy(ref e) => ty_to_string(&e),
- token::NtPath(ref e) => path_to_string(&e),
- token::NtItem(ref e) => item_to_string(&e),
- token::NtBlock(ref e) => block_to_string(&e),
- token::NtStmt(ref e) => stmt_to_string(&e),
- token::NtPat(ref e) => pat_to_string(&e),
+ token::NtExpr(ref e) => expr_to_string(e),
+ token::NtMeta(ref e) => meta_item_to_string(e),
+ token::NtTy(ref e) => ty_to_string(e),
+ token::NtPath(ref e) => path_to_string(e),
+ token::NtItem(ref e) => item_to_string(e),
+ token::NtBlock(ref e) => block_to_string(e),
+ token::NtStmt(ref e) => stmt_to_string(e),
+ token::NtPat(ref e) => pat_to_string(e),
token::NtIdent(ref e) => ident_to_string(e.node),
token::NtTT(ref tree) => tt_to_string(tree.clone()),
- token::NtArm(ref e) => arm_to_string(&e),
- token::NtImplItem(ref e) => impl_item_to_string(&e),
- token::NtTraitItem(ref e) => trait_item_to_string(&e),
- token::NtGenerics(ref e) => generics_to_string(&e),
- token::NtWhereClause(ref e) => where_clause_to_string(&e),
- token::NtArg(ref e) => arg_to_string(&e),
- token::NtVis(ref e) => vis_to_string(&e),
+ token::NtArm(ref e) => arm_to_string(e),
+ token::NtImplItem(ref e) => impl_item_to_string(e),
+ token::NtTraitItem(ref e) => trait_item_to_string(e),
+ token::NtGenerics(ref e) => generics_to_string(e),
+ token::NtWhereClause(ref e) => where_clause_to_string(e),
+ token::NtArg(ref e) => arg_to_string(e),
+ token::NtVis(ref e) => vis_to_string(e),
}
}
}
let mut result = None;
- if let &Some(ref lits) = self.literals()
- {
+ if let Some(ref lits) = *self.literals() {
while cur_lit < lits.len() {
let ltrl = (*lits)[cur_lit].clone();
if ltrl.pos > pos { break; }
fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo)?;
- match self.next_lit(lit.span.lo) {
- Some(ref ltrl) => {
- return word(self.writer(), &(*ltrl).lit);
- }
- _ => ()
+ if let Some(ref ltrl) = self.next_lit(lit.span.lo) {
+ return word(self.writer(), &(*ltrl).lit);
}
match lit.node {
ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
self.popen()?;
self.commasep(Consistent,
&items[..],
- |s, i| s.print_meta_list_item(&i))?;
+ |s, i| s.print_meta_list_item(i))?;
self.pclose()?;
}
}
pub fn commasep_exprs(&mut self, b: Breaks,
exprs: &[P<ast::Expr>]) -> io::Result<()> {
- self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&e), |e| e.span)
+ self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
}
pub fn print_mod(&mut self, _mod: &ast::Mod,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_inner_attributes(attrs)?;
for item in &_mod.items {
- self.print_item(&item)?;
+ self.print_item(item)?;
}
Ok(())
}
match ty.node {
ast::TyKind::Slice(ref ty) => {
word(&mut self.s, "[")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, "]")?;
}
ast::TyKind::Ptr(ref mt) => {
ast::TyKind::Tup(ref elts) => {
self.popen()?;
self.commasep(Inconsistent, &elts[..],
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
if elts.len() == 1 {
word(&mut self.s, ",")?;
}
}
ast::TyKind::Paren(ref typ) => {
self.popen()?;
- self.print_type(&typ)?;
+ self.print_type(typ)?;
self.pclose()?;
}
ast::TyKind::BareFn(ref f) => {
}
ast::TyKind::Array(ref ty, ref v) => {
word(&mut self.s, "[")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, "; ")?;
- self.print_expr(&v)?;
+ self.print_expr(v)?;
word(&mut self.s, "]")?;
}
ast::TyKind::Typeof(ref e) => {
word(&mut self.s, "typeof(")?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
word(&mut self.s, ")")?;
}
ast::TyKind::Infer => {
}
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&t)?;
+ self.print_type(t)?;
word(&mut self.s, ";")?;
self.end()?; // end the head-ibox
self.end() // end the outer cbox
self.head(&visibility_qualified(&item.vis, "extern crate"))?;
if let Some(p) = *optional_path {
let val = p.as_str();
- if val.contains("-") {
+ if val.contains('-') {
self.print_string(&val, ast::StrStyle::Cooked)?;
} else {
self.print_name(p)?;
}
ast::ItemKind::Use(ref vp) => {
self.head(&visibility_qualified(&item.vis, "use"))?;
- self.print_view_path(&vp)?;
+ self.print_view_path(vp)?;
word(&mut self.s, ";")?;
self.end()?; // end inner head-block
self.end()?; // end outer head-block
}
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
space(&mut self.s)?;
self.end()?; // end the head-ibox
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer cbox
}
self.head(&visibility_qualified(&item.vis, "const"))?;
self.print_ident(item.ident)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
space(&mut self.s)?;
self.end()?; // end the head-ibox
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer cbox
}
&item.vis
)?;
word(&mut self.s, " ")?;
- self.print_block_with_attrs(&body, &item.attrs)?;
+ self.print_block_with_attrs(body, &item.attrs)?;
}
ast::ItemKind::Mod(ref _mod) => {
self.head(&visibility_qualified(&item.vis, "mod"))?;
self.print_where_clause(¶ms.where_clause)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
word(&mut self.s, ";")?;
self.end()?; // end the outer ibox
}
}
ast::ItemKind::Struct(ref struct_def, ref generics) => {
self.head(&visibility_qualified(&item.vis, "struct"))?;
- self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+ self.print_struct(struct_def, generics, item.ident, item.span, true)?;
}
ast::ItemKind::Union(ref struct_def, ref generics) => {
self.head(&visibility_qualified(&item.vis, "union"))?;
- self.print_struct(&struct_def, generics, item.ident, item.span, true)?;
+ self.print_struct(struct_def, generics, item.ident, item.span, true)?;
}
ast::ItemKind::DefaultImpl(unsafety, ref trait_ref) => {
self.head("")?;
space(&mut self.s)?;
}
- match polarity {
- ast::ImplPolarity::Negative => {
- word(&mut self.s, "!")?;
- },
- _ => {}
+ if polarity == ast::ImplPolarity::Negative {
+ word(&mut self.s, "!")?;
}
if let Some(ref t) = *opt_trait {
self.word_space("for")?;
}
- self.print_type(&ty)?;
+ self.print_type(ty)?;
self.print_where_clause(&generics.where_clause)?;
space(&mut self.s)?;
Some(ref d) => {
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&d)
+ self.print_expr(d)
}
_ => Ok(())
}
self.print_outer_attributes(&ti.attrs)?;
match ti.node {
ast::TraitItemKind::Const(ref ty, ref default) => {
- self.print_associated_const(ti.ident, &ty,
+ self.print_associated_const(ti.ident, ty,
default.as_ref().map(|expr| &**expr),
&ast::Visibility::Inherited)?;
}
self.print_defaultness(ii.defaultness)?;
match ii.node {
ast::ImplItemKind::Const(ref ty, ref expr) => {
- self.print_associated_const(ii.ident, &ty, Some(&expr), &ii.vis)?;
+ self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?;
}
ast::ImplItemKind::Method(ref sig, ref body) => {
self.head("")?;
self.word_nbsp("let")?;
self.ibox(INDENT_UNIT)?;
- self.print_local_decl(&loc)?;
+ self.print_local_decl(loc)?;
self.end()?;
if let Some(ref init) = loc.init {
self.nbsp()?;
self.word_space("=")?;
- self.print_expr(&init)?;
+ self.print_expr(init)?;
}
word(&mut self.s, ";")?;
self.end()?;
}
- ast::StmtKind::Item(ref item) => self.print_item(&item)?,
+ ast::StmtKind::Item(ref item) => self.print_item(item)?,
ast::StmtKind::Expr(ref expr) => {
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
if parse::classify::expr_requires_semi_to_be_stmt(expr) {
word(&mut self.s, ";")?;
}
}
ast::StmtKind::Semi(ref expr) => {
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
word(&mut self.s, ";")?;
}
ast::StmtKind::Mac(ref mac) => {
let (ref mac, style, ref attrs) = **mac;
self.space_if_not_bol()?;
- self.print_outer_attributes(&attrs)?;
+ self.print_outer_attributes(attrs)?;
let delim = match style {
ast::MacStmtStyle::Braces => token::Brace,
_ => token::Paren
};
- self.print_mac(&mac, delim)?;
+ self.print_mac(mac, delim)?;
if style == ast::MacStmtStyle::Semicolon {
word(&mut self.s, ";")?;
}
ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
self.maybe_print_comment(st.span.lo)?;
self.space_if_not_bol()?;
- self.print_expr_outer_attr_style(&expr, false)?;
+ self.print_expr_outer_attr_style(expr, false)?;
self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))?;
}
_ => self.print_stmt(st)?,
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else if ")?;
- self.print_expr(&i)?;
+ self.print_expr(i)?;
space(&mut self.s)?;
- self.print_block(&then)?;
+ self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "another else-if-let"
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else if let ")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
- self.print_block(&then)?;
+ self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "final else"
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
word(&mut self.s, " else ")?;
- self.print_block(&b)
+ self.print_block(b)
}
// BLEAH, constraints would be great here
_ => {
binop: ast::BinOp) -> bool {
match sub_expr.node {
ast::ExprKind::Binary(ref sub_op, _, _) => {
- if AssocOp::from_ast_binop(sub_op.node).precedence() <
- AssocOp::from_ast_binop(binop.node).precedence() {
- true
- } else {
- false
- }
+ AssocOp::from_ast_binop(sub_op.node).precedence() <
+ AssocOp::from_ast_binop(binop.node).precedence()
}
_ => true
}
space(&mut self.s)?;
}
word(&mut self.s, "..")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
self.end()?;
}
_ => if !fields.is_empty() {
if !tys.is_empty() {
word(&mut self.s, "::<")?;
self.commasep(Inconsistent, tys,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
word(&mut self.s, ">")?;
}
self.print_call_post(base_args)
self.print_expr_vec(&exprs[..], attrs)?;
}
ast::ExprKind::Repeat(ref element, ref count) => {
- self.print_expr_repeat(&element, &count, attrs)?;
+ self.print_expr_repeat(element, count, attrs)?;
}
ast::ExprKind::Struct(ref path, ref fields, ref wth) => {
self.print_expr_struct(path, &fields[..], wth, attrs)?;
self.print_expr_tup(&exprs[..], attrs)?;
}
ast::ExprKind::Call(ref func, ref args) => {
- self.print_expr_call(&func, &args[..])?;
+ self.print_expr_call(func, &args[..])?;
}
ast::ExprKind::MethodCall(ident, ref tys, ref args) => {
self.print_expr_method_call(ident, &tys[..], &args[..])?;
}
ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
- self.print_expr_binary(op, &lhs, &rhs)?;
+ self.print_expr_binary(op, lhs, rhs)?;
}
ast::ExprKind::Unary(op, ref expr) => {
- self.print_expr_unary(op, &expr)?;
+ self.print_expr_unary(op, expr)?;
}
ast::ExprKind::AddrOf(m, ref expr) => {
- self.print_expr_addr_of(m, &expr)?;
+ self.print_expr_addr_of(m, expr)?;
}
ast::ExprKind::Lit(ref lit) => {
- self.print_literal(&lit)?;
+ self.print_literal(lit)?;
}
ast::ExprKind::Cast(ref expr, ref ty) => {
if let ast::ExprKind::Cast(..) = expr.node {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
} else {
- self.print_expr_maybe_paren(&expr)?;
+ self.print_expr_maybe_paren(expr)?;
}
space(&mut self.s)?;
self.word_space("as")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
ast::ExprKind::Type(ref expr, ref ty) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
- self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
+ self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?;
}
ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
- self.print_if_let(&pat, &expr, &blk, elseopt.as_ref().map(|e| &**e))?;
+ self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
}
ast::ExprKind::While(ref test, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("while")?;
- self.print_expr(&test)?;
+ self.print_expr(test)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("while let")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
self.word_space(":")?;
}
self.head("for")?;
- self.print_pat(&pat)?;
+ self.print_pat(pat)?;
space(&mut self.s)?;
self.word_space("in")?;
- self.print_expr(&iter)?;
+ self.print_expr(iter)?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Loop(ref blk, opt_ident) => {
if let Some(ident) = opt_ident {
}
self.head("loop")?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Match(ref expr, ref arms) => {
self.cbox(INDENT_UNIT)?;
self.ibox(4)?;
self.word_nbsp("match")?;
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
space(&mut self.s)?;
self.bopen()?;
self.print_inner_attributes_no_trailing_hardbreak(attrs)?;
ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => {
self.print_capture_clause(capture_clause)?;
- self.print_fn_block_args(&decl)?;
+ self.print_fn_block_args(decl)?;
space(&mut self.s)?;
self.print_expr(body)?;
self.end()?; // need to close a box
self.cbox(INDENT_UNIT)?;
// head-box, will be closed by print-block after {
self.ibox(0)?;
- self.print_block_with_attrs(&blk, attrs)?;
+ self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Assign(ref lhs, ref rhs) => {
- self.print_expr(&lhs)?;
+ self.print_expr(lhs)?;
space(&mut self.s)?;
self.word_space("=")?;
- self.print_expr(&rhs)?;
+ self.print_expr(rhs)?;
}
ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
- self.print_expr(&lhs)?;
+ self.print_expr(lhs)?;
space(&mut self.s)?;
word(&mut self.s, op.node.to_string())?;
self.word_space("=")?;
- self.print_expr(&rhs)?;
+ self.print_expr(rhs)?;
}
ast::ExprKind::Field(ref expr, id) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ".")?;
self.print_ident(id.node)?;
}
ast::ExprKind::TupField(ref expr, id) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, ".")?;
self.print_usize(id.node)?;
}
ast::ExprKind::Index(ref expr, ref index) => {
- self.print_expr(&expr)?;
+ self.print_expr(expr)?;
word(&mut self.s, "[")?;
- self.print_expr(&index)?;
+ self.print_expr(index)?;
word(&mut self.s, "]")?;
}
ast::ExprKind::Range(ref start, ref end, limits) => {
- if let &Some(ref e) = start {
- self.print_expr(&e)?;
+ if let Some(ref e) = *start {
+ self.print_expr(e)?;
}
if limits == ast::RangeLimits::HalfOpen {
word(&mut self.s, "..")?;
} else {
word(&mut self.s, "...")?;
}
- if let &Some(ref e) = end {
- self.print_expr(&e)?;
+ if let Some(ref e) = *end {
+ self.print_expr(e)?;
}
}
ast::ExprKind::Path(None, ref path) => {
}
ast::ExprKind::Ret(ref result) => {
word(&mut self.s, "return")?;
- match *result {
- Some(ref expr) => {
- word(&mut self.s, " ")?;
- self.print_expr(&expr)?;
- }
- _ => ()
+ if let Some(ref expr) = *result {
+ word(&mut self.s, " ")?;
+ self.print_expr(expr)?;
}
}
ast::ExprKind::InlineAsm(ref a) => {
self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
s.popen()?;
- s.print_expr(&o)?;
+ s.print_expr(o)?;
s.pclose()?;
Ok(())
})?;
ast::ExprKind::Paren(ref e) => {
self.popen()?;
self.print_inner_attributes_inline(attrs)?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
self.pclose()?;
},
ast::ExprKind::Try(ref e) => {
ast::ExprKind::Catch(ref blk) => {
self.head("do catch")?;
space(&mut self.s)?;
- self.print_block_with_attrs(&blk, attrs)?
+ self.print_block_with_attrs(blk, attrs)?
}
}
self.ann.post(self, NodeExpr(expr))?;
self.print_pat(&loc.pat)?;
if let Some(ref ty) = loc.ty {
self.word_space(":")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
Ok(())
}
space(&mut self.s)?;
self.word_space("as")?;
let depth = path.segments.len() - qself.position;
- self.print_path(&path, false, depth, false)?;
+ self.print_path(path, false, depth, false)?;
}
word(&mut self.s, ">")?;
word(&mut self.s, "::")?;
self.commasep(
Inconsistent,
&data.types,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
comma = true;
}
self.commasep(
Inconsistent,
&data.inputs,
- |s, ty| s.print_type(&ty))?;
+ |s, ty| s.print_type(ty))?;
word(&mut self.s, ")")?;
if let Some(ref ty) = data.output {
self.space_if_not_bol()?;
self.word_space("->")?;
- self.print_type(&ty)?;
+ self.print_type(ty)?;
}
}
}
self.print_ident(path1.node)?;
if let Some(ref p) = *sub {
word(&mut self.s, "@")?;
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
}
PatKind::TupleStruct(ref path, ref elts, ddpos) => {
self.print_path(path, true, 0, false)?;
self.popen()?;
if let Some(ddpos) = ddpos {
- self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
if ddpos != 0 {
self.word_space(",")?;
}
word(&mut self.s, "..")?;
if ddpos != elts.len() {
word(&mut self.s, ",")?;
- self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
}
} else {
- self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
}
self.pclose()?;
}
PatKind::Tuple(ref elts, ddpos) => {
self.popen()?;
if let Some(ddpos) = ddpos {
- self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
if ddpos != 0 {
self.word_space(",")?;
}
word(&mut self.s, "..")?;
if ddpos != elts.len() {
word(&mut self.s, ",")?;
- self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
}
} else {
- self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(&p))?;
+ self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
if elts.len() == 1 {
word(&mut self.s, ",")?;
}
}
PatKind::Box(ref inner) => {
word(&mut self.s, "box ")?;
- self.print_pat(&inner)?;
+ self.print_pat(inner)?;
}
PatKind::Ref(ref inner, mutbl) => {
word(&mut self.s, "&")?;
if mutbl == ast::Mutability::Mutable {
word(&mut self.s, "mut ")?;
}
- self.print_pat(&inner)?;
+ self.print_pat(inner)?;
}
PatKind::Lit(ref e) => self.print_expr(&**e)?,
PatKind::Range(ref begin, ref end, ref end_kind) => {
- self.print_expr(&begin)?;
+ self.print_expr(begin)?;
space(&mut self.s)?;
match *end_kind {
RangeEnd::Included => word(&mut self.s, "...")?,
RangeEnd::Excluded => word(&mut self.s, "..")?,
}
- self.print_expr(&end)?;
+ self.print_expr(end)?;
}
PatKind::Slice(ref before, ref slice, ref after) => {
word(&mut self.s, "[")?;
self.commasep(Inconsistent,
&before[..],
- |s, p| s.print_pat(&p))?;
+ |s, p| s.print_pat(p))?;
if let Some(ref p) = *slice {
if !before.is_empty() { self.word_space(",")?; }
if p.node != PatKind::Wild {
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
word(&mut self.s, "..")?;
if !after.is_empty() { self.word_space(",")?; }
}
self.commasep(Inconsistent,
&after[..],
- |s, p| s.print_pat(&p))?;
+ |s, p| s.print_pat(p))?;
word(&mut self.s, "]")?;
}
PatKind::Mac(ref m) => self.print_mac(m, token::Paren)?,
space(&mut self.s)?;
self.word_space("|")?;
}
- self.print_pat(&p)?;
+ self.print_pat(p)?;
}
space(&mut self.s)?;
if let Some(ref e) = arm.guard {
self.word_space("if")?;
- self.print_expr(&e)?;
+ self.print_expr(e)?;
space(&mut self.s)?;
}
self.word_space("=>")?;
match arm.body.node {
ast::ExprKind::Block(ref blk) => {
// the block will close the pattern's ibox
- self.print_block_unclosed_indent(&blk, INDENT_UNIT)?;
+ self.print_block_unclosed_indent(blk, INDENT_UNIT)?;
// If it is a user-provided unsafe block, print a comma after it
if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules {
self.print_mutability(m)?;
word(&mut self.s, "self")?;
self.word_space(":")?;
- self.print_type(&typ)
+ self.print_type(typ)
}
}
}
self.word_space("->")?;
match decl.output {
ast::FunctionRetTy::Ty(ref ty) => {
- self.print_type(&ty)?;
+ self.print_type(ty)?;
self.maybe_print_comment(ty.span.lo)
}
ast::FunctionRetTy::Default(..) => unreachable!(),
Some(ref default) => {
space(&mut self.s)?;
self.word_space("=")?;
- self.print_type(&default)
+ self.print_type(default)
}
_ => Ok(())
}
ref bounds,
..}) => {
self.print_formal_lifetime_list(bound_lifetimes)?;
- self.print_type(&bounded_ty)?;
+ self.print_type(bounded_ty)?;
self.print_bounds(":", bounds)?;
}
ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime,
match decl.output {
ast::FunctionRetTy::Default(..) => unreachable!(),
ast::FunctionRetTy::Ty(ref ty) =>
- self.print_type(&ty)?
+ self.print_type(ty)?
}
self.end()?;
if self.next_comment().is_none() {
hardbreak(&mut self.s)?;
}
- loop {
- match self.next_comment() {
- Some(ref cmnt) => {
- self.print_comment(cmnt)?;
- self.cur_cmnt_and_lit.cur_cmnt += 1;
- }
- _ => break
- }
+ while let Some(ref cmnt) = self.next_comment() {
+ self.print_comment(cmnt)?;
+ self.cur_cmnt_and_lit.cur_cmnt += 1;
}
Ok(())
}
use tokenstream::TokenStream;
/// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
/// The expanded code uses the unstable `#[prelude_import]` attribute.
fn ignored_span(sp: Span) -> Span {
let mark = Mark::fresh();
None => return krate,
};
- let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
+ let crate_name = Symbol::intern(&alt_std_name.unwrap_or_else(|| name.to_string()));
krate.module.items.insert(0, P(ast::Item {
attrs: vec![attr::mk_attr_outer(DUMMY_SP,
// Add a special __test module to the crate that will contain code
// generated for the test harness
let (mod_, reexport) = mk_test_module(&mut self.cx);
- match reexport {
- Some(re) => folded.module.items.push(re),
- None => {}
+ if let Some(re) = reexport {
+ folded.module.items.push(re)
}
folded.module.items.push(mod_);
folded
let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
- ident: sym.clone(),
+ ident: sym,
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::Mod(reexport_mod),
}
/// Craft a span that will be ignored by the stability lint's
-/// call to codemap's is_internal check.
+/// call to codemap's `is_internal` check.
/// The expanded code calls some unstable functions in the test crate.
fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
Span { ctxt: cx.ctxt, ..sp }
}
}
- return has_test_attr && has_test_signature(i) == Yes;
+ has_test_attr && has_test_signature(i) == Yes
}
fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool {
`fn(&mut Bencher) -> ()`");
}
- return has_bench_attr && has_test_signature(i);
+ has_bench_attr && has_test_signature(i)
}
fn is_ignored(i: &ast::Item) -> bool {
ast::Unsafety::Normal,
dummy_spanned(ast::Constness::NotConst),
::abi::Abi::Rust, ast::Generics::default(), main_body);
- let main = P(ast::Item {
+ P(ast::Item {
ident: Ident::from_str("main"),
attrs: vec![main_attr],
id: ast::DUMMY_NODE_ID,
node: main,
vis: ast::Visibility::Public,
span: sp
- });
-
- return main;
+ })
}
fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
//! # Token Streams
//!
-//! TokenStreams represent syntactic objects before they are converted into ASTs.
+//! `TokenStream`s represent syntactic objects before they are converted into ASTs.
//! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
//! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
//!
//! ## Ownership
-//! TokenStreams are persistent data structures constructed as ropes with reference
-//! counted-children. In general, this means that calling an operation on a TokenStream
-//! (such as `slice`) produces an entirely new TokenStream from the borrowed reference to
-//! the original. This essentially coerces TokenStreams into 'views' of their subparts,
-//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
+//! `TokenStream`s are persistent data structures constructed as ropes with reference
+//! counted-children. In general, this means that calling an operation on a `TokenStream`
+//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
+//! the original. This essentially coerces `TokenStream`s into 'views' of their subparts,
+//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
//! ownership of the original.
use syntax_pos::{BytePos, Span, DUMMY_SP};
/// If the syntax extension is an MBE macro, it will attempt to match its
/// LHS token tree against the provided token tree, and if it finds a
/// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the `SubstNt`s it finds.
+/// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds.
///
/// The RHS of an MBE macro is the only place `SubstNt`s are substituted.
/// Nothing special happens to misnamed or misplaced `SubstNt`s.
iter_names
.filter_map(|&name| {
let dist = lev_distance(lookup, &name.as_str());
- match dist <= max_dist { // filter the unwanted cases
- true => Some((name, dist)),
- false => None,
+ if dist <= max_dist { // filter the unwanted cases
+ Some((name, dist))
+ } else {
+ None
}
})
.min_by_key(|&(_, val)| val) // extract the tuple containing the minimum edit distance
// move the read_i'th item out of the vector and map it
// to an iterator
let e = ptr::read(self.get_unchecked(read_i));
- let mut iter = f(e).into_iter();
+ let iter = f(e).into_iter();
read_i += 1;
- while let Some(e) = iter.next() {
+ for e in iter {
if write_i < read_i {
ptr::write(self.get_unchecked_mut(write_i), e);
write_i += 1;
// move the read_i'th item out of the vector and map it
// to an iterator
let e = ptr::read(self.get_unchecked(read_i));
- let mut iter = f(e).into_iter();
+ let iter = f(e).into_iter();
read_i += 1;
- while let Some(e) = iter.next() {
+ for e in iter {
if write_i < read_i {
ptr::write(self.get_unchecked_mut(write_i), e);
write_i += 1;
use ast::*;
use syntax_pos::Span;
use codemap::Spanned;
+use tokenstream::ThinTokenStream;
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum FnKind<'a> {
// definition in your trait impl:
// visit::walk_mac(self, _mac)
}
+ fn visit_mac_def(&mut self, _mac: &'ast ThinTokenStream, _id: NodeId) {
+ // Nothing to do
+ }
fn visit_path(&mut self, path: &'ast Path, _id: NodeId) {
walk_path(self, path)
}
walk_list!(visitor, visit_trait_item, methods);
}
ItemKind::Mac(ref mac) => visitor.visit_mac(mac),
- ItemKind::MacroDef(..) => {},
+ ItemKind::MacroDef(ref ts) => visitor.visit_mac_def(ts, item.id),
}
walk_list!(visitor, visit_attribute, &item.attrs);
}
visitor.visit_ty(ty);
visitor.visit_expr(expression)
}
- TyKind::TraitObject(ref bounds) => {
- walk_list!(visitor, visit_ty_param_bound, bounds);
- }
+ TyKind::TraitObject(ref bounds) |
TyKind::ImplTrait(ref bounds) => {
walk_list!(visitor, visit_ty_param_bound, bounds);
}
walk_fn_decl(visitor, declaration);
visitor.visit_block(body);
}
- FnKind::Method(_, ref sig, _, body) => {
+ FnKind::Method(_, sig, _, body) => {
visitor.visit_generics(&sig.generics);
walk_fn_decl(visitor, declaration);
visitor.visit_block(body);
}
ExprKind::InlineAsm(ref ia) => {
for &(_, ref input) in &ia.inputs {
- visitor.visit_expr(&input)
+ visitor.visit_expr(input)
}
for output in &ia.outputs {
visitor.visit_expr(&output.expr)
passed: usize,
failed: usize,
ignored: usize,
+ filtered_out: usize,
measured: usize,
metrics: MetricMap,
failures: Vec<(TestDesc, Vec<u8>)>,
passed: 0,
failed: 0,
ignored: 0,
+ filtered_out: 0,
measured: 0,
metrics: MetricMap::new(),
failures: Vec::new(),
} else {
self.write_pretty("FAILED", term::color::RED)?;
}
- let s = format!(". {} passed; {} failed; {} ignored; {} measured\n\n",
+ let s = format!(". {} passed; {} failed; {} ignored; {} measured; {} filtered out\n\n",
self.passed,
self.failed,
self.ignored,
- self.measured);
+ self.measured,
+ self.filtered_out);
self.write_plain(&s)?;
return Ok(success);
}
fn callback<T: Write>(event: &TestEvent, st: &mut ConsoleTestState<T>) -> io::Result<()> {
match (*event).clone() {
TeFiltered(ref filtered_tests) => st.write_run_start(filtered_tests.len()),
+ TeFilteredOut(filtered_out) => Ok(st.filtered_out = filtered_out),
TeWait(ref test, padding) => st.write_test_start(test, padding),
TeTimeout(ref test) => st.write_timeout(test),
TeResult(test, result, stdout) => {
passed: 0,
failed: 0,
ignored: 0,
+ filtered_out: 0,
measured: 0,
max_name_len: 10,
metrics: MetricMap::new(),
TeWait(TestDesc, NamePadding),
TeResult(TestDesc, TestResult, Vec<u8>),
TeTimeout(TestDesc),
+ TeFilteredOut(usize),
}
pub type MonitorMsg = (TestDesc, TestResult, Vec<u8>);
use std::collections::HashMap;
use std::sync::mpsc::RecvTimeoutError;
+ let tests_len = tests.len();
+
let mut filtered_tests = filter_tests(opts, tests);
if !opts.bench_benchmarks {
filtered_tests = convert_benchmarks_to_tests(filtered_tests);
}
+ let filtered_out = tests_len - filtered_tests.len();
+ callback(TeFilteredOut(filtered_out))?;
+
let filtered_descs = filtered_tests.iter()
.map(|t| t.desc.clone())
.collect();
+++ /dev/null
-Subproject commit 2e6417f6af5218a29a8ee72ed17af085560b9b9c
// aux-build:bang_proc_macro.rs
#![feature(proc_macro)]
+#![allow(unused_macros)]
#[macro_use]
extern crate derive_foo;
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Formerly this ICEd with the following message:
+// Tried to project an inherited associated type during coherence checking,
+// which is currently not supported.
+//
+// Now we expect to run into a more user-friendly cycle error instead.
+
+#![feature(specialization)]
+
+trait Trait<T> { type Assoc; }
+//~^ unsupported cyclic reference between types/traits detected [E0391]
+
+impl<T> Trait<T> for Vec<T> {
+ type Assoc = ();
+}
+
+impl Trait<u8> for Vec<u8> {}
+
+impl<T> Trait<T> for String {
+ type Assoc = ();
+}
+
+impl Trait<<Vec<u8> as Trait<u8>>::Assoc> for String {}
+
+fn main() {}
// gate-test-allow_internal_unstable
+#![allow(unused_macros)]
+
macro_rules! bar {
() => {
// more layers don't help:
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
#[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps
macro_rules! foo {
() => {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! invalid {
_ => (); //~ ERROR invalid macro matcher
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! test { ($wrong:t_ty ..) => () }
//~^ ERROR: invalid fragment specifier `t_ty`
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! assign {
(($($a:tt)*) = ($($b:tt))*) => { //~ ERROR expected `*` or `+`
$($a)* = $($b)*
// except according to those terms.
#![deny(missing_fragment_specifier)] //~ NOTE lint level defined here
+#![allow(unused_macros)]
macro_rules! m { ($i) => {} }
//~^ ERROR missing fragment specifier
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ include!(line!()); //~ ERROR argument must be a string literal
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! foo {
( $()* ) => {};
//~^ ERROR repetition matches empty token tree
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
mod macros_cant_escape_fns {
fn f() {
macro_rules! m { () => { 3 + 4 } }
//
// Check the macro follow sets (see corresponding rpass test).
+#![allow(unused_macros)]
+
// FOLLOW(pat) = {FatArrow, Comma, Eq, Or, Ident(if), Ident(in)}
macro_rules! follow_pat {
($p:pat ()) => {}; //~ERROR `$p:pat` is followed by `(`
// Regression test for issue #25436: check that things which can be
// followed by any token also permit X* to come afterwards.
+#![allow(unused_macros)]
+
macro_rules! foo {
( $a:expr $($b:tt)* ) => { }; //~ ERROR not allowed for `expr` fragments
( $a:ty $($b:tt)* ) => { }; //~ ERROR not allowed for `ty` fragments
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! errors_everywhere {
($ty:ty <) => (); //~ ERROR `$ty:ty` is followed by `<`, which is not allowed for `ty`
($ty:ty < foo ,) => (); //~ ERROR `$ty:ty` is followed by `<`, which is not allowed for `ty`
// aux-build:two_macros.rs
+#![allow(unused_macros)]
+
macro_rules! foo { () => {} }
macro_rules! macro_one { () => {} }
#[macro_use(macro_two)] extern crate two_macros;
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-struct Foo;
-
-fn main() {
- let a: Result<(), Foo> = Ok(());
- a.unwrap();
- //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
- //~| NOTE the following trait bounds were not satisfied: `Foo : std::fmt::Debug`
-}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
// Issue #21370
macro_rules! test {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! test {
($e:expr +) => () //~ ERROR not allowed for `expr` fragments
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(unused_macros)]
+
+// Most simple case
+macro_rules! unused { //~ ERROR: unused macro definition
+ () => {};
+}
+
+// Test macros created by macros
+macro_rules! create_macro {
+ () => {
+ macro_rules! m { //~ ERROR: unused macro definition
+ () => {};
+ }
+ };
+}
+create_macro!();
+
+#[allow(unused_macros)]
+mod bar {
+ // Test that putting the #[deny] close to the macro's definition
+ // works.
+
+ #[deny(unused_macros)]
+ macro_rules! unused { //~ ERROR: unused macro definition
+ () => {};
+ }
+}
+
+fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro_rules! macro_rules { () => {} } //~ ERROR user-defined macros may not be named `macro_rules`
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
pub struct Point {
pub x: f32,
pub y: f32,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
#![crate_type="rlib"]
#[cfg(rpass1)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
pub struct Point {
pub x: f32,
pub y: f32,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
pub struct Point {
pub x: f32,
pub y: f32,
// ignore-tidy-linelength
-// aux-build:extern_crate.rs
//[rpass1] compile-flags: -g
//[rpass2] compile-flags: -g
//[rpass3] compile-flags: -g -Zremap-path-prefix-from={{src-base}} -Zremap-path-prefix-to=/the/src
// except according to those terms.
// revisions:rpass1 rpass2 rpass3
-// compile-flags: -Z query-dep-graph -g
+// compile-flags: -Z query-dep-graph -g -Zincremental-cc
// aux-build:extern_crate.rs
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
#![allow(warnings)]
#![crate_name = "a"]
#![crate_type = "rlib"]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
// no-prefer-dynamic
#![crate_type="rlib"]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
#![crate_type="rlib"]
#[cfg(rpass1)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: -Z incremental-cc
-
#![crate_type="rlib"]
#[cfg(rpass1)]
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41697. Using dump-mir was triggering
+// artificial cycles: during type-checking, we had to get the MIR for
+// the constant expressions in `[u8; 2]`, which in turn would trigger
+// an attempt to get the item-path, which in turn would request the
+// types of the impl, which would trigger a cycle. We suppressed this
+// cycle now by forcing mir-dump to avoid asking for types of an impl.
+
+#![feature(rustc_attrs)]
+
+use std::sync::Arc;
+
+trait Foo {
+ fn get(&self) -> [u8; 2];
+}
+
+impl Foo for [u8; 2] {
+ fn get(&self) -> [u8; 2] {
+ *self
+ }
+}
+
+struct Bar<T: ?Sized>(T);
+
+fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
+ x
+}
+
+fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
+ x
+}
+
+fn main() {
+ let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
+ assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
+
+ let x: Arc<Foo + Send> = Arc::new([3, 4]);
+ assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
+}
extern crate rustc_lint;
extern crate rustc_metadata;
extern crate rustc_errors;
+extern crate rustc_trans;
extern crate syntax;
use rustc::dep_graph::DepGraph;
let descriptions = Registry::new(&rustc::DIAGNOSTICS);
let dep_graph = DepGraph::new(opts.build_dep_graph());
- let cstore = Rc::new(CStore::new(&dep_graph));
+ let cstore = Rc::new(CStore::new(&dep_graph, Box::new(rustc_trans::LlvmMetadataLoader)));
let sess = build_session(opts, &dep_graph, None, descriptions, cstore.clone());
+ rustc_trans::init(&sess);
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
(sess, cstore)
}
extern crate rustc;
extern crate rustc_plugin;
+extern crate rustc_trans;
#[link(name = "llvm-function-pass", kind = "static")]
#[link(name = "llvm-module-pass", kind = "static")]
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-stage1
+// ignore-cross-compile
+#![feature(quote, rustc_private)]
+
+extern crate syntax;
+
+use syntax::ext::base::{ExtCtxt, DummyResolver};
+use syntax::ext::expand::ExpansionConfig;
+use syntax::parse::ParseSess;
+use syntax::codemap::{FilePathMapping, dummy_spanned};
+use syntax::print::pprust::expr_to_string;
+use syntax::ast::{Expr, ExprKind, LitKind, StrStyle, RangeLimits};
+use syntax::symbol::Symbol;
+use syntax::ptr::P;
+
+use std::rc::Rc;
+
+fn main() {
+ let parse_sess = ParseSess::new(FilePathMapping::empty());
+ let exp_cfg = ExpansionConfig::default("issue_35829".to_owned());
+ let mut resolver = DummyResolver;
+ let cx = ExtCtxt::new(&parse_sess, exp_cfg, &mut resolver);
+
+ // check byte string
+ let byte_string = quote_expr!(&cx, b"one");
+ let byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"one".to_vec()));
+ assert_eq!(byte_string.node, ExprKind::Lit(P(dummy_spanned(byte_string_lit_kind))));
+
+ // check raw byte string
+ let raw_byte_string = quote_expr!(&cx, br###"#"two"#"###);
+ let raw_byte_string_lit_kind = LitKind::ByteStr(Rc::new(b"#\"two\"#".to_vec()));
+ assert_eq!(raw_byte_string.node, ExprKind::Lit(P(dummy_spanned(raw_byte_string_lit_kind))));
+
+ // check dotdotdot
+ let closed_range = quote_expr!(&cx, 0 ... 1);
+ assert_eq!(closed_range.node, ExprKind::Range(
+ Some(quote_expr!(&cx, 0)),
+ Some(quote_expr!(&cx, 1)),
+ RangeLimits::Closed
+ ));
+
+ // test case from 35829
+ let expr_35829 = quote_expr!(&cx, std::io::stdout().write(b"one"));
+ assert_eq!(expr_to_string(&expr_35829), r#"std::io::stdout().write(b"one")"#);
+}
extern crate syntax;
extern crate syntax_pos;
-use syntax::ast::Ident;
-use syntax::parse::token;
+use syntax::ast::{Ident, Name};
+use syntax::parse::token::{self, Token, Lit};
use syntax::tokenstream::TokenTree;
fn main() {
let true_tok = token::Ident(Ident::from_str("true"));
assert!(quote!(true).eq_unspanned(&true_tok.into()));
+
+ // issue #35829, extended check to proc_macro.
+ let triple_dot_tok = Token::DotDotDot;
+ assert!(quote!(...).eq_unspanned(&triple_dot_tok.into()));
+
+ let byte_str_tok = Token::Literal(Lit::ByteStr(Name::intern("one")), None);
+ assert!(quote!(b"one").eq_unspanned(&byte_str_tok.into()));
+
+ let byte_str_raw_tok = Token::Literal(Lit::ByteStrRaw(Name::intern("#\"two\"#"), 3), None);
+ assert!(quote!(br###"#"two"#"###).eq_unspanned(&byte_str_raw_tok.into()));
+
+ let str_raw_tok = Token::Literal(Lit::StrRaw(Name::intern("#\"three\"#"), 2), None);
+ assert!(quote!(r##"#"three"#"##).eq_unspanned(&str_raw_tok.into()));
}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-Zdump-mir=NEVER_MATCHED
-
-// Regression test for #41697. Using dump-mir was triggering
-// artificial cycles: during type-checking, we had to get the MIR for
-// the constant expressions in `[u8; 2]`, which in turn would trigger
-// an attempt to get the item-path, which in turn would request the
-// types of the impl, which would trigger a cycle. We supressed this
-// cycle now by forcing mir-dump to avoid asking for types of an impl.
-
-#![feature(rustc_attrs)]
-
-use std::sync::Arc;
-
-trait Foo {
- fn get(&self) -> [u8; 2];
-}
-
-impl Foo for [u8; 2] {
- fn get(&self) -> [u8; 2] {
- *self
- }
-}
-
-struct Bar<T: ?Sized>(T);
-
-fn unsize_fat_ptr<'a>(x: &'a Bar<Foo + Send + 'a>) -> &'a Bar<Foo + 'a> {
- x
-}
-
-fn unsize_nested_fat_ptr(x: Arc<Foo + Send>) -> Arc<Foo> {
- x
-}
-
-fn main() {
- let x: Box<Bar<Foo + Send>> = Box::new(Bar([1,2]));
- assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]);
-
- let x: Arc<Foo + Send> = Arc::new([3, 4]);
- assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]);
-}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+/// A compile-time map from identifiers to arbitrary (heterogeneous) expressions
+macro_rules! ident_map {
+ ( $name:ident = { $($key:ident => $e:expr,)* } ) => {
+ macro_rules! $name {
+ $(
+ ( $key ) => { $e };
+ )*
+ // Empty invocation expands to nothing. Needed when the map is empty.
+ () => {};
+ }
+ };
+}
+
+ident_map!(my_map = {
+ main => 0,
+});
+
+fn main() {
+ my_map!(main);
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #41936. The coerce-unsized trait check in
+// coherence was using subtyping, which triggered variance
+// computation, which failed because it required type info for fields
+// that had not (yet) been computed.
+
+#![feature(unsize)]
+#![feature(coerce_unsized)]
+
+use std::{marker,ops};
+
+// Change the array to a non-array, and error disappears
+// Adding a new field to the end keeps the error
+struct LogDataBuf([u8;8]);
+
+struct Aref<T: ?Sized>
+{
+ // Inner structure triggers the error, removing the inner removes the message.
+ ptr: Box<ArefInner<T>>,
+}
+impl<T: ?Sized + marker::Unsize<U>, U: ?Sized> ops::CoerceUnsized<Aref<U>> for Aref<T> {}
+
+struct ArefInner<T: ?Sized>
+{
+ // Even with this field commented out, the error is raised.
+ data: T,
+}
+
+fn main(){}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Make sure we don't crash with a cycle error during coherence.
+
+#![feature(specialization)]
+
+trait Trait<T> {
+ type Assoc;
+}
+
+impl<T> Trait<T> for Vec<T> {
+ default type Assoc = ();
+}
+
+impl Trait<u8> for Vec<u8> {
+ type Assoc = u8;
+}
+
+impl<T> Trait<T> for String {
+ type Assoc = ();
+}
+
+impl Trait<<Vec<u8> as Trait<u8>>::Assoc> for String {}
+
+fn main() {}
pub fn rust0() {}
// @has - '//code' 'fn rust1()'
pub extern "Rust" fn rust1() {}
- // @has - '//code' 'extern fn c0()'
+ // @has - '//code' 'extern "C" fn c0()'
pub extern fn c0() {}
- // @has - '//code' 'extern fn c1()'
+ // @has - '//code' 'extern "C" fn c1()'
pub extern "C" fn c1() {}
// @has - '//code' 'extern "system" fn system0()'
pub extern "system" fn system0() {}
// @has - '//code' 'impl Bar for fn()'
impl Bar for fn() {}
-// @has - '//code' 'impl Bar for extern fn()'
+// @has - '//code' 'impl Bar for extern "C" fn()'
impl Bar for extern fn() {}
// @has - '//code' 'impl Bar for extern "system" fn()'
impl Bar for extern "system" fn() {}
extern crate rustdoc_ffi as lib;
-// @has ffi/fn.foreigner.html //pre 'pub unsafe extern fn foreigner(cold_as_ice: u32)'
+// @has ffi/fn.foreigner.html //pre 'pub unsafe extern "C" fn foreigner(cold_as_ice: u32)'
pub use lib::foreigner;
extern "C" {
- // @has ffi/fn.another.html //pre 'pub unsafe extern fn another(cold_as_ice: u32)'
+ // @has ffi/fn.another.html //pre 'pub unsafe extern "C" fn another(cold_as_ice: u32)'
pub fn another(cold_as_ice: u32);
}
extern {
// @has issue_22038/fn.foo1.html \
- // '//*[@class="rust fn"]' 'pub unsafe extern fn foo1()'
+ // '//*[@class="rust fn"]' 'pub unsafe extern "C" fn foo1()'
pub fn foo1();
}
}
// @has issue_22038/fn.bar.html \
-// '//*[@class="rust fn"]' 'pub extern fn bar()'
+// '//*[@class="rust fn"]' 'pub extern "C" fn bar()'
pub extern fn bar() {}
// @has issue_22038/fn.baz.html \
// except according to those terms.
extern "C" {
- // @has variadic/fn.foo.html //pre 'pub unsafe extern fn foo(x: i32, ...)'
+ // @has variadic/fn.foo.html //pre 'pub unsafe extern "C" fn foo(x: i32, ...)'
pub fn foo(x: i32, ...);
}
17 | once::<&str>("str").fuse().filter(|a: &str| true).count();
| ^^^^^
|
- = note: the method `count` exists but the following trait bounds were not satisfied: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`, `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
+ = note: the method `count` exists but the following trait bounds were not satisfied:
+ `[closure@$DIR/issue-36053-2.rs:17:39: 17:53] : std::ops::FnMut<(&_,)>`
+ `std::iter::Filter<std::iter::Fuse<std::iter::Once<&str>>, [closure@$DIR/issue-36053-2.rs:17:39: 17:53]> : std::iter::Iterator`
error[E0281]: type mismatch: `[closure@$DIR/issue-36053-2.rs:17:39: 17:53]` implements the trait `for<'r> std::ops::FnMut<(&'r str,)>`, but the trait `for<'r> std::ops::FnMut<(&'r &str,)>` is required
--> $DIR/issue-36053-2.rs:17:32
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Foo;
+
+fn main() {
+ let a: Result<(), Foo> = Ok(());
+ a.unwrap();
+ //~^ ERROR no method named `unwrap` found for type `std::result::Result<(), Foo>`
+ //~| NOTE the method `unwrap` exists but the following trait bounds were not satisfied
+}
--- /dev/null
+error: no method named `unwrap` found for type `std::result::Result<(), Foo>` in the current scope
+ --> $DIR/method-help-unsatisfied-bound.rs:15:7
+ |
+15 | a.unwrap();
+ | ^^^^^^
+ |
+ = note: the method `unwrap` exists but the following trait bounds were not satisfied:
+ `Foo : std::fmt::Debug`
+
+error: aborting due to previous error
+
-Subproject commit 13d92c64d0153d95dbabeb49b828bbbef4b1bb34
+Subproject commit 397359840ecad02d5fe69b2a0cf328e98235ffea
}
fn main() {
- let docs = env::args().nth(1).unwrap();
+ let docs = env::args_os().nth(1).unwrap();
let docs = env::current_dir().unwrap().join(docs);
let mut errors = false;
walk(&mut HashMap::new(), &docs, &docs, &mut errors);
struct FileEntry {
source: String,
ids: HashSet<String>,
- names: HashSet<String>,
}
type Cache = HashMap<PathBuf, FileEntry>;
impl FileEntry {
fn parse_ids(&mut self, file: &Path, contents: &str, errors: &mut bool) {
if self.ids.is_empty() {
- with_attrs_in_source(contents, " id", |fragment, i| {
+ with_attrs_in_source(contents, " id", |fragment, i, _| {
let frag = fragment.trim_left_matches("#").to_owned();
if !self.ids.insert(frag) {
*errors = true;
});
}
}
-
- fn parse_names(&mut self, contents: &str) {
- if self.names.is_empty() {
- with_attrs_in_source(contents, " name", |fragment, _| {
- let frag = fragment.trim_left_matches("#").to_owned();
- self.names.insert(frag);
- });
- }
- }
}
fn walk(cache: &mut Cache, root: &Path, dir: &Path, errors: &mut bool) {
file: &Path,
errors: &mut bool)
-> Option<PathBuf> {
- // ignore js files as they are not prone to errors as the rest of the
- // documentation is and they otherwise bring up false positives.
- if file.extension().and_then(|s| s.to_str()) == Some("js") {
- return None;
- }
-
- // ignore handlebars files as they use {{}} to build links, we only
- // want to test the generated files
- if file.extension().and_then(|s| s.to_str()) == Some("hbs") {
+    // Ignore non-HTML files.
+ if file.extension().and_then(|s| s.to_str()) != Some("html") {
return None;
}
return None;
}
- // mdbook uses the HTML <base> tag to handle links for subdirectories, which
- // linkchecker doesn't support
- if file.to_str().unwrap().contains("unstable-book") {
- return None;
- }
-
- let res = load_file(cache, root, PathBuf::from(file), SkipRedirect);
+ let res = load_file(cache, root, file, SkipRedirect);
let (pretty_file, contents) = match res {
Ok(res) => res,
Err(_) => return None,
cache.get_mut(&pretty_file)
.unwrap()
.parse_ids(&pretty_file, &contents, errors);
- cache.get_mut(&pretty_file)
- .unwrap()
- .parse_names(&contents);
}
// Search for anything that's the regex 'href[ ]*=[ ]*".*?"'
- with_attrs_in_source(&contents, " href", |url, i| {
+ with_attrs_in_source(&contents, " href", |url, i, base| {
// Ignore external URLs
if url.starts_with("http:") || url.starts_with("https:") ||
url.starts_with("javascript:") || url.starts_with("ftp:") ||
// Once we've plucked out the URL, parse it using our base url and
// then try to extract a file path.
let mut path = file.to_path_buf();
- if !url.is_empty() {
+ if !base.is_empty() || !url.is_empty() {
path.pop();
- for part in Path::new(url).components() {
+ for part in Path::new(base).join(url).components() {
match part {
Component::Prefix(_) |
Component::RootDir => panic!(),
}
}
- if let Some(extension) = path.extension() {
- // don't check these files
- if extension == "png" {
- return;
- }
- }
-
// Alright, if we've found a file name then this file had better
// exist! If it doesn't then we register and print an error.
if path.exists() {
pretty_path.display());
return;
}
- let res = load_file(cache, root, path.clone(), FromRedirect(false));
+ if let Some(extension) = path.extension() {
+        // Ignore non-HTML files.
+ if extension != "html" {
+ return;
+ }
+ }
+ let res = load_file(cache, root, &path, FromRedirect(false));
let (pretty_path, contents) = match res {
Ok(res) => res,
Err(LoadError::IOError(err)) => {
- panic!(format!("error loading {}: {}", path.display(), err));
+ panic!("error loading {}: {}", path.display(), err);
}
Err(LoadError::BrokenRedirect(target, _)) => {
*errors = true;
let entry = &mut cache.get_mut(&pretty_path).unwrap();
entry.parse_ids(&pretty_path, &contents, errors);
- entry.parse_names(&contents);
- if !(entry.ids.contains(*fragment) || entry.names.contains(*fragment)) {
+ if !entry.ids.contains(*fragment) {
*errors = true;
- print!("{}:{}: broken link fragment ",
+ print!("{}:{}: broken link fragment ",
pretty_file.display(),
i + 1);
println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
fn load_file(cache: &mut Cache,
root: &Path,
- mut file: PathBuf,
+ file: &Path,
redirect: Redirect)
-> Result<(PathBuf, String), LoadError> {
let mut contents = String::new();
None
}
Entry::Vacant(entry) => {
- let mut fp = File::open(file.clone()).map_err(|err| {
+ let mut fp = File::open(file).map_err(|err| {
if let FromRedirect(true) = redirect {
- LoadError::BrokenRedirect(file.clone(), err)
+ LoadError::BrokenRedirect(file.to_path_buf(), err)
} else {
LoadError::IOError(err)
}
entry.insert(FileEntry {
source: contents.clone(),
ids: HashSet::new(),
- names: HashSet::new(),
});
}
maybe
}
};
- file.pop();
- match maybe_redirect.map(|url| file.join(url)) {
+ match maybe_redirect.map(|url| file.parent().unwrap().join(url)) {
Some(redirect_file) => {
- let path = PathBuf::from(redirect_file);
- load_file(cache, root, path, FromRedirect(true))
+ load_file(cache, root, &redirect_file, FromRedirect(true))
}
None => Ok((pretty_file, contents)),
}
})
}
-fn with_attrs_in_source<F: FnMut(&str, usize)>(contents: &str, attr: &str, mut f: F) {
+fn with_attrs_in_source<F: FnMut(&str, usize, &str)>(contents: &str, attr: &str, mut f: F) {
+ let mut base = "";
for (i, mut line) in contents.lines().enumerate() {
while let Some(j) = line.find(attr) {
let rest = &line[j + attr.len()..];
+ // The base tag should always be the first link in the document so
+ // we can get away with using one pass.
+ let is_base = line[..j].ends_with("<base");
line = rest;
let pos_equals = match rest.find("=") {
Some(i) => i,
Some(i) => &rest[..i],
None => continue,
};
- f(url, i)
+ if is_base {
+ base = url;
+ continue;
+ }
+ f(url, i, base)
}
}
}
--- /dev/null
+Subproject commit daa2a05ebe7b8d07a309e8891ebc548652362954
"openssl", // BSD+advertising clause, cargo, mdbook
"pest", // MPL2, mdbook via handlebars
"thread-id", // Apache-2.0, mdbook
+ "strings", // this is actually MIT/Apache-2.0 but it's not in the manifest yet
];
pub fn check(path: &Path, bad: &mut bool) {
"src/libbacktrace",
"src/compiler-rt",
"src/rustllvm",
- "src/rust-installer",
"src/liblibc",
"src/vendor",
"src/rt/hoedown",
"src/tools/cargo",
"src/tools/rls",
+ "src/tools/rust-installer",
];
skip.iter().any(|p| path.ends_with(p))
}