[submodule "src/dlmalloc"]
path = src/dlmalloc
url = https://github.com/alexcrichton/dlmalloc-rs.git
-[submodule "src/binaryen"]
- path = src/binaryen
- url = https://github.com/alexcrichton/binaryen.git
[submodule "src/doc/rust-by-example"]
path = src/doc/rust-by-example
url = https://github.com/rust-lang/rust-by-example
[submodule "src/llvm-emscripten"]
path = src/llvm-emscripten
url = https://github.com/rust-lang/llvm
+[submodule "src/stdsimd"]
+ path = src/stdsimd
+ url = https://github.com/rust-lang-nursery/stdsimd
+[submodule "src/tools/lld"]
+ path = src/tools/lld
+ url = https://github.com/rust-lang/lld.git
# OSX 10.7 and `xcode7` is the latest Xcode able to compile LLVM for 10.7.
- env: >
RUST_CHECK_TARGET=dist
- RUST_CONFIGURE_ARGS="--build=i686-apple-darwin --enable-extended --enable-profiler --enable-emscripten"
+ RUST_CONFIGURE_ARGS="--build=i686-apple-darwin --enable-full-tools --enable-profiler"
SRC=.
DEPLOY=1
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
- env: >
RUST_CHECK_TARGET=dist
- RUST_CONFIGURE_ARGS="--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios --enable-extended --enable-sanitizers --enable-profiler --enable-emscripten"
+ RUST_CONFIGURE_ARGS="--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios --enable-full-tools --enable-sanitizers --enable-profiler"
SRC=.
DEPLOY=1
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
python x.py build
```
-If you are seeing build failure when compiling `rustc_binaryen`, make sure the path
-length of the rust folder is not longer than 22 characters.
-
#### Specifying an ABI
[specifying-an-abi]: #specifying-an-abi
# 32/64 bit MSVC and GNU deployment
- RUST_CONFIGURE_ARGS: >
--build=x86_64-pc-windows-msvc
- --enable-extended
+ --enable-full-tools
--enable-profiler
- --enable-emscripten
SCRIPT: python x.py dist
DEPLOY: 1
- RUST_CONFIGURE_ARGS: >
--build=i686-pc-windows-msvc
--target=i586-pc-windows-msvc
- --enable-extended
+ --enable-full-tools
--enable-profiler
- --enable-emscripten
SCRIPT: python x.py dist
DEPLOY: 1
- MSYS_BITS: 32
- RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-extended --enable-emscripten
+ RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-full-tools
SCRIPT: python x.py dist
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
DEPLOY: 1
- MSYS_BITS: 64
SCRIPT: python x.py dist
- RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-extended --enable-emscripten
+ RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-full-tools
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
# bootstrap)
#codegen-backends = ["llvm"]
+# This is the name of the directory in which codegen backends will get installed
+#codegen-backends-dir = "codegen-backends"
+
# Flag indicating whether `libstd` calls an imported function to handle basic IO
# when targeting WebAssembly. Enable this to debug tests for the `wasm32-unknown-unknown`
# target, as without this option the test output will not be captured.
#wasm-syscall = false
+# Indicates whether LLD will be compiled and made available in the sysroot for
+# rustc to execute.
+#lld = false
+
# =============================================================================
# Options for specific targets
#
"crypto-hash 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"curl 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.0.0"
dependencies = [
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "derive-new"
-version = "0.5.0"
+version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.12.13 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "ena"
-version = "0.9.1"
+version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "env_logger"
-version = "0.5.3"
+version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"atty 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"termcolor 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "humantime"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "idna"
version = "0.1.4"
dependencies = [
"chrono 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.29.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"handlebars 0.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "proc-macro2"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "proc_macro"
version = "0.0.0"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "quote"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "racer"
version = "2.0.12"
"cargo 0.26.0",
"cargo_metadata 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy_lints 0.0.186 (registry+https://github.com/rust-lang/crates.io-index)",
- "env_logger 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"json 0.11.12 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "derive-new 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "derive-new 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
"radix_trie 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-data 0.15.0 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
]
-[[package]]
-name = "rustc_binaryen"
-version = "0.0.0"
-dependencies = [
- "cc 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
- "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "rustc_borrowck"
version = "0.0.0"
version = "0.0.0"
dependencies = [
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "ena 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ena 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
dependencies = [
"ar 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"arena 0.0.0",
- "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"graphviz 0.0.0",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"cc 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"flate2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"jobserver 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_allocator 0.0.0",
"rustc_apfloat 0.0.0",
"rustc_back 0.0.0",
- "rustc_binaryen 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
version = "0.3.8"
dependencies = [
"cargo_metadata 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "derive-new 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "derive-new 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "syn"
+version = "0.12.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "synom"
version = "0.11.3"
[[package]]
name = "tidy"
version = "0.1.0"
+dependencies = [
+ "serde 1.0.27 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.27 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "time"
version = "0.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "unicode-xid"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "unreachable"
version = "1.0.0"
"checksum crypto-hash 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "34903878eec1694faf53cae8473a088df333181de421d4d3d48061d6559fe602"
"checksum curl 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b70fd6394677d3c0e239ff4be6f2b3176e171ffd1c23ffdc541e78dea2b8bb5e"
"checksum curl-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f46e49c7125131f5afaded06944d6888b55cbdf8eba05dae73c954019b907961"
-"checksum derive-new 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "415f627ab054041c3eb748c2e1da0ef751989f5f0c386b63a098e545854a98ba"
+"checksum derive-new 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "92f8b8e1d6c8a5f5ea0849a0e4c55941576115c62d3fc425e96918bbbeb3d3c2"
"checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
"checksum docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d8acd393692c503b168471874953a2531df0e9ab77d0b6bbc582395743300a4a"
"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
"checksum duct 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e45aa15fe0a8a8f511e6d834626afd55e49b62e5c8802e18328a87e8a8f6065c"
"checksum either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "740178ddf48b1a9e878e6d6509a1442a2d42fd2928aae8e7a6f8a36fb01981b3"
-"checksum ena 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1733e41a3c37b0893685933d09dcb0c30269aa5d14dc5cafebf4bcded1e58225"
+"checksum ena 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f8b449f3b18c89d2dbe40548d2ee4fa58ea0a08b761992da6ecb9788e4688834"
"checksum endian-type 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b"
-"checksum env_logger 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f15f0b172cb4f52ed5dbf47f774a387cd2315d1bf7894ab5af9b083ae27efa5a"
+"checksum env_logger 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "f3cc21490995c841d68e00276eba02071ebb269ec24011d5728bd00eabd39e31"
"checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3"
"checksum error-chain 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6930e04918388a9a2e41d518c25cf679ccafe26733fb4127dbf21993f2575d46"
"checksum failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "934799b6c1de475a012a02dab0ace1ace43789ee4b99bcfbf1a2e3e8ced5de82"
"checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa"
"checksum hex 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "459d3cf58137bb02ad4adeef5036377ff59f066dbb82517b7192e3a5462a2abc"
"checksum home 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9f25ae61099d8f3fee8b483df0bd4ecccf4b2731897aad40d50eca1b641fe6db"
+"checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
"checksum if_chain 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "61bb90bdd39e3af69b0172dfc6130f6cd6332bf040fbb9bdd4401d37adbd48b8"
"checksum ignore 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bb2f0238094bd1b41800fb6eb9b16fdd5e9832ed6053ed91409f0cd5bf28dcfd"
"checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831"
"checksum pest 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0a6dda33d67c26f0aac90d324ab2eb7239c819fc7b2552fe9faa4fe88441edc8"
"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
+"checksum proc-macro2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cd07deb3c6d1d9ff827999c7f9b04cdfd66b1b17ae508e14fe47b620f2282ae0"
"checksum pulldown-cmark 0.0.15 (registry+https://github.com/rust-lang/crates.io-index)" = "378e941dbd392c101f2cb88097fa4d7167bc421d4b88de3ff7dbee503bc3233b"
"checksum pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d6fdf85cda6cadfae5428a54661d431330b312bc767ddbc57adbedc24da66e32"
"checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
"checksum quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "07589615d719a60c8dd8a4622e7946465dfef20d1a428f969e3443e7386d5f45"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
+"checksum quote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eca14c727ad12702eb4b6bfb5a232287dcf8385cb8ca83a3eeaf6519c44c408"
"checksum racer 2.0.12 (registry+https://github.com/rust-lang/crates.io-index)" = "034f1c4528581c40a60e96875467c03315868084e08ff4ceb46a00f7be3b16b4"
"checksum radix_trie 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "211c49b6a9995cac0fd1dd9ca60b42cf3a51e151a12eb954b3a9e75513426ee8"
"checksum rand 0.3.20 (registry+https://github.com/rust-lang/crates.io-index)" = "512870020642bb8c221bf68baa1b2573da814f6ccfe5c9699b1c303047abe9b1"
"checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
"checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
+"checksum syn 0.12.13 (registry+https://github.com/rust-lang/crates.io-index)" = "517f6da31bc53bf080b9a77b29fbd0ff8da2f5a2ebd24c73c2238274a94ac7cb"
"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
"checksum synstructure 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a761d12e6d8dcb4dcf952a7a89b475e3a9d69e4a69307e01a470977642914bd"
"checksum syntex_errors 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9e52bffe6202cfb67587784cf23e0ec5bf26d331eef4922a16d5c42e12aa1e9b"
"checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
+"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
"checksum url 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fa35e768d4daf1d85733418a49fb42e10d7f633e394fccab4ab7aba897053fe2"
"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
+++ /dev/null
-Subproject commit 17841e155edf858c8ea7802dd5f5ecbef54b989f
// flesh out rpath support more fully in the future.
cmd.arg("-Z").arg("osx-rpath-install-name");
Some("-Wl,-rpath,@loader_path/../lib")
- } else if !target.contains("windows") {
+ } else if !target.contains("windows") && !target.contains("wasm32") {
Some("-Wl,-rpath,$ORIGIN/../lib")
} else {
None
self.build_dir = os.path.join(os.getcwd(), "build")
self.clean = False
self.config_toml = ''
- self.printed = False
self.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
self.use_locked_deps = ''
self.use_vendored_sources = ''
if self.rustc().startswith(self.bin_root()) and \
(not os.path.exists(self.rustc()) or
self.program_out_of_date(self.rustc_stamp())):
- self.print_what_bootstrap_means()
if os.path.exists(self.bin_root()):
shutil.rmtree(self.bin_root())
filename = "rust-std-{}-{}.tar.gz".format(
with open(self.rustc_stamp(), 'w') as rust_stamp:
rust_stamp.write(self.date)
+ # This is required so that we don't mix incompatible MinGW
+ # libraries/binaries that are included in rust-std with
+ # the system MinGW ones.
+ if "pc-windows-gnu" in self.build:
+ filename = "rust-mingw-{}-{}.tar.gz".format(
+ rustc_channel, self.build)
+ self._download_stage0_helper(filename, "rust-mingw")
+
if self.cargo().startswith(self.bin_root()) and \
(not os.path.exists(self.cargo()) or
self.program_out_of_date(self.cargo_stamp())):
- self.print_what_bootstrap_means()
filename = "cargo-{}-{}.tar.gz".format(cargo_channel, self.build)
self._download_stage0_helper(filename, "cargo")
self.fix_executable("{}/bin/cargo".format(self.bin_root()))
return '.exe'
return ''
- def print_what_bootstrap_means(self):
- """Prints more information about the build system"""
- if hasattr(self, 'printed'):
- return
- self.printed = True
- if os.path.exists(self.bootstrap_binary()):
- return
- if '--help' not in sys.argv or len(sys.argv) == 1:
- return
-
- print('info: the build system for Rust is written in Rust, so this')
- print(' script is now going to download a stage0 rust compiler')
- print(' and then compile the build system itself')
- print('')
- print('info: in the meantime you can read more about rustbuild at')
- print(' src/bootstrap/README.md before the download finishes')
-
def bootstrap_binary(self):
"""Return the path of the boostrap binary
def build_bootstrap(self):
"""Build bootstrap"""
- self.print_what_bootstrap_means()
build_dir = os.path.join(self.build_dir, "bootstrap")
if self.clean and os.path.exists(build_dir):
shutil.rmtree(build_dir)
continue
if self.get_toml('jemalloc'):
continue
+ if module.endswith("lld"):
+ config = self.get_toml('lld')
+ if config is None or config == 'false':
+ continue
filtered_submodules.append(module)
run(["git", "submodule", "update",
"--init", "--recursive"] + filtered_submodules,
self._download_url = 'https://dev-static.rust-lang.org'
-def bootstrap():
+def bootstrap(help_triggered):
"""Configure, fetch, build and run the initial bootstrap"""
+
+ # If the user is asking for help, let them know that the whole download-and-build
+ # process has to happen before anything is printed out.
+ if help_triggered:
+ print("info: Downloading and building bootstrap before processing --help")
+ print(" command. See src/bootstrap/README.md for help with common")
+ print(" commands.")
+
parser = argparse.ArgumentParser(description='Build rust')
parser.add_argument('--config')
parser.add_argument('--build')
print(' and so in order to preserve your $HOME this will now')
print(' use vendored sources by default. Note that if this')
print(' does not work you should run a normal build first')
- print(' before running a command like `sudo make install`')
+ print(' before running a command like `sudo ./x.py install`')
if build.use_vendored_sources:
if not os.path.exists('.cargo'):
if 'dev' in data:
build.set_dev_environment()
- build.update_submodules()
+ # No help text depends on submodules. This check saves ~1 minute of git commands, even if
+ # all the submodules are present and downloaded!
+ if not help_triggered:
+ build.update_submodules()
# Fetch/build the bootstrap
build.build = args.build or build.build_triple()
help_triggered = (
'-h' in sys.argv) or ('--help' in sys.argv) or (len(sys.argv) == 1)
try:
- bootstrap()
+ bootstrap(help_triggered)
if not help_triggered:
print("Build completed successfully in {}".format(
format_build_time(time() - start_time)))
tool::UnstableBookGen, tool::Tidy, tool::Linkchecker, tool::CargoTest,
tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient,
tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy,
- native::Llvm, tool::Rustfmt, tool::Miri),
+ native::Llvm, tool::Rustfmt, tool::Miri, native::Lld),
Kind::Check => describe!(check::Std, check::Test, check::Rustc),
Kind::Test => describe!(test::Tidy, test::Bootstrap, test::Ui, test::RunPass,
test::CompileFail, test::ParseFail, test::RunFail, test::RunPassValgrind,
pub fn sysroot_codegen_backends(&self, compiler: Compiler) -> PathBuf {
self.sysroot_libdir(compiler, compiler.host)
- .with_file_name("codegen-backends")
+ .with_file_name(self.build.config.rust_codegen_backends_dir.clone())
}
/// Returns the compiler's libdir where it stores the dynamic libraries that
//
// FIXME: the guard against msvc shouldn't need to be here
if !target.contains("msvc") {
- let cc = self.cc(target);
- cargo.env(format!("CC_{}", target), cc)
- .env("CC", cc);
+ let ccache = self.config.ccache.as_ref();
+ let ccacheify = |s: &Path| {
+ let ccache = match ccache {
+ Some(ref s) => s,
+ None => return s.display().to_string(),
+ };
+ // FIXME: the cc-rs crate only recognizes the literal strings
+ // `ccache` and `sccache` when doing caching compilations, so we
+ // mirror that here. It should probably be fixed upstream to
+ // accept a new env var or otherwise work with custom ccache
+ // vars.
+ match &ccache[..] {
+ "ccache" | "sccache" => format!("{} {}", ccache, s.display()),
+ _ => s.display().to_string(),
+ }
+ };
+ let cc = ccacheify(&self.cc(target));
+ cargo.env(format!("CC_{}", target), &cc)
+ .env("CC", &cc);
let cflags = self.cflags(target).join(" ");
cargo.env(format!("CFLAGS_{}", target), cflags.clone())
}
if let Ok(cxx) = self.cxx(target) {
- cargo.env(format!("CXX_{}", target), cxx)
- .env("CXX", cxx)
+ let cxx = ccacheify(&cxx);
+ cargo.env(format!("CXX_{}", target), &cxx)
+ .env("CXX", &cxx)
.env(format!("CXXFLAGS_{}", target), cflags.clone())
.env("CXXFLAGS", cflags);
}
let mut cfg = cc::Build::new();
cfg.cargo_metadata(false).opt_level(0).warnings(false).debug(false)
.target(&target).host(&build.build);
+ if target.contains("msvc") {
+ cfg.static_crt(true);
+ }
let config = build.config.target_config.get(&target);
if let Some(cc) = config.and_then(|c| c.cc.as_ref()) {
cargo.env("CFG_RELEASE", build.rust_release())
.env("CFG_RELEASE_CHANNEL", &build.config.channel)
.env("CFG_VERSION", build.rust_version())
- .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default());
+ .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or_default())
+ .env("CFG_CODEGEN_BACKENDS_DIR", &build.config.rust_codegen_backends_dir);
let libdir_relative = build.config.libdir_relative().unwrap_or(Path::new("lib"));
cargo.env("CFG_LIBDIR_RELATIVE", libdir_relative);
}
}
+fn copy_lld_to_sysroot(builder: &Builder,
+ target_compiler: Compiler,
+ lld_install_root: &Path) {
+ let target = target_compiler.host;
+
+ let dst = builder.sysroot_libdir(target_compiler, target)
+ .parent()
+ .unwrap()
+ .join("bin");
+ t!(fs::create_dir_all(&dst));
+
+ let exe = exe("lld", &target);
+ copy(&lld_install_root.join("bin").join(&exe), &dst.join(&exe));
+}
+
/// Cargo's output path for the standard library in a given stage, compiled
/// by a particular compiler for the specified target.
pub fn libstd_stamp(build: &Build, compiler: Compiler, target: Interned<String>) -> PathBuf {
}
}
+ let lld_install = if build.config.lld_enabled && target_compiler.stage > 0 {
+ Some(builder.ensure(native::Lld {
+ target: target_compiler.host,
+ }))
+ } else {
+ None
+ };
+
let stage = target_compiler.stage;
let host = target_compiler.host;
println!("Assembling stage{} compiler ({})", stage, host);
copy_codegen_backends_to_sysroot(builder,
build_compiler,
target_compiler);
+ if let Some(lld_install) = lld_install {
+ copy_lld_to_sysroot(builder, target_compiler, &lld_install);
+ }
// Link the compiler binary itself into place
let out_dir = build.cargo_out(build_compiler, Mode::Librustc, host);
pub llvm_experimental_targets: String,
pub llvm_link_jobs: Option<u32>,
+ pub lld_enabled: bool,
+
// rust codegen options
pub rust_optimize: bool,
pub rust_codegen_units: Option<u32>,
pub rust_debuginfo_tests: bool,
pub rust_dist_src: bool,
pub rust_codegen_backends: Vec<Interned<String>>,
+ pub rust_codegen_backends_dir: String,
pub build: Interned<String>,
pub hosts: Vec<Interned<String>>,
test_miri: Option<bool>,
save_toolstates: Option<String>,
codegen_backends: Option<Vec<String>>,
+ codegen_backends_dir: Option<String>,
wasm_syscall: Option<bool>,
+ lld: Option<bool>,
}
/// TOML representation of how each build target is configured.
config.rust_dist_src = true;
config.test_miri = false;
config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")];
+ config.rust_codegen_backends_dir = "codegen-backends".to_owned();
config.rustc_error_format = flags.rustc_error_format;
config.on_fail = flags.on_fail;
set(&mut config.quiet_tests, rust.quiet_tests);
set(&mut config.test_miri, rust.test_miri);
set(&mut config.wasm_syscall, rust.wasm_syscall);
+ set(&mut config.lld_enabled, rust.lld);
config.rustc_parallel_queries = rust.experimental_parallel_queries.unwrap_or(false);
config.rustc_default_linker = rust.default_linker.clone();
config.musl_root = rust.musl_root.clone().map(PathBuf::from);
.collect();
}
+ set(&mut config.rust_codegen_backends_dir, rust.codegen_backends_dir.clone());
+
match rust.codegen_units {
Some(0) => config.rust_codegen_units = Some(num_cpus::get() as u32),
Some(n) => config.rust_codegen_units = Some(n),
o("cargo-openssl-static", "build.openssl-static", "static openssl in cargo")
o("profiler", "build.profiler", "build the profiler runtime")
o("emscripten", None, "compile the emscripten backend as well as LLVM")
+o("full-tools", None, "enable all tools")
# Optimization and debugging options. These may be overridden by the release
# channel, etc.
set('build.target', value.split(','))
elif option.name == 'emscripten':
set('rust.codegen-backends', ['llvm', 'emscripten'])
+ elif option.name == 'full-tools':
+ set('rust.codegen-backends', ['llvm', 'emscripten'])
+ set('rust.lld', True)
+ set('build.extended', True)
elif option.name == 'option-checking':
# this was handled above
pass
use {Build, Compiler, Mode};
use channel;
-use util::{cp_r, libdir, is_dylib, cp_filtered, copy, replace_in_file};
+use util::{cp_r, libdir, is_dylib, cp_filtered, copy, replace_in_file, exe};
use builder::{Builder, RunConfig, ShouldRun, Step};
use compile;
use native;
t!(fs::create_dir_all(&backends_dst));
cp_r(&backends_src, &backends_dst);
+ // Copy over lld if it's there
+ if builder.config.lld_enabled {
+ let exe = exe("lld", &compiler.host);
+ let src = builder.sysroot_libdir(compiler, host)
+ .parent()
+ .unwrap()
+ .join("bin")
+ .join(&exe);
+ let dst = image.join("lib/rustlib")
+ .join(&*host)
+ .join("bin")
+ .join(&exe);
+ t!(fs::create_dir_all(&dst.parent().unwrap()));
+ copy(&src, &dst);
+ }
+
// Man pages
t!(fs::create_dir_all(image.join("share/man/man1")));
let man_src = build.src.join("src/doc/man");
let mut src = builder.sysroot_libdir(compiler, target).to_path_buf();
src.pop(); // Remove the trailing /lib folder from the sysroot_libdir
cp_filtered(&src, &dst, &|path| {
- path.file_name().and_then(|s| s.to_str()) != Some("codegen-backends")
+ let name = path.file_name().and_then(|s| s.to_str());
+ name != Some(build.config.rust_codegen_backends_dir.as_str()) &&
+ name != Some("bin")
+
});
let mut cmd = rust_installer(builder);
"src/libterm",
"src/jemalloc",
"src/libprofiler_builtins",
+ "src/stdsimd",
];
let std_src_dirs_exclude = [
"src/libcompiler_builtins/compiler-rt/test",
cmd.arg("--html-after-content").arg(&footer)
.arg("--html-before-content").arg(&version_info)
.arg("--html-in-header").arg(&favicon)
+ .arg("--markdown-no-toc")
.arg("--markdown-playground-url")
.arg("https://play.rust-lang.org/")
.arg("-o").arg(&out)
self.out.join(&*target).join("llvm-emscripten")
}
+ /// Output directory for the LLD build artifacts for `target`
+ /// (sibling of `llvm_out` / `emscripten_llvm_out` under the target's
+ /// build directory).
+ fn lld_out(&self, target: Interned<String>) -> PathBuf {
+ self.out.join(&*target).join("lld")
+ }
+
/// Output directory for all documentation for a target
fn doc_out(&self, target: Interned<String>) -> PathBuf {
self.out.join(&*target).join("doc")
.and_then(|c| c.linker.as_ref()) {
Some(linker)
} else if target != self.config.build &&
- !target.contains("msvc") && !target.contains("emscripten") {
+ !target.contains("msvc") &&
+ !target.contains("emscripten") &&
+ !target.contains("wasm32") {
Some(self.cc(target))
} else {
None
let (out_dir, llvm_config_ret_dir) = if emscripten {
let dir = build.emscripten_llvm_out(target);
- let config_dir = dir.join("bin");
+ let config_dir = dir.join("build/bin");
(dir, config_dir)
} else {
(build.llvm_out(target),
- build.llvm_out(build.config.build).join("bin"))
+ build.llvm_out(build.config.build).join("build/bin"))
};
let done_stamp = out_dir.join("llvm-finished-building");
let build_llvm_config = llvm_config_ret_dir
// http://llvm.org/docs/CMake.html
let root = if self.emscripten { "src/llvm-emscripten" } else { "src/llvm" };
let mut cfg = cmake::Config::new(build.src.join(root));
- if build.config.ninja {
- cfg.generator("Ninja");
- }
let profile = match (build.config.llvm_optimize, build.config.llvm_release_debuginfo) {
(false, _) => "Debug",
let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"};
- cfg.target(&target)
- .host(&build.build)
- .out_dir(&out_dir)
+ cfg.out_dir(&out_dir)
.profile(profile)
.define("LLVM_ENABLE_ASSERTIONS", assertions)
.define("LLVM_TARGETS_TO_BUILD", llvm_targets)
cfg.define("LLVM_NATIVE_BUILD", build.llvm_out(build.build).join("build"));
}
- let sanitize_cc = |cc: &Path| {
- if target.contains("msvc") {
- OsString::from(cc.to_str().unwrap().replace("\\", "/"))
- } else {
- cc.as_os_str().to_owned()
- }
- };
-
- let configure_compilers = |cfg: &mut cmake::Config| {
- // MSVC with CMake uses msbuild by default which doesn't respect these
- // vars that we'd otherwise configure. In that case we just skip this
- // entirely.
- if target.contains("msvc") && !build.config.ninja {
- return
- }
-
- let cc = build.cc(target);
- let cxx = build.cxx(target).unwrap();
-
- // Handle msvc + ninja + ccache specially (this is what the bots use)
- if target.contains("msvc") &&
- build.config.ninja &&
- build.config.ccache.is_some() {
- let mut cc = env::current_exe().expect("failed to get cwd");
- cc.set_file_name("sccache-plus-cl.exe");
-
- cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
- .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
- cfg.env("SCCACHE_PATH",
- build.config.ccache.as_ref().unwrap())
- .env("SCCACHE_TARGET", target);
-
- // If ccache is configured we inform the build a little differently hwo
- // to invoke ccache while also invoking our compilers.
- } else if let Some(ref ccache) = build.config.ccache {
- cfg.define("CMAKE_C_COMPILER", ccache)
- .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
- .define("CMAKE_CXX_COMPILER", ccache)
- .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
- } else {
- cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
- .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
- }
-
- cfg.build_arg("-j").build_arg(build.jobs().to_string());
- cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
- cfg.define("CMAKE_CXX_FLAGS", build.cflags(target).join(" "));
- if let Some(ar) = build.ar(target) {
- if ar.is_absolute() {
- // LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it
- // tries to resolve this path in the LLVM build directory.
- cfg.define("CMAKE_AR", sanitize_cc(ar));
- }
- }
- };
-
- configure_compilers(&mut cfg);
-
- if env::var_os("SCCACHE_ERROR_LOG").is_some() {
- cfg.env("RUST_LOG", "sccache=warn");
- }
+ configure_cmake(build, target, &mut cfg, false);
// FIXME: we don't actually need to build all LLVM tools and all LLVM
// libraries here, e.g. we just want a few components and a few
panic!("\n\nbad LLVM version: {}, need >=3.9\n\n", version)
}
+/// Applies the CMake configuration shared by the LLVM and LLD builds:
+/// generator choice, target/host triples, C/C++ compilers (including the
+/// sccache/ccache wrappers used on the bots), compiler flags, and archiver.
+///
+/// When `building_dist_binaries` is set, `-static-libstdc++` is added on
+/// non-Windows targets (if `llvm-static-stdcpp` is configured) so the
+/// produced binaries don't depend on the host's C++ runtime.
+///
+/// Note: for MSVC without Ninja this returns early after setting only the
+/// generator/triples, since msbuild ignores the compiler/flag variables.
+fn configure_cmake(build: &Build,
+ target: Interned<String>,
+ cfg: &mut cmake::Config,
+ building_dist_binaries: bool) {
+ if build.config.ninja {
+ cfg.generator("Ninja");
+ }
+ cfg.target(&target)
+ .host(&build.config.build);
+
+ // CMake on MSVC expects forward slashes in compiler paths.
+ let sanitize_cc = |cc: &Path| {
+ if target.contains("msvc") {
+ OsString::from(cc.to_str().unwrap().replace("\\", "/"))
+ } else {
+ cc.as_os_str().to_owned()
+ }
+ };
+
+ // MSVC with CMake uses msbuild by default which doesn't respect these
+ // vars that we'd otherwise configure. In that case we just skip this
+ // entirely.
+ if target.contains("msvc") && !build.config.ninja {
+ return
+ }
+
+ let cc = build.cc(target);
+ let cxx = build.cxx(target).unwrap();
+
+ // Handle msvc + ninja + ccache specially (this is what the bots use)
+ if target.contains("msvc") &&
+ build.config.ninja &&
+ build.config.ccache.is_some() {
+ let mut cc = env::current_exe().expect("failed to get cwd");
+ cc.set_file_name("sccache-plus-cl.exe");
+
+ cfg.define("CMAKE_C_COMPILER", sanitize_cc(&cc))
+ .define("CMAKE_CXX_COMPILER", sanitize_cc(&cc));
+ cfg.env("SCCACHE_PATH",
+ build.config.ccache.as_ref().unwrap())
+ .env("SCCACHE_TARGET", target);
+
+ // If ccache is configured we inform the build a little differently how
+ // to invoke ccache while also invoking our compilers.
+ } else if let Some(ref ccache) = build.config.ccache {
+ cfg.define("CMAKE_C_COMPILER", ccache)
+ .define("CMAKE_C_COMPILER_ARG1", sanitize_cc(cc))
+ .define("CMAKE_CXX_COMPILER", ccache)
+ .define("CMAKE_CXX_COMPILER_ARG1", sanitize_cc(cxx));
+ } else {
+ cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
+ .define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
+ }
+
+ cfg.build_arg("-j").build_arg(build.jobs().to_string());
+ cfg.define("CMAKE_C_FLAGS", build.cflags(target).join(" "));
+ let mut cxxflags = build.cflags(target).join(" ");
+ if building_dist_binaries {
+ if build.config.llvm_static_stdcpp && !target.contains("windows") {
+ cxxflags.push_str(" -static-libstdc++");
+ }
+ }
+ cfg.define("CMAKE_CXX_FLAGS", cxxflags);
+ if let Some(ar) = build.ar(target) {
+ if ar.is_absolute() {
+ // LLVM build breaks if `CMAKE_AR` is a relative path, for some reason it
+ // tries to resolve this path in the LLVM build directory.
+ cfg.define("CMAKE_AR", sanitize_cc(ar));
+ }
+ }
+
+ if env::var_os("SCCACHE_ERROR_LOG").is_some() {
+ cfg.env("RUST_LOG", "sccache=warn");
+ }
+}
+
+/// Build step for the LLD linker, compiled out of `src/tools/lld` against
+/// the LLVM already built for the same target.
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct Lld {
+ pub target: Interned<String>,
+}
+
+impl Step for Lld {
+ type Output = PathBuf;
+ const ONLY_HOSTS: bool = true;
+
+ fn should_run(run: ShouldRun) -> ShouldRun {
+ run.path("src/tools/lld")
+ }
+
+ fn make_run(run: RunConfig) {
+ run.builder.ensure(Lld { target: run.target });
+ }
+
+ /// Compile LLD for `target`, returning its build output directory.
+ ///
+ /// Ensures LLVM is built first (LLD is configured via its `llvm-config`),
+ /// and skips the cmake build entirely if the stamp file from a previous
+ /// successful build is present.
+ fn run(self, builder: &Builder) -> PathBuf {
+ let target = self.target;
+ let build = builder.build;
+
+ let llvm_config = builder.ensure(Llvm {
+ target: self.target,
+ emscripten: false,
+ });
+
+ let out_dir = build.lld_out(target);
+ let done_stamp = out_dir.join("lld-finished-building");
+ if done_stamp.exists() {
+ return out_dir
+ }
+
+ let _folder = build.fold_output(|| "lld");
+ println!("Building LLD for {}", target);
+ let _time = util::timeit();
+ t!(fs::create_dir_all(&out_dir));
+
+ let mut cfg = cmake::Config::new(build.src.join("src/tools/lld"));
+ // `true`: these binaries are shipped, so statically link libstdc++
+ // where configured (see `configure_cmake`).
+ configure_cmake(build, target, &mut cfg, true);
+
+ cfg.out_dir(&out_dir)
+ .profile("Release")
+ .define("LLVM_CONFIG_PATH", llvm_config)
+ .define("LLVM_INCLUDE_TESTS", "OFF");
+
+ cfg.build();
+
+ t!(File::create(&done_stamp));
+ out_dir
+ }
+}
+
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TestHelpers {
pub target: Interned<String>,
println!("tidy check ({})", host);
let mut cmd = builder.tool_cmd(Tool::Tidy);
cmd.arg(build.src.join("src"));
+ cmd.arg(&build.initial_cargo);
if !build.config.vendor {
cmd.arg("--no-vendor");
}
}
if build.config.llvm_enabled {
- let llvm_config = build.llvm_config(target);
+ let llvm_config = build.llvm_config(build.config.build);
let llvm_version = output(Command::new(&llvm_config).arg("--version"));
cmd.arg("--llvm-version").arg(llvm_version);
if !build.is_rust_llvm(target) {
ENV TARGETS=arm-linux-androideabi
-ENV RUST_CONFIGURE_ARGS \
- --target=$TARGETS \
- --arm-linux-androideabi-ndk=/android/ndk/arm-14
+ENV RUST_CONFIGURE_ARGS --arm-linux-androideabi-ndk=/android/ndk/arm-14
ENV SCRIPT python2.7 ../x.py test --target $TARGETS
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
-ENV RUST_CONFIGURE_ARGS \
- --target=arm-unknown-linux-gnueabihf \
- --qemu-armhf-rootfs=/tmp/rootfs
+ENV RUST_CONFIGURE_ARGS --qemu-armhf-rootfs=/tmp/rootfs
ENV SCRIPT python2.7 ../x.py test --target arm-unknown-linux-gnueabihf
ENV NO_CHANGE_USER=1
ENV TARGETS=asmjs-unknown-emscripten
-ENV RUST_CONFIGURE_ARGS --target=$TARGETS --enable-emscripten
+ENV RUST_CONFIGURE_ARGS --enable-emscripten
ENV SCRIPT python2.7 ../x.py test --target $TARGETS
RUN sh /scripts/sccache.sh
ENV RUST_CONFIGURE_ARGS \
- --target=aarch64-unknown-linux-gnu \
--qemu-aarch64-rootfs=/tmp/rootfs
ENV SCRIPT python2.7 ../x.py test --target aarch64-unknown-linux-gnu
ENV NO_CHANGE_USER=1
ENV HOSTS=aarch64-linux-android
ENV RUST_CONFIGURE_ARGS \
- --host=$HOSTS \
- --target=$HOSTS \
--aarch64-linux-android-ndk=/android/ndk/arm64-21 \
--disable-rpath \
--enable-extended \
ENV HOSTS=armv7-linux-androideabi
ENV RUST_CONFIGURE_ARGS \
- --host=$HOSTS \
- --target=$HOSTS \
--armv7-linux-androideabi-ndk=/android/ndk/arm \
--disable-rpath \
--enable-extended \
ENV HOSTS=i686-linux-android
ENV RUST_CONFIGURE_ARGS \
- --host=$HOSTS \
- --target=$HOSTS \
--i686-linux-android-ndk=/android/ndk/x86 \
--disable-rpath \
--enable-extended \
ENV HOSTS=x86_64-linux-android
ENV RUST_CONFIGURE_ARGS \
- --host=$HOSTS \
- --target=$HOSTS \
--x86_64-linux-android-ndk=/android/ndk/x86_64-21 \
--disable-rpath \
--enable-extended \
ENV HOSTS=x86_64-unknown-dragonfly
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOST=x86_64-unknown-haiku
ENV TARGET=target.$HOST
-ENV RUST_CONFIGURE_ARGS --host=$HOST --target=$HOST --disable-jemalloc \
+ENV RUST_CONFIGURE_ARGS --disable-jemalloc \
--set=$TARGET.cc=x86_64-unknown-haiku-gcc \
--set=$TARGET.cxx=x86_64-unknown-haiku-g++ \
--set=$TARGET.llvm-config=/bin/llvm-config-haiku
-ENV SCRIPT python2.7 ../x.py dist
+ENV SCRIPT python2.7 ../x.py dist --host=$HOST --target=$HOST
CC_x86_64_unknown_redox=x86_64-unknown-redox-gcc \
CXX_x86_64_unknown_redox=x86_64-unknown-redox-g++
-ENV RUST_CONFIGURE_ARGS --target=x86_64-unknown-redox --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --target x86_64-unknown-redox
ENV TARGETS=wasm32-experimental-emscripten
-ENV RUST_CONFIGURE_ARGS --target=$TARGETS --experimental-targets=WebAssembly
+ENV RUST_CONFIGURE_ARGS --experimental-targets=WebAssembly
ENV SCRIPT python2.7 ../x.py test --target $TARGETS
ENV EM_CONFIG=/emsdk-portable/.emscripten
ENV TARGETS=wasm32-unknown-emscripten
-
-ENV RUST_CONFIGURE_ARGS --target=$TARGETS
-
ENV SCRIPT python2.7 ../x.py test --target $TARGETS
ENV HOSTS=aarch64-unknown-linux-gnu
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV TARGETS=$TARGETS,x86_64-linux-android
ENV RUST_CONFIGURE_ARGS \
- --target=$TARGETS \
--enable-extended \
--arm-linux-androideabi-ndk=/android/ndk/arm-14 \
--armv7-linux-androideabi-ndk=/android/ndk/arm-14 \
ENV HOSTS=arm-unknown-linux-gnueabi
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=arm-unknown-linux-gnueabihf
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=armv7-unknown-linux-gnueabihf
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
RUN sh /scripts/sccache.sh
ENV RUST_CONFIGURE_ARGS \
- --target=i686-unknown-linux-musl,i586-unknown-linux-gnu \
--musl-root-i586=/musl-i586 \
--musl-root-i686=/musl-i686 \
--enable-extended
# https://github.com/alexcrichton/cc-rs/pull/281
ENV CFLAGS_i586_unknown_linux_musl="-Wa,-mrelax-relocations=no -Wl,-melf_i386"
-ENV TARGETS=i586-unknown-linux-gnu
-ENV TARGETS=$TARGETS,i686-unknown-linux-musl
+ENV TARGETS=i586-unknown-linux-gnu,i686-unknown-linux-musl
ENV SCRIPT \
python2.7 ../x.py test --target $TARGETS && \
ENV HOSTS=i686-unknown-freebsd
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=i686-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS \
- --host=$HOSTS \
- --enable-extended \
+ --enable-full-tools \
--enable-sanitizers \
- --enable-profiler \
- --enable-emscripten \
- --build=i686-unknown-linux-gnu
-ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
+ --enable-profiler
+ENV SCRIPT python2.7 ../x.py dist --build $HOSTS --host $HOSTS --target $HOSTS
# This is the only builder which will create source tarballs
ENV DIST_SRC 1
ENV HOSTS=mips-unknown-linux-gnu
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=mips64-unknown-linux-gnuabi64
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=mips64el-unknown-linux-gnuabi64
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=mipsel-unknown-linux-gnu
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=powerpc-unknown-linux-gnu
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
# FIXME(#36150) this will fail the bootstrap. Probably means something bad is
ENV HOSTS=powerpc64-unknown-linux-gnu
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=powerpc64le-unknown-linux-gnu
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=s390x-unknown-linux-gnu
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
CFLAGS_armv5te_unknown_linux_gnueabi="-march=armv5te -marm -mfloat-abi=soft"
ENV RUST_CONFIGURE_ARGS \
- --target=$TARGETS \
--musl-root-arm=/musl-arm \
--musl-root-armhf=/musl-armhf \
--musl-root-armv7=/musl-armv7 \
ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnux32
ENV TARGETS=$TARGETS,x86_64-unknown-cloudabi
-ENV RUST_CONFIGURE_ARGS --target=$TARGETS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
ENV HOSTS=x86_64-unknown-freebsd
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV HOSTS=x86_64-unknown-linux-gnu
ENV RUST_CONFIGURE_ARGS \
- --host=$HOSTS \
- --enable-extended \
+ --enable-full-tools \
--enable-sanitizers \
- --enable-profiler \
- --enable-emscripten
+ --enable-profiler
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
# This is the only builder which will create source tarballs
RUN sh /scripts/sccache.sh
ENV RUST_CONFIGURE_ARGS \
- --target=x86_64-unknown-linux-musl \
--musl-root-x86_64=/musl-x86_64 \
--enable-extended
ENV HOSTS=x86_64-unknown-netbsd
-ENV RUST_CONFIGURE_ARGS --host=$HOSTS --enable-extended
+ENV RUST_CONFIGURE_ARGS --enable-extended
ENV SCRIPT python2.7 ../x.py dist --host $HOSTS --target $HOSTS
ENV TARGETS=wasm32-unknown-unknown
ENV RUST_CONFIGURE_ARGS \
- --target=$TARGETS \
- --set build.nodejs=/node-v9.2.0-linux-x64/bin/node
+ --set build.nodejs=/node-v9.2.0-linux-x64/bin/node \
+ --set rust.lld
ENV SCRIPT python2.7 /checkout/x.py test --target $TARGETS \
src/test/ui \
fi
fi
+# We've had problems in the past of shell scripts leaking fds into the sccache
+# server (#48192) which causes Cargo to erroneously think that a build script
+# hasn't finished yet. Try to solve that problem by starting a very long-lived
+# sccache server at the start of the build, but no need to worry if this fails.
+SCCACHE_IDLE_TIMEOUT=10800 sccache --start-server || true
+
travis_fold start configure
travis_time_start
$SRC/configure $RUST_CONFIGURE_ARGS
+++ /dev/null
-#!/usr/bin/env python
-#
-# Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-#
-# this script attempts to turn doc comment attributes (#[doc = "..."])
-# into sugared-doc-comments (/** ... */ and /// ...)
-#
-# it sugarises all .rs/.rc files underneath the working directory
-#
-
-import sys
-import os
-import fnmatch
-import re
-
-
-DOC_PATTERN = '^(?P<indent>[\\t ]*)#\\[(\\s*)doc(\\s*)=' + \
- '(\\s*)"(?P<text>(\\"|[^"])*?)"(\\s*)\\]' + \
- '(?P<semi>;)?'
-
-ESCAPES = [("\\'", "'"),
- ('\\"', '"'),
- ("\\n", "\n"),
- ("\\r", "\r"),
- ("\\t", "\t")]
-
-
-def unescape(s):
- for (find, repl) in ESCAPES:
- s = s.replace(find, repl)
- return s
-
-
-def block_trim(s):
- lns = s.splitlines()
-
- # remove leading/trailing whitespace-lines
- while lns and not lns[0].strip():
- lns = lns[1:]
- while lns and not lns[-1].strip():
- lns = lns[:-1]
-
- # remove leading horizontal whitespace
- n = sys.maxsize
- for ln in lns:
- if ln.strip():
- n = min(n, len(re.search('^\s*', ln).group()))
- if n != sys.maxsize:
- lns = [ln[n:] for ln in lns]
-
- # strip trailing whitespace
- lns = [ln.rstrip() for ln in lns]
-
- return lns
-
-
-def replace_doc(m):
- indent = m.group('indent')
- text = block_trim(unescape(m.group('text')))
-
- if len(text) > 1:
- inner = '!' if m.group('semi') else '*'
- starify = lambda s: indent + ' *' + (' ' + s if s else '')
- text = '\n'.join(map(starify, text))
- repl = indent + '/*' + inner + '\n' + text + '\n' + indent + ' */'
- else:
- inner = '!' if m.group('semi') else '/'
- repl = indent + '//' + inner + ' ' + text[0]
-
- return repl
-
-
-def sugarise_file(path):
- s = open(path).read()
-
- r = re.compile(DOC_PATTERN, re.MULTILINE | re.DOTALL)
- ns = re.sub(r, replace_doc, s)
-
- if s != ns:
- open(path, 'w').write(ns)
-
-for (dirpath, dirnames, filenames) in os.walk('.'):
- for name in fnmatch.filter(filenames, '*.r[sc]'):
- sugarise_file(os.path.join(dirpath, name))
exp2f: function(x) { return Math.pow(2, x); },
ldexp: function(x, y) { return x * Math.pow(2, y); },
ldexpf: function(x, y) { return x * Math.pow(2, y); },
+ log: Math.log,
+ log2: Math.log2,
log10: Math.log10,
log10f: Math.log10,
+++ /dev/null
-#!/usr/bin/env python
-#
-# Copyright 2013 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-# This creates the tables used for distributions implemented using the
-# ziggurat algorithm in `rand::distributions;`. They are
-# (basically) the tables as used in the ZIGNOR variant (Doornik 2005).
-# They are changed rarely, so the generated file should be checked in
-# to git.
-#
-# It creates 3 tables: X as in the paper, F which is f(x_i), and
-# F_DIFF which is f(x_i) - f(x_{i-1}). The latter two are just cached
-# values which is not done in that paper (but is done in other
-# variants). Note that the adZigR table is unnecessary because of
-# algebra.
-#
-# It is designed to be compatible with Python 2 and 3.
-
-from math import exp, sqrt, log, floor
-import random
-
-# The order should match the return value of `tables`
-TABLE_NAMES = ['X', 'F']
-
-# The actual length of the table is 1 more, to stop
-# index-out-of-bounds errors. This should match the bitwise operation
-# to find `i` in `zigurrat` in `libstd/rand/mod.rs`. Also the *_R and
-# *_V constants below depend on this value.
-TABLE_LEN = 256
-
-# equivalent to `zigNorInit` in Doornik2005, but generalised to any
-# distribution. r = dR, v = dV, f = probability density function,
-# f_inv = inverse of f
-def tables(r, v, f, f_inv):
- # compute the x_i
- xvec = [0]*(TABLE_LEN+1)
-
- xvec[0] = v / f(r)
- xvec[1] = r
-
- for i in range(2, TABLE_LEN):
- last = xvec[i-1]
- xvec[i] = f_inv(v / last + f(last))
-
- # cache the f's
- fvec = [0]*(TABLE_LEN+1)
- for i in range(TABLE_LEN+1):
- fvec[i] = f(xvec[i])
-
- return xvec, fvec
-
-# Distributions
-# N(0, 1)
-def norm_f(x):
- return exp(-x*x/2.0)
-def norm_f_inv(y):
- return sqrt(-2.0*log(y))
-
-NORM_R = 3.6541528853610088
-NORM_V = 0.00492867323399
-
-NORM = tables(NORM_R, NORM_V,
- norm_f, norm_f_inv)
-
-# Exp(1)
-def exp_f(x):
- return exp(-x)
-def exp_f_inv(y):
- return -log(y)
-
-EXP_R = 7.69711747013104972
-EXP_V = 0.0039496598225815571993
-
-EXP = tables(EXP_R, EXP_V,
- exp_f, exp_f_inv)
-
-
-# Output the tables/constants/types
-
-def render_static(name, type, value):
- # no space or
- return 'pub static %s: %s =%s;\n' % (name, type, value)
-
-# static `name`: [`type`, .. `len(values)`] =
-# [values[0], ..., values[3],
-# values[4], ..., values[7],
-# ... ];
-def render_table(name, values):
- rows = []
- # 4 values on each row
- for i in range(0, len(values), 4):
- row = values[i:i+4]
- rows.append(', '.join('%.18f' % f for f in row))
-
- rendered = '\n [%s]' % ',\n '.join(rows)
- return render_static(name, '[f64, .. %d]' % len(values), rendered)
-
-
-with open('ziggurat_tables.rs', 'w') as f:
- f.write('''// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Tables for distributions which are sampled using the ziggurat
-// algorithm. Autogenerated by `ziggurat_tables.py`.
-
-pub type ZigTable = &\'static [f64, .. %d];
-''' % (TABLE_LEN + 1))
- for name, tables, r in [('NORM', NORM, NORM_R),
- ('EXP', EXP, EXP_R)]:
- f.write(render_static('ZIG_%s_R' % name, 'f64', ' %.18f' % r))
- for (tabname, table) in zip(TABLE_NAMES, tables):
- f.write(render_table('ZIG_%s_%s' % (name, tabname), table))
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
/// An owning iterator over the elements of a `BinaryHeap`.
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
/// A draining iterator over the elements of a `BinaryHeap`.
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for Iter<'a, K, V> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for IntoIter<K, V> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for Keys<'a, K, V> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for Values<'a, K, V> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for ValuesMut<'a, K, V> {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for Range<'a, K, V> {}
#[stable(feature = "btree_range", since = "1.17.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for RangeMut<'a, K, V> {}
impl<'a, K, V> RangeMut<'a, K, V> {
fn len(&self) -> usize { self.iter.len() }
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
fn len(&self) -> usize { self.iter.len() }
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
#[stable(feature = "btree_range", since = "1.17.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Range<'a, T> {}
/// Compare `x` and `y`, but return `short` if x is None and `long` if y is None
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T: Ord> FusedIterator for Difference<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T: Ord> FusedIterator for SymmetricDifference<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T: Ord> FusedIterator for Intersection<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T: Ord> FusedIterator for Union<'a, T> {}
#![feature(fmt_internals)]
#![feature(from_ref)]
#![feature(fundamental)]
-#![feature(fused)]
#![feature(generic_param_attrs)]
#![feature(i128_type)]
#![feature(inclusive_range)]
#![feature(allocator_internals)]
#![feature(on_unimplemented)]
#![feature(exact_chunks)]
+#![feature(pointer_methods)]
-#![cfg_attr(not(test), feature(fused, fn_traits, placement_new_protocol, swap_with_slice, i128))]
+#![cfg_attr(not(test), feature(fn_traits, placement_new_protocol, swap_with_slice, i128))]
#![cfg_attr(test, feature(test, box_heap))]
// Allow testing this library
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for IterMut<'a, T> {}
impl<'a, T> IterMut<'a, T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
use core::str::pattern::Pattern;
use core::str::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
use core::mem;
+use core::ptr;
use core::iter::FusedIterator;
use std_unicode::str::{UnicodeStr, Utf16Encoder};
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for EncodeUtf16<'a> {}
#[stable(feature = "rust1", since = "1.0.0")]
/// ```
#[stable(feature = "repeat_str", since = "1.16.0")]
pub fn repeat(&self, n: usize) -> String {
- let mut s = String::with_capacity(self.len() * n);
- s.extend((0..n).map(|_| self));
- s
+ if n == 0 {
+ return String::new();
+ }
+
+ // If `n` is larger than zero, it can be split as
+ // `n = 2^expn + rem (2^expn > rem, expn >= 0, rem >= 0)`.
+ // `2^expn` is the number represented by the leftmost '1' bit of `n`,
+ // and `rem` is the remaining part of `n`.
+
+ // Using `Vec` to access `set_len()`.
+ // `checked_mul` guards against `self.len() * n` overflowing `usize`:
+ // an unchecked multiply could wrap, under-allocate `buf`, and make the
+ // raw copies below write out of bounds.
+ let capacity = self.len().checked_mul(n).expect("capacity overflow");
+ let mut buf = Vec::with_capacity(capacity);
+
+ // `2^expn` repetition is done by doubling `buf` `expn`-times.
+ buf.extend(self.as_bytes());
+ {
+ let mut m = n >> 1;
+ // If `m > 0`, there are remaining bits up to the leftmost '1'.
+ while m > 0 {
+ // `buf.extend(buf)`:
+ // SAFETY: the doubled length never exceeds `capacity`, so the
+ // destination range lies within the reserved allocation and
+ // does not overlap the source range.
+ unsafe {
+ ptr::copy_nonoverlapping(
+ buf.as_ptr(),
+ (buf.as_mut_ptr() as *mut u8).add(buf.len()),
+ buf.len(),
+ );
+ let buf_len = buf.len();
+ buf.set_len(buf_len * 2);
+ }
+
+ m >>= 1;
+ }
+ }
+
+ // `rem` (`= n - 2^expn`) repetition is done by copying
+ // first `rem` repetitions from `buf` itself.
+ let rem_len = capacity - buf.len(); // `self.len() * rem`
+ if rem_len > 0 {
+ // `buf.extend(buf[0 .. rem_len])`:
+ // SAFETY: this is non-overlapping since `2^expn > rem`, and
+ // `buf.len() + rem_len == capacity`, so the copy stays within
+ // the reserved allocation.
+ unsafe {
+ ptr::copy_nonoverlapping(
+ buf.as_ptr(),
+ (buf.as_mut_ptr() as *mut u8).add(buf.len()),
+ rem_len,
+ );
+ // Use the requested `capacity`, not `buf.capacity()`: the
+ // allocator may have reserved more bytes than asked for, and
+ // those extra bytes are uninitialized.
+ buf.set_len(capacity);
+ }
+ }
+
+ // SAFETY: `buf` is a concatenation of copies of `self`, which is
+ // valid UTF-8.
+ unsafe { String::from_utf8_unchecked(buf) }
}
/// Checks if all characters in this string are within the ASCII range.
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for Drain<'a> {}
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> Index<usize> for Vec<T> {
- type Output = T;
-
- #[inline]
- fn index(&self, index: usize) -> &T {
- // NB built-in indexing via `&[T]`
- &(**self)[index]
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> IndexMut<usize> for Vec<T> {
- #[inline]
- fn index_mut(&mut self, index: usize) -> &mut T {
- // NB built-in indexing via `&mut [T]`
- &mut (**self)[index]
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::Index<ops::Range<usize>> for Vec<T> {
- type Output = [T];
-
- #[inline]
- fn index(&self, index: ops::Range<usize>) -> &[T] {
- Index::index(&**self, index)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::Index<ops::RangeTo<usize>> for Vec<T> {
- type Output = [T];
-
- #[inline]
- fn index(&self, index: ops::RangeTo<usize>) -> &[T] {
- Index::index(&**self, index)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::Index<ops::RangeFrom<usize>> for Vec<T> {
- type Output = [T];
-
- #[inline]
- fn index(&self, index: ops::RangeFrom<usize>) -> &[T] {
- Index::index(&**self, index)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::Index<ops::RangeFull> for Vec<T> {
- type Output = [T];
-
- #[inline]
- fn index(&self, _index: ops::RangeFull) -> &[T] {
- self
- }
-}
-
-#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::Index<ops::RangeInclusive<usize>> for Vec<T> {
- type Output = [T];
-
- #[inline]
- fn index(&self, index: ops::RangeInclusive<usize>) -> &[T] {
- Index::index(&**self, index)
- }
-}
-
-#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::Index<ops::RangeToInclusive<usize>> for Vec<T> {
- type Output = [T];
+impl<T, I> Index<I> for Vec<T>
+where
+ I: ::core::slice::SliceIndex<[T]>,
+{
+ type Output = I::Output;
#[inline]
- fn index(&self, index: ops::RangeToInclusive<usize>) -> &[T] {
+ fn index(&self, index: I) -> &Self::Output {
Index::index(&**self, index)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::IndexMut<ops::Range<usize>> for Vec<T> {
- #[inline]
- fn index_mut(&mut self, index: ops::Range<usize>) -> &mut [T] {
- IndexMut::index_mut(&mut **self, index)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::IndexMut<ops::RangeTo<usize>> for Vec<T> {
- #[inline]
- fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut [T] {
- IndexMut::index_mut(&mut **self, index)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::IndexMut<ops::RangeFrom<usize>> for Vec<T> {
- #[inline]
- fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut [T] {
- IndexMut::index_mut(&mut **self, index)
- }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::IndexMut<ops::RangeFull> for Vec<T> {
- #[inline]
- fn index_mut(&mut self, _index: ops::RangeFull) -> &mut [T] {
- self
- }
-}
-
-#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::IndexMut<ops::RangeInclusive<usize>> for Vec<T> {
- #[inline]
- fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut [T] {
- IndexMut::index_mut(&mut **self, index)
- }
-}
-
-#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
-#[rustc_on_unimplemented = "vector indices are of type `usize` or ranges of `usize`"]
-impl<T> ops::IndexMut<ops::RangeToInclusive<usize>> for Vec<T> {
+impl<T, I> IndexMut<I> for Vec<T>
+where
+ I: ::core::slice::SliceIndex<[T]>,
+{
#[inline]
- fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut [T] {
+ fn index_mut(&mut self, index: I) -> &mut Self::Output {
IndexMut::index_mut(&mut **self, index)
}
}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Drain<'a, T> {}
/// A place for insertion at the back of a `Vec`.
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for IterMut<'a, T> {}
/// An owning iterator over the elements of a `VecDeque`.
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
/// A draining iterator over the elements of a `VecDeque`.
#[stable(feature = "drain", since = "1.6.0")]
impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
//! Shareable mutable containers.
//!
+//! Rust memory safety is based on this rule: Given an object `T`, it is only possible to
+//! have one of the following:
+//!
+//! - Having several immutable references (`&T`) to the object (also known as **aliasing**).
+//! - Having one mutable reference (`&mut T`) to the object (also known as **mutability**).
+//!
+//! This is enforced by the Rust compiler. However, there are situations where this rule is not
+//! flexible enough. Sometimes it is required to have multiple references to an object and yet
+//! mutate it.
+//!
+//! Shareable mutable containers exist to permit mutability in a controlled manner, even in the
+//! presence of aliasing. Both `Cell<T>` and `RefCell<T>` allow doing this in a single-threaded
+//! way. However, neither `Cell<T>` nor `RefCell<T>` is thread safe (they do not implement
+//! `Sync`). If you need to do aliasing and mutation between multiple threads it is possible to
+//! use [`Mutex`](../../std/sync/struct.Mutex.html),
+//! [`RwLock`](../../std/sync/struct.RwLock.html) or
+//! [`atomic`](../../core/sync/atomic/index.html) types.
+//!
//! Values of the `Cell<T>` and `RefCell<T>` types may be mutated through shared references (i.e.
//! the common `&T` type), whereas most Rust types can only be mutated through unique (`&mut T`)
//! references. We say that `Cell<T>` and `RefCell<T>` provide 'interior mutability', in contrast
/// Converts a `u32` to a `char`.
///
-/// Note that all [`char`]s are valid [`u32`]s, and can be casted to one with
+/// Note that all [`char`]s are valid [`u32`]s, and can be cast to one with
/// [`as`]:
///
/// ```
/// Converts a `u32` to a `char`, ignoring validity.
///
-/// Note that all [`char`]s are valid [`u32`]s, and can be casted to one with
+/// Note that all [`char`]s are valid [`u32`]s, and can be cast to one with
/// [`as`]:
///
/// ```
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for EscapeUnicode {}
#[stable(feature = "char_struct_display", since = "1.16.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for EscapeDefault {}
#[stable(feature = "char_struct_display", since = "1.16.0")]
#[stable(feature = "char_escape_debug", since = "1.20.0")]
impl ExactSizeIterator for EscapeDebug { }
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for EscapeDebug {}
#[stable(feature = "char_escape_debug", since = "1.20.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[unstable(feature = "decode_utf8", issue = "33906")]
impl<I: FusedIterator<Item = u8>> FusedIterator for DecodeUtf8<I> {}
/// Reverses the bytes in an integer type `T`.
pub fn bswap<T>(x: T) -> T;
+ /// Reverses the bits in an integer type `T`.
+ #[cfg(not(stage0))]
+ pub fn bitreverse<T>(x: T) -> T;
+
/// Performs checked integer addition.
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `overflowing_add` method. For example,
///
/// // this iterator sequence is complex.
/// let sum = a.iter()
- /// .cloned()
- /// .filter(|&x| x % 2 == 0)
- /// .fold(0, |sum, i| sum + i);
+ /// .cloned()
+ /// .filter(|x| x % 2 == 0)
+ /// .fold(0, |sum, i| sum + i);
///
/// println!("{}", sum);
///
/// // let's add some inspect() calls to investigate what's happening
/// let sum = a.iter()
- /// .cloned()
- /// .inspect(|x| println!("about to filter: {}", x))
- /// .filter(|&x| x % 2 == 0)
- /// .inspect(|x| println!("made it through filter: {}", x))
- /// .fold(0, |sum, i| sum + i);
+ /// .cloned()
+ /// .inspect(|x| println!("about to filter: {}", x))
+ /// .filter(|x| x % 2 == 0)
+ /// .inspect(|x| println!("made it through filter: {}", x))
+ /// .fold(0, |sum, i| sum + i);
///
/// println!("{}", sum);
/// ```
/// This will print:
///
/// ```text
+ /// 6
/// about to filter: 1
/// about to filter: 4
/// made it through filter: 4
///
/// let iter = a.into_iter();
///
- /// let sum: i32 = iter.take(5)
- /// .fold(0, |acc, &i| acc + i );
+ /// let sum: i32 = iter.take(5).fold(0, |acc, i| acc + i );
///
/// assert_eq!(sum, 6);
///
/// let mut iter = a.into_iter();
///
/// // instead, we add in a .by_ref()
- /// let sum: i32 = iter.by_ref()
- /// .take(2)
- /// .fold(0, |acc, &i| acc + i );
+ /// let sum: i32 = iter.by_ref().take(2).fold(0, |acc, i| acc + i );
///
/// assert_eq!(sum, 3);
///
///
/// let a = [1, 2, 3];
///
- /// let doubled: VecDeque<i32> = a.iter()
- /// .map(|&x| x * 2)
- /// .collect();
+ /// let doubled: VecDeque<i32> = a.iter().map(|&x| x * 2).collect();
///
/// assert_eq!(2, doubled[0]);
/// assert_eq!(4, doubled[1]);
/// ```
/// let a = [1, 2, 3];
///
- /// let doubled = a.iter()
- /// .map(|&x| x * 2)
- /// .collect::<Vec<i32>>();
+ /// let doubled = a.iter().map(|x| x * 2).collect::<Vec<i32>>();
///
/// assert_eq!(vec![2, 4, 6], doubled);
/// ```
/// ```
/// let a = [1, 2, 3];
///
- /// let doubled = a.iter()
- /// .map(|&x| x * 2)
- /// .collect::<Vec<_>>();
+ /// let doubled = a.iter().map(|x| x * 2).collect::<Vec<_>>();
///
/// assert_eq!(vec![2, 4, 6], doubled);
/// ```
/// let chars = ['g', 'd', 'k', 'k', 'n'];
///
/// let hello: String = chars.iter()
- /// .map(|&x| x as u8)
- /// .map(|x| (x + 1) as char)
- /// .collect();
+ /// .map(|&x| x as u8)
+ /// .map(|x| (x + 1) as char)
+ /// .collect();
///
/// assert_eq!("hello", hello);
/// ```
/// ```
/// let a = [1, 2, 3];
///
- /// let (even, odd): (Vec<i32>, Vec<i32>) = a.into_iter()
- /// .partition(|&n| n % 2 == 0);
+ /// let (even, odd): (Vec<i32>, Vec<i32>) = a
+ /// .into_iter()
+ /// .partition(|&n| n % 2 == 0);
///
/// assert_eq!(even, vec![2]);
/// assert_eq!(odd, vec![1, 3]);
/// let a = [1, 2, 3];
///
/// // the checked sum of all of the elements of the array
- /// let sum = a.iter()
- /// .try_fold(0i8, |acc, &x| acc.checked_add(x));
+ /// let sum = a.iter().try_fold(0i8, |acc, &x| acc.checked_add(x));
///
/// assert_eq!(sum, Some(6));
/// ```
/// let a = [1, 2, 3];
///
/// // the sum of all of the elements of the array
- /// let sum = a.iter()
- /// .fold(0, |acc, &x| acc + x);
+ /// let sum = a.iter().fold(0, |acc, x| acc + x);
///
/// assert_eq!(sum, 6);
/// ```
pub use self::traits::{FromIterator, IntoIterator, DoubleEndedIterator, Extend};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::traits::{ExactSizeIterator, Sum, Product};
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
pub use self::traits::FusedIterator;
#[unstable(feature = "trusted_len", issue = "37572")]
pub use self::traits::TrustedLen;
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I> FusedIterator for Rev<I>
where I: FusedIterator + DoubleEndedIterator {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, I, T: 'a> FusedIterator for Cloned<I>
where I: FusedIterator<Item=&'a T>, T: Clone
{}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I> FusedIterator for Cycle<I> where I: Clone + Iterator {}
/// An iterator for stepping iterators by a custom amount.
}
// Note: *both* must be fused to handle double-ended iterators.
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<A, B> FusedIterator for Chain<A, B>
where A: FusedIterator,
B: FusedIterator<Item=A::Item>,
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<A, B> FusedIterator for Zip<A, B>
where A: FusedIterator, B: FusedIterator, {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<B, I: FusedIterator, F> FusedIterator for Map<I, F>
where F: FnMut(I::Item) -> B {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I: FusedIterator, P> FusedIterator for Filter<I, P>
where P: FnMut(&I::Item) -> bool {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<B, I: FusedIterator, F> FusedIterator for FilterMap<I, F>
where F: FnMut(I::Item) -> Option<B> {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I> FusedIterator for Enumerate<I> where I: FusedIterator {}
#[unstable(feature = "trusted_len", issue = "37572")]
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator> ExactSizeIterator for Peekable<I> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I: FusedIterator> FusedIterator for Peekable<I> {}
impl<I: Iterator> Peekable<I> {
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I, P> FusedIterator for SkipWhile<I, P>
where I: FusedIterator, P: FnMut(&I::Item) -> bool {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I, P> FusedIterator for TakeWhile<I, P>
where I: FusedIterator, P: FnMut(&I::Item) -> bool {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I> FusedIterator for Skip<I> where I: FusedIterator {}
/// An iterator that only iterates over the first `n` iterations of `iter`.
#[stable(feature = "rust1", since = "1.0.0")]
impl<I> ExactSizeIterator for Take<I> where I: ExactSizeIterator {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I> FusedIterator for Take<I> where I: FusedIterator {}
#[unstable(feature = "trusted_len", issue = "37572")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I, U, F> FusedIterator for FlatMap<I, U, F>
where I: FusedIterator, U: IntoIterator, F: FnMut(I::Item) -> U {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I, U> FusedIterator for Flatten<I>
where I: FusedIterator, U: Iterator,
I::Item: IntoIterator<IntoIter = U, Item = U::Item> {}
done: bool
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I> FusedIterator for Fuse<I> where I: Iterator {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I> Iterator for Fuse<I> where I: FusedIterator {
#[inline]
fn next(&mut self) -> Option<<I as Iterator>::Item> {
}
}
-#[unstable(feature = "fused", reason = "recently added", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I> DoubleEndedIterator for Fuse<I>
where I: DoubleEndedIterator + FusedIterator
{
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<I: FusedIterator, F> FusedIterator for Inspect<I, F>
where F: FnMut(&I::Item) {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<A: Step> FusedIterator for ops::Range<A> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<A: Step> FusedIterator for ops::RangeFrom<A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<A: Step> FusedIterator for ops::RangeInclusive<A> {}
fn next_back(&mut self) -> Option<A> { Some(self.element.clone()) }
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<A: Clone> FusedIterator for Repeat<A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
fn next_back(&mut self) -> Option<A> { self.next() }
}
-#[unstable(feature = "fused", issue = "35602")]
+#[unstable(feature = "iterator_repeat_with", issue = "48169")]
impl<A, F: FnMut() -> A> FusedIterator for RepeatWith<F> {}
#[unstable(feature = "trusted_len", issue = "37572")]
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Empty<T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Empty<T> {}
// not #[derive] because that adds a Clone bound on T,
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T> TrustedLen for Once<T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Once<T> {}
/// Creates an iterator that yields an element exactly once.
/// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [`Iterator::fuse`]: ../../std/iter/trait.Iterator.html#method.fuse
/// [`Fuse`]: ../../std/iter/struct.Fuse.html
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
pub trait FusedIterator: Iterator {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, I: FusedIterator + ?Sized> FusedIterator for &'a mut I {}
/// An iterator that reports an accurate length using size_hint.
#![feature(allow_internal_unstable)]
#![feature(asm)]
#![feature(associated_type_defaults)]
+#![feature(attr_literals)]
#![feature(cfg_target_feature)]
#![feature(cfg_target_has_atomic)]
#![feature(concat_idents)]
#![feature(const_fn)]
#![feature(custom_attribute)]
+#![feature(doc_spotlight)]
#![feature(fundamental)]
#![feature(i128_type)]
#![feature(inclusive_range_syntax)]
#![feature(intrinsics)]
+#![feature(iterator_flatten)]
+#![feature(iterator_repeat_with)]
#![feature(lang_items)]
+#![feature(link_llvm_intrinsics)]
#![feature(never_type)]
#![feature(no_core)]
#![feature(on_unimplemented)]
#![feature(prelude_import)]
#![feature(repr_simd, platform_intrinsics)]
#![feature(rustc_attrs)]
+#![feature(rustc_const_unstable)]
+#![feature(simd_ffi)]
#![feature(specialization)]
#![feature(staged_api)]
+#![feature(stmt_expr_attributes)]
+#![feature(target_feature)]
#![feature(unboxed_closures)]
#![feature(untagged_unions)]
#![feature(unwind_attributes)]
-#![feature(doc_spotlight)]
-#![feature(rustc_const_unstable)]
-#![feature(iterator_repeat_with)]
-#![feature(iterator_flatten)]
+
+#![cfg_attr(stage0, allow(unused_attributes))]
#[prelude_import]
#[allow(unused)]
mod iter_private;
mod tuple;
mod unit;
+
+// Pull in the `coresimd` crate directly into libcore. This is where all the
+// architecture-specific (and vendor-specific) intrinsics are defined. AKA
+// things like SIMD and such. Note that the actual source for all this lies in a
+// different repository, rust-lang-nursery/stdsimd. That's why the setup here is
+// a bit wonky.
+#[path = "../stdsimd/coresimd/mod.rs"]
+#[allow(missing_docs, missing_debug_implementations, dead_code)]
+#[unstable(feature = "stdsimd", issue = "48556")]
+#[cfg(not(stage0))] // allow changes to how stdsimd works in stage0
+mod coresimd;
+
+#[unstable(feature = "stdsimd", issue = "48556")]
+#[cfg(not(stage0))]
+pub use coresimd::simd;
+#[unstable(feature = "stdsimd", issue = "48556")]
+#[cfg(not(stage0))]
+pub use coresimd::arch;
(self as $UnsignedT).swap_bytes() as Self
}
+ /// Reverses the bit pattern of the integer.
+ ///
+ /// # Examples
+ ///
+ /// Please note that this example is shared between integer types,
+ /// which explains why `i16` is used here.
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(reverse_bits)]
+ ///
+ /// let n: i16 = 0b00000000_01010101;
+ /// assert_eq!(n, 85);
+ ///
+ /// let m = n.reverse_bits();
+ ///
+ /// assert_eq!(m as u16, 0b10101010_00000000);
+ /// assert_eq!(m, -22016);
+ /// ```
+ #[unstable(feature = "reverse_bits", issue = "48763")]
+ #[cfg(not(stage0))]
+ #[inline]
+ pub fn reverse_bits(self) -> Self {
+ (self as $UnsignedT).reverse_bits() as Self
+ }
+
doc_comment! {
concat!("Converts an integer from big endian to the target's endianness.
unsafe { intrinsics::bswap(self as $ActualT) as Self }
}
+ /// Reverses the bit pattern of the integer.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// Please note that this example is shared between integer types,
+ /// which explains why `u16` is used here.
+ ///
+ /// ```
+ /// #![feature(reverse_bits)]
+ ///
+ /// let n: u16 = 0b00000000_01010101;
+ /// assert_eq!(n, 85);
+ ///
+ /// let m = n.reverse_bits();
+ ///
+ /// assert_eq!(m, 0b10101010_00000000);
+ /// assert_eq!(m, 43520);
+ /// ```
+ #[unstable(feature = "reverse_bits", issue = "48763")]
+ #[cfg(not(stage0))]
+ #[inline]
+ pub fn reverse_bits(self) -> Self {
+ unsafe { intrinsics::bitreverse(self as $ActualT) as Self }
+ }
+
doc_comment! {
concat!("Converts an integer from big endian to the target's endianness.
/// [`usize`]: ../../std/primitive.usize.html
///
/// ```
- /// let num_as_str: Option<String> = Some("10".to_string());
+ /// let text: Option<String> = Some("Hello, world!".to_string());
/// // First, cast `Option<String>` to `Option<&String>` with `as_ref`,
- /// // then consume *that* with `map`, leaving `num_as_str` on the stack.
- /// let num_as_int: Option<usize> = num_as_str.as_ref().map(|n| n.len());
- /// println!("still can print num_as_str: {:?}", num_as_str);
+ /// // then consume *that* with `map`, leaving `text` on the stack.
+ /// let text_length: Option<usize> = text.as_ref().map(|s| s.len());
+ /// println!("still can print text: {:?}", text);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
/// # Examples
///
/// ```
- /// #![feature(option_ref_mut_cloned)]
/// let mut x = 12;
/// let opt_x = Some(&mut x);
/// assert_eq!(opt_x, Some(&mut 12));
/// let cloned = opt_x.cloned();
/// assert_eq!(cloned, Some(12));
/// ```
- #[unstable(feature = "option_ref_mut_cloned", issue = "43738")]
+ #[stable(since = "1.26.0", feature = "option_ref_mut_cloned")]
pub fn cloned(self) -> Option<T> {
self.map(|t| t.clone())
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, A> ExactSizeIterator for Iter<'a, A> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, A> FusedIterator for Iter<'a, A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, A> ExactSizeIterator for IterMut<'a, A> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, A> FusedIterator for IterMut<'a, A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<'a, A> TrustedLen for IterMut<'a, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> ExactSizeIterator for IntoIter<A> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<A> FusedIterator for IntoIter<A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for IterMut<'a, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Iter<'a, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for IterMut<'a, T> {}
#[unstable(feature = "trusted_len", issue = "37572")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T, P> FusedIterator for Split<'a, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the subslices of the vector which are separated
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T, P> FusedIterator for SplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over subslices separated by elements that match a predicate
}
}
-//#[unstable(feature = "fused", issue = "35602")]
#[unstable(feature = "slice_rsplit", issue = "41020")]
impl<'a, T, P> FusedIterator for RSplit<'a, T, P> where P: FnMut(&T) -> bool {}
}
}
-//#[unstable(feature = "fused", issue = "35602")]
#[unstable(feature = "slice_rsplit", issue = "41020")]
impl<'a, T, P> FusedIterator for RSplitMut<'a, T, P> where P: FnMut(&T) -> bool {}
}
}
- #[unstable(feature = "fused", issue = "35602")]
+ #[stable(feature = "fused", since = "1.26.0")]
impl<'a, $elem, P> FusedIterator for $name<'a, $elem, P>
where P: FnMut(&T) -> bool {}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Windows<'a, T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Windows<'a, T> {}
#[doc(hidden)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Chunks<'a, T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for Chunks<'a, T> {}
#[doc(hidden)]
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for ChunksMut<'a, T> {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T> FusedIterator for ChunksMut<'a, T> {}
#[doc(hidden)]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[unstable(feature = "exact_chunks", issue = "47115")]
impl<'a, T> FusedIterator for ExactChunks<'a, T> {}
#[doc(hidden)]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[unstable(feature = "exact_chunks", issue = "47115")]
impl<'a, T> FusedIterator for ExactChunksMut<'a, T> {}
#[doc(hidden)]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for Chars<'a> {}
impl<'a> Chars<'a> {
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for CharIndices<'a> {}
impl<'a> CharIndices<'a> {
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for Bytes<'a> {}
#[unstable(feature = "trusted_len", issue = "37572")]
}
}
- #[unstable(feature = "fused", issue = "35602")]
+ #[stable(feature = "fused", since = "1.26.0")]
impl<'a, P: Pattern<'a>> FusedIterator for $forward_iterator<'a, P> {}
- #[unstable(feature = "fused", issue = "35602")]
+ #[stable(feature = "fused", since = "1.26.0")]
impl<'a, P: Pattern<'a>> FusedIterator for $reverse_iterator<'a, P>
where P::Searcher: ReverseSearcher<'a> {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for Lines<'a> {}
/// Created with the method [`lines_any`].
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
#[allow(deprecated)]
impl<'a> FusedIterator for LinesAny<'a> {}
#![feature(try_trait)]
#![feature(exact_chunks)]
#![feature(atomic_nand)]
+#![feature(reverse_bits)]
extern crate core;
extern crate test;
assert_eq!(_1.swap_bytes(), _1);
}
+ #[test]
+ fn test_reverse_bits() {
+ assert_eq!(A.reverse_bits().reverse_bits(), A);
+ assert_eq!(B.reverse_bits().reverse_bits(), B);
+ assert_eq!(C.reverse_bits().reverse_bits(), C);
+
+ // Swapping these should make no difference
+ assert_eq!(_0.reverse_bits(), _0);
+ assert_eq!(_1.reverse_bits(), _1);
+ }
+
#[test]
fn test_le() {
assert_eq!($T::from_le(A.to_le()), A);
}
define_dep_nodes!( <'tcx>
+ // We use this for most things when incr. comp. is turned off.
+ [] Null,
+
// Represents the `Krate` as a whole (the `hir::Krate` value) (as
// distinct from the krate module). This is basically a hash of
// the entire krate, so if you read from `Krate` (e.g., by calling
[] RvaluePromotableMap(DefId),
[] ImplParent(DefId),
[] TraitOfItem(DefId),
- [] IsExportedSymbol(DefId),
+ [] IsReachableNonGeneric(DefId),
[] IsMirAvailable(DefId),
[] ItemAttrs(DefId),
[] FnArgNames(DefId),
[] GetPanicStrategy(CrateNum),
[] IsNoBuiltins(CrateNum),
[] ImplDefaultness(DefId),
- [] ExportedSymbolIds(CrateNum),
+ [] ReachableNonGenerics(CrateNum),
[] NativeLibraries(CrateNum),
[] PluginRegistrarFn(CrateNum),
[] DeriveRegistrarFn(CrateNum),
[input] MissingExternCrateItem(CrateNum),
[input] UsedCrateSource(CrateNum),
[input] PostorderCnums,
- [input] HasCloneClosures(CrateNum),
- [input] HasCopyClosures(CrateNum),
+ [] HasCloneClosures(CrateNum),
+ [] HasCopyClosures(CrateNum),
// This query is not expected to have inputs -- as a result, it's
// not a good candidate for "replay" because it's essentially a
[] CompileCodegenUnit(InternedString),
[input] OutputFilenames,
[anon] NormalizeTy,
- // We use this for most things when incr. comp. is turned off.
- [] Null,
[] SubstituteNormalizeAndTestPredicates { key: (DefId, &'tcx Substs<'tcx>) },
[] GetSymbolExportLevel(DefId),
+ [input] Features,
);
trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
{
assert!(!self.is_collecting_in_band_lifetimes);
assert!(self.lifetimes_to_define.is_empty());
- self.is_collecting_in_band_lifetimes = self.sess.features.borrow().in_band_lifetimes;
+ self.is_collecting_in_band_lifetimes = self.sess.features_untracked().in_band_lifetimes;
assert!(self.in_band_ty_params.is_empty());
let span = t.span;
match itctx {
ImplTraitContext::Existential => {
- let has_feature = self.sess.features.borrow().conservative_impl_trait;
+ let has_feature = self.sess.features_untracked().conservative_impl_trait;
if !t.span.allows_unstable() && !has_feature {
emit_feature_err(&self.sess.parse_sess, "conservative_impl_trait",
t.span, GateIssue::Language,
}, lifetimes)
},
ImplTraitContext::Universal(def_id) => {
- let has_feature = self.sess.features.borrow().universal_impl_trait;
+ let has_feature = self.sess.features_untracked().universal_impl_trait;
if !t.span.allows_unstable() && !has_feature {
emit_feature_err(&self.sess.parse_sess, "universal_impl_trait",
t.span, GateIssue::Language,
}
fn maybe_lint_bare_trait(&self, span: Span, id: NodeId, is_global: bool) {
- if self.sess.features.borrow().dyn_trait {
+ if self.sess.features_untracked().dyn_trait {
self.sess.buffer_lint_with_diagnostic(
builtin::BARE_TRAIT_OBJECT, id, span,
"trait objects without an explicit `dyn` are deprecated",
if self.next_comment().is_none() {
self.s.hardbreak()?;
}
- loop {
- match self.next_comment() {
- Some(ref cmnt) => {
- self.print_comment(cmnt)?;
- }
- _ => break,
- }
+ while let Some(ref cmnt) = self.next_comment() {
+ self.print_comment(cmnt)?
}
Ok(())
}
use std::mem;
use syntax::ast;
+use syntax::feature_gate;
use syntax::parse::token;
use syntax::symbol::InternedString;
use syntax::tokenstream;
(pos.0 - filemap_start.0, width as u32)
}
+
+
+
+impl<'gcx> HashStable<StableHashingContext<'gcx>> for feature_gate::Features {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'gcx>,
+ hasher: &mut StableHasher<W>) {
+ // Unfortunately we cannot exhaustively list fields here, since the
+ // struct is macro generated.
+ self.declared_stable_lang_features.hash_stable(hcx, hasher);
+ self.declared_lib_features.hash_stable(hcx, hasher);
+
+ self.walk_feature_fields(|feature_name, value| {
+ feature_name.hash_stable(hcx, hasher);
+ value.hash_stable(hcx, hasher);
+ });
+ }
+}
impl_stable_hash_for!(struct ty::TraitRef<'tcx> { def_id, substs });
impl_stable_hash_for!(struct ty::TraitPredicate<'tcx> { trait_ref });
-impl_stable_hash_for!(tuple_struct ty::EquatePredicate<'tcx> { t1, t2 });
impl_stable_hash_for!(struct ty::SubtypePredicate<'tcx> { a_is_expected, a, b });
impl<'gcx, A, B> HashStable<StableHashingContext<'gcx>>
ty::Predicate::Trait(ref pred) => {
pred.hash_stable(hcx, hasher);
}
- ty::Predicate::Equate(ref pred) => {
- pred.hash_stable(hcx, hasher);
- }
ty::Predicate::Subtype(ref pred) => {
pred.hash_stable(hcx, hasher);
}
//! time of error detection.
use infer;
-use super::{InferCtxt, TypeTrace, SubregionOrigin, RegionVariableOrigin, ValuePairs};
+use super::{InferCtxt, RegionVariableOrigin, SubregionOrigin, TypeTrace, ValuePairs};
use super::region_constraints::GenericKind;
use super::lexical_region_resolve::RegionResolutionError;
pub mod nice_region_error;
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- pub fn note_and_explain_region(self,
- region_scope_tree: ®ion::ScopeTree,
- err: &mut DiagnosticBuilder,
- prefix: &str,
- region: ty::Region<'tcx>,
- suffix: &str) {
- fn item_scope_tag(item: &hir::Item) -> &'static str {
- match item.node {
- hir::ItemImpl(..) => "impl",
- hir::ItemStruct(..) => "struct",
- hir::ItemUnion(..) => "union",
- hir::ItemEnum(..) => "enum",
- hir::ItemTrait(..) => "trait",
- hir::ItemFn(..) => "function body",
- _ => "item"
- }
- }
-
- fn trait_item_scope_tag(item: &hir::TraitItem) -> &'static str {
- match item.node {
- hir::TraitItemKind::Method(..) => "method body",
- hir::TraitItemKind::Const(..) |
- hir::TraitItemKind::Type(..) => "associated item"
- }
- }
-
- fn impl_item_scope_tag(item: &hir::ImplItem) -> &'static str {
- match item.node {
- hir::ImplItemKind::Method(..) => "method body",
- hir::ImplItemKind::Const(..) |
- hir::ImplItemKind::Type(_) => "associated item"
- }
- }
-
- fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- heading: &str, span: Span)
- -> (String, Option<Span>) {
- let lo = tcx.sess.codemap().lookup_char_pos_adj(span.lo());
- (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize() + 1),
- Some(span))
- }
-
+ pub fn note_and_explain_region(
+ self,
+ region_scope_tree: ®ion::ScopeTree,
+ err: &mut DiagnosticBuilder,
+ prefix: &str,
+ region: ty::Region<'tcx>,
+ suffix: &str,
+ ) {
let (description, span) = match *region {
ty::ReScope(scope) => {
let new_string;
let unknown_scope = || {
- format!("{}unknown scope: {:?}{}. Please report a bug.",
- prefix, scope, suffix)
+ format!(
+ "{}unknown scope: {:?}{}. Please report a bug.",
+ prefix, scope, suffix
+ )
};
let span = scope.span(self, region_scope_tree);
let tag = match self.hir.find(scope.node_id(self, region_scope_tree)) {
hir::ExprCall(..) => "call",
hir::ExprMethodCall(..) => "method call",
hir::ExprMatch(.., hir::MatchSource::IfLetDesugar { .. }) => "if let",
- hir::ExprMatch(.., hir::MatchSource::WhileLetDesugar) => "while let",
- hir::ExprMatch(.., hir::MatchSource::ForLoopDesugar) => "for",
+ hir::ExprMatch(.., hir::MatchSource::WhileLetDesugar) => "while let",
+ hir::ExprMatch(.., hir::MatchSource::ForLoopDesugar) => "for",
hir::ExprMatch(..) => "match",
_ => "expression",
},
Some(hir_map::NodeStmt(_)) => "statement",
- Some(hir_map::NodeItem(it)) => item_scope_tag(&it),
- Some(hir_map::NodeTraitItem(it)) => trait_item_scope_tag(&it),
- Some(hir_map::NodeImplItem(it)) => impl_item_scope_tag(&it),
+ Some(hir_map::NodeItem(it)) => Self::item_scope_tag(&it),
+ Some(hir_map::NodeTraitItem(it)) => Self::trait_item_scope_tag(&it),
+ Some(hir_map::NodeImplItem(it)) => Self::impl_item_scope_tag(&it),
Some(_) | None => {
err.span_note(span, &unknown_scope());
return;
};
let scope_decorated_tag = match scope.data() {
region::ScopeData::Node(_) => tag,
- region::ScopeData::CallSite(_) => {
- "scope of call-site for function"
- }
- region::ScopeData::Arguments(_) => {
- "scope of function body"
- }
+ region::ScopeData::CallSite(_) => "scope of call-site for function",
+ region::ScopeData::Arguments(_) => "scope of function body",
region::ScopeData::Destruction(_) => {
new_string = format!("destruction scope surrounding {}", tag);
&new_string[..]
}
region::ScopeData::Remainder(r) => {
- new_string = format!("block suffix following statement {}",
- r.first_statement_index.index());
+ new_string = format!(
+ "block suffix following statement {}",
+ r.first_statement_index.index()
+ );
&new_string[..]
}
};
- explain_span(self, scope_decorated_tag, span)
+ self.explain_span(scope_decorated_tag, span)
}
- ty::ReEarlyBound(_) |
- ty::ReFree(_) => {
- let scope = region.free_region_binding_scope(self);
- let node = self.hir.as_local_node_id(scope)
- .unwrap_or(DUMMY_NODE_ID);
- let unknown;
- let tag = match self.hir.find(node) {
- Some(hir_map::NodeBlock(_)) |
- Some(hir_map::NodeExpr(_)) => "body",
- Some(hir_map::NodeItem(it)) => item_scope_tag(&it),
- Some(hir_map::NodeTraitItem(it)) => trait_item_scope_tag(&it),
- Some(hir_map::NodeImplItem(it)) => impl_item_scope_tag(&it),
-
- // this really should not happen, but it does:
- // FIXME(#27942)
- Some(_) => {
- unknown = format!("unexpected node ({}) for scope {:?}. \
- Please report a bug.",
- self.hir.node_to_string(node), scope);
- &unknown
- }
- None => {
- unknown = format!("unknown node for scope {:?}. \
- Please report a bug.", scope);
- &unknown
- }
- };
- let (prefix, span) = match *region {
- ty::ReEarlyBound(ref br) => {
- (format!("the lifetime {} as defined on", br.name),
- self.sess.codemap().def_span(self.hir.span(node)))
- }
- ty::ReFree(ref fr) => {
- match fr.bound_region {
- ty::BrAnon(idx) => {
- (format!("the anonymous lifetime #{} defined on", idx + 1),
- self.hir.span(node))
- }
- ty::BrFresh(_) => ("an anonymous lifetime defined on".to_owned(),
- self.hir.span(node)),
- _ => (format!("the lifetime {} as defined on", fr.bound_region),
- self.sess.codemap().def_span(self.hir.span(node))),
- }
- }
- _ => bug!()
- };
- let (msg, opt_span) = explain_span(self, tag, span);
- (format!("{} {}", prefix, msg), opt_span)
- }
+ ty::ReEarlyBound(_) | ty::ReFree(_) => self.msg_span_from_free_region(region),
ty::ReStatic => ("the static lifetime".to_owned(), None),
//
// We shouldn't really be having unification failures with ReVar
// and ReLateBound though.
- ty::ReSkolemized(..) |
- ty::ReVar(_) |
- ty::ReLateBound(..) |
- ty::ReErased => {
+ ty::ReSkolemized(..) | ty::ReVar(_) | ty::ReLateBound(..) | ty::ReErased => {
(format!("lifetime {:?}", region), None)
}
// We shouldn't encounter an error message with ReClosureBound.
ty::ReClosureBound(..) => {
- bug!(
- "encountered unexpected ReClosureBound: {:?}",
- region,
+ bug!("encountered unexpected ReClosureBound: {:?}", region,);
+ }
+ };
+
+ TyCtxt::emit_msg_span(err, prefix, description, span, suffix);
+ }
+
+ pub fn note_and_explain_free_region(
+ self,
+ err: &mut DiagnosticBuilder,
+ prefix: &str,
+ region: ty::Region<'tcx>,
+ suffix: &str,
+ ) {
+ let (description, span) = self.msg_span_from_free_region(region);
+
+ TyCtxt::emit_msg_span(err, prefix, description, span, suffix);
+ }
+
+ fn msg_span_from_free_region(self, region: ty::Region<'tcx>) -> (String, Option<Span>) {
+ let scope = region.free_region_binding_scope(self);
+ let node = self.hir.as_local_node_id(scope).unwrap_or(DUMMY_NODE_ID);
+ let unknown;
+ let tag = match self.hir.find(node) {
+ Some(hir_map::NodeBlock(_)) | Some(hir_map::NodeExpr(_)) => "body",
+ Some(hir_map::NodeItem(it)) => Self::item_scope_tag(&it),
+ Some(hir_map::NodeTraitItem(it)) => Self::trait_item_scope_tag(&it),
+ Some(hir_map::NodeImplItem(it)) => Self::impl_item_scope_tag(&it),
+
+ // this really should not happen, but it does:
+ // FIXME(#27942)
+ Some(_) => {
+ unknown = format!(
+ "unexpected node ({}) for scope {:?}. \
+ Please report a bug.",
+ self.hir.node_to_string(node),
+ scope
+ );
+ &unknown
+ }
+ None => {
+ unknown = format!(
+ "unknown node for scope {:?}. \
+ Please report a bug.",
+ scope
);
+ &unknown
}
};
+ let (prefix, span) = match *region {
+ ty::ReEarlyBound(ref br) => (
+ format!("the lifetime {} as defined on", br.name),
+ self.sess.codemap().def_span(self.hir.span(node)),
+ ),
+ ty::ReFree(ref fr) => match fr.bound_region {
+ ty::BrAnon(idx) => (
+ format!("the anonymous lifetime #{} defined on", idx + 1),
+ self.hir.span(node),
+ ),
+ ty::BrFresh(_) => (
+ "an anonymous lifetime defined on".to_owned(),
+ self.hir.span(node),
+ ),
+ _ => (
+ format!("the lifetime {} as defined on", fr.bound_region),
+ self.sess.codemap().def_span(self.hir.span(node)),
+ ),
+ },
+ _ => bug!(),
+ };
+ let (msg, opt_span) = self.explain_span(tag, span);
+ (format!("{} {}", prefix, msg), opt_span)
+ }
+
+ fn emit_msg_span(
+ err: &mut DiagnosticBuilder,
+ prefix: &str,
+ description: String,
+ span: Option<Span>,
+ suffix: &str,
+ ) {
let message = format!("{}{}{}", prefix, description, suffix);
+
if let Some(span) = span {
err.span_note(span, &message);
} else {
err.note(&message);
}
}
+
+ fn item_scope_tag(item: &hir::Item) -> &'static str {
+ match item.node {
+ hir::ItemImpl(..) => "impl",
+ hir::ItemStruct(..) => "struct",
+ hir::ItemUnion(..) => "union",
+ hir::ItemEnum(..) => "enum",
+ hir::ItemTrait(..) => "trait",
+ hir::ItemFn(..) => "function body",
+ _ => "item",
+ }
+ }
+
+ fn trait_item_scope_tag(item: &hir::TraitItem) -> &'static str {
+ match item.node {
+ hir::TraitItemKind::Method(..) => "method body",
+ hir::TraitItemKind::Const(..) | hir::TraitItemKind::Type(..) => "associated item",
+ }
+ }
+
+ fn impl_item_scope_tag(item: &hir::ImplItem) -> &'static str {
+ match item.node {
+ hir::ImplItemKind::Method(..) => "method body",
+ hir::ImplItemKind::Const(..) | hir::ImplItemKind::Type(_) => "associated item",
+ }
+ }
+
+ fn explain_span(self, heading: &str, span: Span) -> (String, Option<Span>) {
+ let lo = self.sess.codemap().lookup_char_pos_adj(span.lo());
+ (
+ format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize() + 1),
+ Some(span),
+ )
+ }
}
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
- pub fn report_region_errors(&self,
- region_scope_tree: ®ion::ScopeTree,
- errors: &Vec<RegionResolutionError<'tcx>>,
- will_later_be_reported_by_nll: bool) {
+ pub fn report_region_errors(
+ &self,
+ region_scope_tree: ®ion::ScopeTree,
+ errors: &Vec<RegionResolutionError<'tcx>>,
+ will_later_be_reported_by_nll: bool,
+ ) {
debug!("report_region_errors(): {} errors to start", errors.len());
- if will_later_be_reported_by_nll && self.tcx.sess.nll() {
+ if will_later_be_reported_by_nll && self.tcx.nll() {
// With `#![feature(nll)]`, we want to present a nice user
// experience, so don't even mention the errors from the
// AST checker.
- if self.tcx.sess.features.borrow().nll {
+ if self.tcx.features().nll {
return;
}
// But with -Znll, it's nice to have some note for later.
for error in errors {
match *error {
- RegionResolutionError::ConcreteFailure(ref origin, ..) |
- RegionResolutionError::GenericBoundFailure(ref origin, ..) => {
- self.tcx.sess.span_warn(
- origin.span(),
- "not reporting region error due to -Znll");
+ RegionResolutionError::ConcreteFailure(ref origin, ..)
+ | RegionResolutionError::GenericBoundFailure(ref origin, ..) => {
+ self.tcx
+ .sess
+ .span_warn(origin.span(), "not reporting region error due to -Znll");
}
RegionResolutionError::SubSupConflict(ref rvo, ..) => {
- self.tcx.sess.span_warn(
- rvo.span(),
- "not reporting region error due to -Znll");
+ self.tcx
+ .sess
+ .span_warn(rvo.span(), "not reporting region error due to -Znll");
}
}
}
// together into a `ProcessedErrors` group:
let errors = self.process_errors(errors);
- debug!("report_region_errors: {} errors after preprocessing", errors.len());
+ debug!(
+ "report_region_errors: {} errors after preprocessing",
+ errors.len()
+ );
for error in errors {
debug!("report_region_errors: error = {:?}", error);
// the error. If all of these fails, we fall back to a rather
// general bit of code that displays the error information
RegionResolutionError::ConcreteFailure(origin, sub, sup) => {
- self.report_concrete_failure(region_scope_tree, origin, sub, sup).emit();
+ self.report_concrete_failure(region_scope_tree, origin, sub, sup)
+ .emit();
}
RegionResolutionError::GenericBoundFailure(origin, param_ty, sub) => {
);
}
- RegionResolutionError::SubSupConflict(var_origin,
- sub_origin,
- sub_r,
- sup_origin,
- sup_r) => {
- self.report_sub_sup_conflict(region_scope_tree,
- var_origin,
- sub_origin,
- sub_r,
- sup_origin,
- sup_r);
+ RegionResolutionError::SubSupConflict(
+ var_origin,
+ sub_origin,
+ sub_r,
+ sup_origin,
+ sup_r,
+ ) => {
+ self.report_sub_sup_conflict(
+ region_scope_tree,
+ var_origin,
+ sub_origin,
+ sub_r,
+ sup_origin,
+ sup_r,
+ );
}
}
}
// The method also attempts to weed out messages that seem like
// duplicates that will be unhelpful to the end-user. But
// obviously it never weeds out ALL errors.
- fn process_errors(&self, errors: &Vec<RegionResolutionError<'tcx>>)
- -> Vec<RegionResolutionError<'tcx>> {
+ fn process_errors(
+ &self,
+ errors: &Vec<RegionResolutionError<'tcx>>,
+ ) -> Vec<RegionResolutionError<'tcx>> {
debug!("process_errors()");
// We want to avoid reporting generic-bound failures if we can
let is_bound_failure = |e: &RegionResolutionError<'tcx>| match *e {
RegionResolutionError::GenericBoundFailure(..) => true,
- RegionResolutionError::ConcreteFailure(..) |
- RegionResolutionError::SubSupConflict(..) => false,
+ RegionResolutionError::ConcreteFailure(..)
+ | RegionResolutionError::SubSupConflict(..) => false,
};
-
let mut errors = if errors.iter().all(|e| is_bound_failure(e)) {
errors.clone()
} else {
- errors.iter().filter(|&e| !is_bound_failure(e)).cloned().collect()
+ errors
+ .iter()
+ .filter(|&e| !is_bound_failure(e))
+ .cloned()
+ .collect()
};
// sort the errors by span, for better error message stability.
}
/// Adds a note if the types come from similarly named crates
- fn check_and_note_conflicting_crates(&self,
- err: &mut DiagnosticBuilder,
- terr: &TypeError<'tcx>,
- sp: Span) {
+ fn check_and_note_conflicting_crates(
+ &self,
+ err: &mut DiagnosticBuilder,
+ terr: &TypeError<'tcx>,
+ sp: Span,
+ ) {
let report_path_match = |err: &mut DiagnosticBuilder, did1: DefId, did2: DefId| {
// Only external crates, if either is from a local
// module we could have false positives
let found_abs_path = self.tcx.absolute_item_path_str(did2);
// We compare strings because DefPath can be different
// for imported and non-imported crates
- if exp_path == found_path
- || exp_abs_path == found_abs_path {
+ if exp_path == found_path || exp_abs_path == found_abs_path {
let crate_name = self.tcx.crate_name(did1.krate);
- err.span_note(sp, &format!("Perhaps two different versions \
- of crate `{}` are being used?",
- crate_name));
+ err.span_note(
+ sp,
+ &format!(
+ "Perhaps two different versions \
+ of crate `{}` are being used?",
+ crate_name
+ ),
+ );
}
}
};
match (&exp_found.expected.sty, &exp_found.found.sty) {
(&ty::TyAdt(exp_adt, _), &ty::TyAdt(found_adt, _)) => {
report_path_match(err, exp_adt.did, found_adt.did);
- },
- _ => ()
+ }
+ _ => (),
}
- },
+ }
TypeError::Traits(ref exp_found) => {
report_path_match(err, exp_found.expected, exp_found.found);
- },
- _ => () // FIXME(#22750) handle traits and stuff
+ }
+ _ => (), // FIXME(#22750) handle traits and stuff
}
}
- fn note_error_origin(&self,
- err: &mut DiagnosticBuilder<'tcx>,
- cause: &ObligationCause<'tcx>)
- {
+ fn note_error_origin(&self, err: &mut DiagnosticBuilder<'tcx>, cause: &ObligationCause<'tcx>) {
match cause.code {
ObligationCauseCode::MatchExpressionArm { arm_span, source } => match source {
- hir::MatchSource::IfLetDesugar {..} => {
+ hir::MatchSource::IfLetDesugar { .. } => {
let msg = "`if let` arm with an incompatible type";
if self.tcx.sess.codemap().is_multiline(arm_span) {
err.span_note(arm_span, msg);
}
}
},
- _ => ()
+ _ => (),
}
}
/// Bar<Qux>
/// -------- this type is the same as a type argument in the other type, not highlighted
/// ```
- fn highlight_outer(&self,
- value: &mut DiagnosticStyledString,
- other_value: &mut DiagnosticStyledString,
- name: String,
- sub: &ty::subst::Substs<'tcx>,
- pos: usize,
- other_ty: &Ty<'tcx>) {
+ fn highlight_outer(
+ &self,
+ value: &mut DiagnosticStyledString,
+ other_value: &mut DiagnosticStyledString,
+ name: String,
+ sub: &ty::subst::Substs<'tcx>,
+ pos: usize,
+ other_ty: &Ty<'tcx>,
+ ) {
// `value` and `other_value` hold two incomplete type representation for display.
// `name` is the path of both types being compared. `sub`
value.push_highlighted(name);
}
// Output the lifetimes for the first type
- let lifetimes = sub.regions().map(|lifetime| {
- let s = format!("{}", lifetime);
- if s.is_empty() {
- "'_".to_string()
- } else {
- s
- }
- }).collect::<Vec<_>>().join(", ");
+ let lifetimes = sub.regions()
+ .map(|lifetime| {
+ let s = format!("{}", lifetime);
+ if s.is_empty() {
+ "'_".to_string()
+ } else {
+ s
+ }
+ })
+ .collect::<Vec<_>>()
+ .join(", ");
if !lifetimes.is_empty() {
if sub.regions().count() < len {
value.push_normal(lifetimes + &", ");
/// Bar<Qux>
/// -------- this type is the same as a type argument in the other type, not highlighted
/// ```
- fn cmp_type_arg(&self,
- mut t1_out: &mut DiagnosticStyledString,
- mut t2_out: &mut DiagnosticStyledString,
- path: String,
- sub: &ty::subst::Substs<'tcx>,
- other_path: String,
- other_ty: &Ty<'tcx>) -> Option<()> {
+ fn cmp_type_arg(
+ &self,
+ mut t1_out: &mut DiagnosticStyledString,
+ mut t2_out: &mut DiagnosticStyledString,
+ path: String,
+ sub: &ty::subst::Substs<'tcx>,
+ other_path: String,
+ other_ty: &Ty<'tcx>,
+ ) -> Option<()> {
for (i, ta) in sub.types().enumerate() {
if &ta == other_ty {
self.highlight_outer(&mut t1_out, &mut t2_out, path, sub, i, &other_ty);
}
/// Add a `,` to the type representation only if it is appropriate.
- fn push_comma(&self,
- value: &mut DiagnosticStyledString,
- other_value: &mut DiagnosticStyledString,
- len: usize,
- pos: usize) {
+ fn push_comma(
+ &self,
+ value: &mut DiagnosticStyledString,
+ other_value: &mut DiagnosticStyledString,
+ len: usize,
+ pos: usize,
+ ) {
if len > 0 && pos != len - 1 {
value.push_normal(", ");
other_value.push_normal(", ");
/// Compare two given types, eliding parts that are the same between them and highlighting
/// relevant differences, and return two representation of those types for highlighted printing.
- fn cmp(&self, t1: Ty<'tcx>, t2: Ty<'tcx>)
- -> (DiagnosticStyledString, DiagnosticStyledString)
- {
+ fn cmp(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> (DiagnosticStyledString, DiagnosticStyledString) {
fn equals<'tcx>(a: &Ty<'tcx>, b: &Ty<'tcx>) -> bool {
match (&a.sty, &b.sty) {
(a, b) if *a == *b => true,
- (&ty::TyInt(_), &ty::TyInfer(ty::InferTy::IntVar(_))) |
- (&ty::TyInfer(ty::InferTy::IntVar(_)), &ty::TyInt(_)) |
- (&ty::TyInfer(ty::InferTy::IntVar(_)), &ty::TyInfer(ty::InferTy::IntVar(_))) |
- (&ty::TyFloat(_), &ty::TyInfer(ty::InferTy::FloatVar(_))) |
- (&ty::TyInfer(ty::InferTy::FloatVar(_)), &ty::TyFloat(_)) |
- (&ty::TyInfer(ty::InferTy::FloatVar(_)),
- &ty::TyInfer(ty::InferTy::FloatVar(_))) => true,
+ (&ty::TyInt(_), &ty::TyInfer(ty::InferTy::IntVar(_)))
+ | (&ty::TyInfer(ty::InferTy::IntVar(_)), &ty::TyInt(_))
+ | (&ty::TyInfer(ty::InferTy::IntVar(_)), &ty::TyInfer(ty::InferTy::IntVar(_)))
+ | (&ty::TyFloat(_), &ty::TyInfer(ty::InferTy::FloatVar(_)))
+ | (&ty::TyInfer(ty::InferTy::FloatVar(_)), &ty::TyFloat(_))
+ | (
+ &ty::TyInfer(ty::InferTy::FloatVar(_)),
+ &ty::TyInfer(ty::InferTy::FloatVar(_)),
+ ) => true,
_ => false,
}
}
- fn push_ty_ref<'tcx>(r: &ty::Region<'tcx>,
- tnm: &ty::TypeAndMut<'tcx>,
- s: &mut DiagnosticStyledString) {
+ fn push_ty_ref<'tcx>(
+ r: &ty::Region<'tcx>,
+ tnm: &ty::TypeAndMut<'tcx>,
+ s: &mut DiagnosticStyledString,
+ ) {
let r = &format!("{}", r);
- s.push_highlighted(format!("&{}{}{}",
- r,
- if r == "" {
- ""
- } else {
- " "
- },
- if tnm.mutbl == hir::MutMutable {
- "mut "
- } else {
- ""
- }));
+ s.push_highlighted(format!(
+ "&{}{}{}",
+ r,
+ if r == "" { "" } else { " " },
+ if tnm.mutbl == hir::MutMutable {
+ "mut "
+ } else {
+ ""
+ }
+ ));
s.push_normal(format!("{}", tnm.ty));
}
// Foo<Bar<Qux>
// ------- this type argument is exactly the same as the other type
// Bar<Qux>
- if self.cmp_type_arg(&mut values.0,
- &mut values.1,
- path1.clone(),
- sub1,
- path2.clone(),
- &t2).is_some() {
+ if self.cmp_type_arg(
+ &mut values.0,
+ &mut values.1,
+ path1.clone(),
+ sub1,
+ path2.clone(),
+ &t2,
+ ).is_some()
+ {
return values;
}
// Check for case:
// Bar<Qux>
// Foo<Bar<Qux>>
// ------- this type argument is exactly the same as the other type
- if self.cmp_type_arg(&mut values.1,
- &mut values.0,
- path2,
- sub2,
- path1,
- &t1).is_some() {
+ if self.cmp_type_arg(&mut values.1, &mut values.0, path2, sub2, path1, &t1)
+ .is_some()
+ {
return values;
}
// We couldn't find anything in common, highlight everything.
// let x: Bar<Qux> = y::<Foo<Zar>>();
- (DiagnosticStyledString::highlighted(format!("{}", t1)),
- DiagnosticStyledString::highlighted(format!("{}", t2)))
+ (
+ DiagnosticStyledString::highlighted(format!("{}", t1)),
+ DiagnosticStyledString::highlighted(format!("{}", t2)),
+ )
}
}
_ => {
if t1 == t2 {
// The two types are the same, elide and don't highlight.
- (DiagnosticStyledString::normal("_"), DiagnosticStyledString::normal("_"))
+ (
+ DiagnosticStyledString::normal("_"),
+ DiagnosticStyledString::normal("_"),
+ )
} else {
// We couldn't find anything in common, highlight everything.
- (DiagnosticStyledString::highlighted(format!("{}", t1)),
- DiagnosticStyledString::highlighted(format!("{}", t2)))
+ (
+ DiagnosticStyledString::highlighted(format!("{}", t1)),
+ DiagnosticStyledString::highlighted(format!("{}", t2)),
+ )
}
}
}
}
- pub fn note_type_err(&self,
- diag: &mut DiagnosticBuilder<'tcx>,
- cause: &ObligationCause<'tcx>,
- secondary_span: Option<(Span, String)>,
- mut values: Option<ValuePairs<'tcx>>,
- terr: &TypeError<'tcx>)
- {
+ pub fn note_type_err(
+ &self,
+ diag: &mut DiagnosticBuilder<'tcx>,
+ cause: &ObligationCause<'tcx>,
+ secondary_span: Option<(Span, String)>,
+ mut values: Option<ValuePairs<'tcx>>,
+ terr: &TypeError<'tcx>,
+ ) {
// For some types of errors, expected-found does not make
// sense, so just ignore the values we were given.
match terr {
- TypeError::CyclicTy(_) => { values = None; }
- _ => { }
+ TypeError::CyclicTy(_) => {
+ values = None;
+ }
+ _ => {}
}
let (expected_found, exp_found, is_simple_error) = match values {
Some(values) => {
let (is_simple_error, exp_found) = match values {
ValuePairs::Types(exp_found) => {
- let is_simple_err = exp_found.expected.is_primitive()
- && exp_found.found.is_primitive();
+ let is_simple_err =
+ exp_found.expected.is_primitive() && exp_found.found.is_primitive();
(is_simple_err, Some(exp_found))
}
None => {
// Derived error. Cancel the emitter.
self.tcx.sess.diagnostic().cancel(diag);
- return
+ return;
}
};
(vals, exp_found, is_simple_error)
match (terr, is_simple_error, expected == found) {
(&TypeError::Sorts(ref values), false, true) => {
diag.note_expected_found_extra(
- &"type", expected, found,
+ &"type",
+ expected,
+ found,
&format!(" ({})", values.expected.sort_string(self.tcx)),
- &format!(" ({})", values.found.sort_string(self.tcx)));
+ &format!(" ({})", values.found.sort_string(self.tcx)),
+ );
}
(_, false, _) => {
if let Some(exp_found) = exp_found {
TypeVariants::TyFnDef(def, _) => {
(Some(def), Some(self.tcx.fn_sig(def).output()))
}
- _ => (None, None)
+ _ => (None, None),
};
let exp_is_struct = match exp_found.expected.sty {
TypeVariants::TyAdt(def, _) => def.is_struct(),
- _ => false
+ _ => false,
};
if let (Some(def_id), Some(ret_ty)) = (def_id, ret_ty) {
self.note_error_origin(diag, &cause);
}
- pub fn report_and_explain_type_error(&self,
- trace: TypeTrace<'tcx>,
- terr: &TypeError<'tcx>)
- -> DiagnosticBuilder<'tcx>
- {
- debug!("report_and_explain_type_error(trace={:?}, terr={:?})",
- trace,
- terr);
+ pub fn report_and_explain_type_error(
+ &self,
+ trace: TypeTrace<'tcx>,
+ terr: &TypeError<'tcx>,
+ ) -> DiagnosticBuilder<'tcx> {
+ debug!(
+ "report_and_explain_type_error(trace={:?}, terr={:?})",
+ trace, terr
+ );
let span = trace.cause.span(&self.tcx);
let failure_code = trace.cause.as_failure_code(terr);
diag
}
- fn values_str(&self, values: &ValuePairs<'tcx>)
- -> Option<(DiagnosticStyledString, DiagnosticStyledString)>
- {
+ fn values_str(
+ &self,
+ values: &ValuePairs<'tcx>,
+ ) -> Option<(DiagnosticStyledString, DiagnosticStyledString)> {
match *values {
infer::Types(ref exp_found) => self.expected_found_str_ty(exp_found),
infer::TraitRefs(ref exp_found) => self.expected_found_str(exp_found),
}
}
- fn expected_found_str_ty(&self,
- exp_found: &ty::error::ExpectedFound<Ty<'tcx>>)
- -> Option<(DiagnosticStyledString, DiagnosticStyledString)> {
+ fn expected_found_str_ty(
+ &self,
+ exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
+ ) -> Option<(DiagnosticStyledString, DiagnosticStyledString)> {
let exp_found = self.resolve_type_vars_if_possible(exp_found);
if exp_found.references_error() {
return None;
/// Returns a string of the form "expected `{}`, found `{}`".
fn expected_found_str<T: fmt::Display + TypeFoldable<'tcx>>(
&self,
- exp_found: &ty::error::ExpectedFound<T>)
- -> Option<(DiagnosticStyledString, DiagnosticStyledString)>
- {
+ exp_found: &ty::error::ExpectedFound<T>,
+ ) -> Option<(DiagnosticStyledString, DiagnosticStyledString)> {
let exp_found = self.resolve_type_vars_if_possible(exp_found);
if exp_found.references_error() {
return None;
}
- Some((DiagnosticStyledString::highlighted(format!("{}", exp_found.expected)),
- DiagnosticStyledString::highlighted(format!("{}", exp_found.found))))
+ Some((
+ DiagnosticStyledString::highlighted(format!("{}", exp_found.expected)),
+ DiagnosticStyledString::highlighted(format!("{}", exp_found.found)),
+ ))
}
- pub fn report_generic_bound_failure(&self,
- region_scope_tree: ®ion::ScopeTree,
- span: Span,
- origin: Option<SubregionOrigin<'tcx>>,
- bound_kind: GenericKind<'tcx>,
- sub: Region<'tcx>)
- {
+ pub fn report_generic_bound_failure(
+ &self,
+ region_scope_tree: ®ion::ScopeTree,
+ span: Span,
+ origin: Option<SubregionOrigin<'tcx>>,
+ bound_kind: GenericKind<'tcx>,
+ sub: Region<'tcx>,
+ ) {
// Attempt to obtain the span of the parameter so we can
// suggest adding an explicit lifetime bound to it.
let type_param_span = match (self.in_progress_tables, bound_kind) {
// `sp` only covers `T`, change it so that it covers
// `T:` when appropriate
let sp = if has_lifetimes {
- sp.to(self.tcx.sess.codemap().next_point(
- self.tcx.sess.codemap().next_point(sp)))
+ sp.to(self.tcx
+ .sess
+ .codemap()
+ .next_point(self.tcx.sess.codemap().next_point(sp)))
} else {
sp
};
};
let labeled_user_string = match bound_kind {
- GenericKind::Param(ref p) =>
- format!("the parameter type `{}`", p),
- GenericKind::Projection(ref p) =>
- format!("the associated type `{}`", p),
+ GenericKind::Param(ref p) => format!("the parameter type `{}`", p),
+ GenericKind::Projection(ref p) => format!("the associated type `{}`", p),
};
if let Some(SubregionOrigin::CompareImplMethodObligation {
- span, item_name, impl_item_def_id, trait_item_def_id,
- }) = origin {
- self.report_extra_impl_obligation(span,
- item_name,
- impl_item_def_id,
- trait_item_def_id,
- &format!("`{}: {}`", bound_kind, sub))
- .emit();
+ span,
+ item_name,
+ impl_item_def_id,
+ trait_item_def_id,
+ }) = origin
+ {
+ self.report_extra_impl_obligation(
+ span,
+ item_name,
+ impl_item_def_id,
+ trait_item_def_id,
+ &format!("`{}: {}`", bound_kind, sub),
+ ).emit();
return;
}
- fn binding_suggestion<'tcx, S: fmt::Display>(err: &mut DiagnosticBuilder<'tcx>,
- type_param_span: Option<(Span, bool)>,
- bound_kind: GenericKind<'tcx>,
- sub: S) {
- let consider = &format!("consider adding an explicit lifetime bound `{}: {}`...",
- bound_kind,
- sub);
+ fn binding_suggestion<'tcx, S: fmt::Display>(
+ err: &mut DiagnosticBuilder<'tcx>,
+ type_param_span: Option<(Span, bool)>,
+ bound_kind: GenericKind<'tcx>,
+ sub: S,
+ ) {
+ let consider = &format!(
+ "consider adding an explicit lifetime bound `{}: {}`...",
+ bound_kind, sub
+ );
if let Some((sp, has_lifetimes)) = type_param_span {
- let tail = if has_lifetimes {
- " + "
- } else {
- ""
- };
+ let tail = if has_lifetimes { " + " } else { "" };
let suggestion = format!("{}: {}{}", bound_kind, sub, tail);
err.span_suggestion_short(sp, consider, suggestion);
} else {
}
let mut err = match *sub {
- ty::ReEarlyBound(_) |
- ty::ReFree(ty::FreeRegion {bound_region: ty::BrNamed(..), ..}) => {
+ ty::ReEarlyBound(_)
+ | ty::ReFree(ty::FreeRegion {
+ bound_region: ty::BrNamed(..),
+ ..
+ }) => {
// Does the required lifetime have a nice name we can print?
- let mut err = struct_span_err!(self.tcx.sess,
- span,
- E0309,
- "{} may not live long enough",
- labeled_user_string);
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0309,
+ "{} may not live long enough",
+ labeled_user_string
+ );
binding_suggestion(&mut err, type_param_span, bound_kind, sub);
err
}
ty::ReStatic => {
// Does the required lifetime have a nice name we can print?
- let mut err = struct_span_err!(self.tcx.sess,
- span,
- E0310,
- "{} may not live long enough",
- labeled_user_string);
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0310,
+ "{} may not live long enough",
+ labeled_user_string
+ );
binding_suggestion(&mut err, type_param_span, bound_kind, "'static");
err
}
_ => {
// If not, be less specific.
- let mut err = struct_span_err!(self.tcx.sess,
- span,
- E0311,
- "{} may not live long enough",
- labeled_user_string);
- err.help(&format!("consider adding an explicit lifetime bound for `{}`",
- bound_kind));
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ span,
+ E0311,
+ "{} may not live long enough",
+ labeled_user_string
+ );
+ err.help(&format!(
+ "consider adding an explicit lifetime bound for `{}`",
+ bound_kind
+ ));
self.tcx.note_and_explain_region(
region_scope_tree,
&mut err,
&format!("{} must be valid for ", labeled_user_string),
sub,
- "...");
+ "...",
+ );
err
}
};
err.emit();
}
- fn report_sub_sup_conflict(&self,
- region_scope_tree: &region::ScopeTree,
- var_origin: RegionVariableOrigin,
- sub_origin: SubregionOrigin<'tcx>,
- sub_region: Region<'tcx>,
- sup_origin: SubregionOrigin<'tcx>,
- sup_region: Region<'tcx>) {
-
+ fn report_sub_sup_conflict(
+ &self,
+ region_scope_tree: &region::ScopeTree,
+ var_origin: RegionVariableOrigin,
+ sub_origin: SubregionOrigin<'tcx>,
+ sub_region: Region<'tcx>,
+ sup_origin: SubregionOrigin<'tcx>,
+ sup_region: Region<'tcx>,
+ ) {
let mut err = self.report_inference_failure(var_origin);
- self.tcx.note_and_explain_region(region_scope_tree, &mut err,
+ self.tcx.note_and_explain_region(
+ region_scope_tree,
+ &mut err,
"first, the lifetime cannot outlive ",
sup_region,
- "...");
+ "...",
+ );
match (&sup_origin, &sub_origin) {
(&infer::Subtype(ref sup_trace), &infer::Subtype(ref sub_trace)) => {
- if let (Some((sup_expected, sup_found)),
- Some((sub_expected, sub_found))) = (self.values_str(&sup_trace.values),
- self.values_str(&sub_trace.values)) {
+ if let (Some((sup_expected, sup_found)), Some((sub_expected, sub_found))) = (
+ self.values_str(&sup_trace.values),
+ self.values_str(&sub_trace.values),
+ ) {
if sub_expected == sup_expected && sub_found == sup_found {
self.tcx.note_and_explain_region(
region_scope_tree,
sub_region,
"...",
);
- err.note(&format!("...so that the {}:\nexpected {}\n found {}",
- sup_trace.cause.as_requirement_str(),
- sup_expected.content(),
- sup_found.content()));
+ err.note(&format!(
+ "...so that the {}:\nexpected {}\n found {}",
+ sup_trace.cause.as_requirement_str(),
+ sup_expected.content(),
+ sup_found.content()
+ ));
err.emit();
return;
}
self.note_region_origin(&mut err, &sup_origin);
- self.tcx.note_and_explain_region(region_scope_tree, &mut err,
+ self.tcx.note_and_explain_region(
+ region_scope_tree,
+ &mut err,
"but, the lifetime must be valid for ",
sub_region,
- "...");
+ "...",
+ );
self.note_region_origin(&mut err, &sub_origin);
err.emit();
}
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
- fn report_inference_failure(&self,
- var_origin: RegionVariableOrigin)
- -> DiagnosticBuilder<'tcx> {
+ fn report_inference_failure(
+ &self,
+ var_origin: RegionVariableOrigin,
+ ) -> DiagnosticBuilder<'tcx> {
let br_string = |br: ty::BoundRegion| {
let mut s = br.to_string();
if !s.is_empty() {
infer::Autoref(_) => " for autoref".to_string(),
infer::Coercion(_) => " for automatic coercion".to_string(),
infer::LateBoundRegion(_, br, infer::FnCall) => {
- format!(" for lifetime parameter {}in function call",
- br_string(br))
+ format!(" for lifetime parameter {}in function call", br_string(br))
}
infer::LateBoundRegion(_, br, infer::HigherRankedType) => {
format!(" for lifetime parameter {}in generic type", br_string(br))
}
- infer::LateBoundRegion(_, br, infer::AssocTypeProjection(def_id)) => {
- format!(" for lifetime parameter {}in trait containing associated type `{}`",
- br_string(br), self.tcx.associated_item(def_id).name)
- }
- infer::EarlyBoundRegion(_, name) => {
- format!(" for lifetime parameter `{}`",
- name)
- }
+ infer::LateBoundRegion(_, br, infer::AssocTypeProjection(def_id)) => format!(
+ " for lifetime parameter {}in trait containing associated type `{}`",
+ br_string(br),
+ self.tcx.associated_item(def_id).name
+ ),
+ infer::EarlyBoundRegion(_, name) => format!(" for lifetime parameter `{}`", name),
infer::BoundRegionInCoherence(name) => {
- format!(" for lifetime parameter `{}` in coherence check",
- name)
+ format!(" for lifetime parameter `{}` in coherence check", name)
}
infer::UpvarRegion(ref upvar_id, _) => {
let var_node_id = self.tcx.hir.hir_to_node_id(upvar_id.var_id);
infer::NLL(..) => bug!("NLL variable found in lexical phase"),
};
- struct_span_err!(self.tcx.sess, var_origin.span(), E0495,
- "cannot infer an appropriate lifetime{} \
- due to conflicting requirements",
- var_description)
+ struct_span_err!(
+ self.tcx.sess,
+ var_origin.span(),
+ E0495,
+ "cannot infer an appropriate lifetime{} \
+ due to conflicting requirements",
+ var_description
+ )
}
}
match self.code {
CompareImplMethodObligation { .. } => Error0308("method not compatible with trait"),
MatchExpressionArm { source, .. } => Error0308(match source {
- hir::MatchSource::IfLetDesugar{..} => "`if let` arms have incompatible types",
+ hir::MatchSource::IfLetDesugar { .. } => "`if let` arms have incompatible types",
_ => "match arms have incompatible types",
}),
IfExpression => Error0308("if and else have incompatible types"),
IfExpressionWithNoElse => Error0317("if may be missing an else clause"),
- EquatePredicate => Error0308("equality predicate not satisfied"),
MainFunctionType => Error0580("main function has wrong type"),
StartFunctionType => Error0308("start function has wrong type"),
IntrinsicType => Error0308("intrinsic has wrong type"),
// say, also take a look at the error code, maybe we can
// tailor to that.
_ => match terr {
- TypeError::CyclicTy(ty) if ty.is_closure() || ty.is_generator() =>
- Error0644("closure/generator type that references itself"),
- _ =>
- Error0308("mismatched types"),
- }
+ TypeError::CyclicTy(ty) if ty.is_closure() || ty.is_generator() => {
+ Error0644("closure/generator type that references itself")
+ }
+ _ => Error0308("mismatched types"),
+ },
}
}
CompareImplMethodObligation { .. } => "method type is compatible with trait",
ExprAssignable => "expression is assignable",
MatchExpressionArm { source, .. } => match source {
- hir::MatchSource::IfLetDesugar{..} => "`if let` arms have compatible types",
+ hir::MatchSource::IfLetDesugar { .. } => "`if let` arms have compatible types",
_ => "match arms have compatible types",
},
IfExpression => "if and else have compatible types",
IfExpressionWithNoElse => "if missing an else returns ()",
- EquatePredicate => "equality where clause is satisfied",
MainFunctionType => "`main` function has the correct type",
StartFunctionType => "`start` function has the correct type",
IntrinsicType => "intrinsic has the correct type",
self.borrow_region_constraints().make_subregion(origin, a, b);
}
- pub fn equality_predicate(&self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- predicate: &ty::PolyEquatePredicate<'tcx>)
- -> InferResult<'tcx, ()>
- {
- self.commit_if_ok(|snapshot| {
- let (ty::EquatePredicate(a, b), skol_map) =
- self.skolemize_late_bound_regions(predicate, snapshot);
- let cause_span = cause.span;
- let eqty_ok = self.at(cause, param_env).eq(b, a)?;
- self.leak_check(false, cause_span, &skol_map, snapshot)?;
- self.pop_skolemized(skol_map, snapshot);
- Ok(eqty_ok.unit())
- })
- }
-
pub fn subtype_predicate(&self,
cause: &ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
assert!(!obligation.has_escaping_regions());
match obligation.predicate {
ty::Predicate::Trait(..) |
- ty::Predicate::Equate(..) |
ty::Predicate::Subtype(..) |
ty::Predicate::Projection(..) |
ty::Predicate::ClosureKind(..) |
.filter_map(move |predicate| match predicate {
ty::Predicate::Projection(..) |
ty::Predicate::Trait(..) |
- ty::Predicate::Equate(..) |
ty::Predicate::Subtype(..) |
ty::Predicate::WellFormed(..) |
ty::Predicate::ObjectSafe(..) |
use ty::{self, TyCtxt};
use session::{Session, CrateDisambiguator};
use session::search_paths::PathKind;
-use util::nodemap::NodeSet;
use std::any::Any;
use std::collections::BTreeMap;
// utility functions
fn encode_metadata<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- link_meta: &LinkMeta,
- reachable: &NodeSet)
+ link_meta: &LinkMeta)
-> EncodedMetadata;
fn metadata_encoding_version(&self) -> &[u8];
}
fn extern_mod_stmt_cnum_untracked(&self, emod_id: ast::NodeId) -> Option<CrateNum> { None }
fn encode_metadata<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- link_meta: &LinkMeta,
- reachable: &NodeSet)
+ link_meta: &LinkMeta)
-> EncodedMetadata {
bug!("encode_metadata")
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use hir::def_id::{DefId, LOCAL_CRATE};
+use std::cmp;
+use ty;
+
/// The SymbolExportLevel of a symbols specifies from which kinds of crates
/// the symbol will be exported. `C` symbols will be exported from any
/// kind of crate, including cdylibs which export very few things.
/// `Rust` will only be exported if the crate produced is a Rust
/// dylib.
-#[derive(Eq, PartialEq, Debug, Copy, Clone)]
+#[derive(Eq, PartialEq, Debug, Copy, Clone, RustcEncodable, RustcDecodable)]
pub enum SymbolExportLevel {
C,
Rust,
}
}
}
+
+#[derive(Eq, PartialEq, Debug, Copy, Clone, RustcEncodable, RustcDecodable)]
+pub enum ExportedSymbol {
+ NonGeneric(DefId),
+ NoDefId(ty::SymbolName),
+}
+
+impl ExportedSymbol {
+ pub fn symbol_name(&self, tcx: ty::TyCtxt) -> ty::SymbolName {
+ match *self {
+ ExportedSymbol::NonGeneric(def_id) => {
+ tcx.symbol_name(ty::Instance::mono(tcx, def_id))
+ }
+ ExportedSymbol::NoDefId(symbol_name) => {
+ symbol_name
+ }
+ }
+ }
+
+ pub fn compare_stable(&self, tcx: ty::TyCtxt, other: &ExportedSymbol) -> cmp::Ordering {
+ match *self {
+ ExportedSymbol::NonGeneric(self_def_id) => {
+ match *other {
+ ExportedSymbol::NonGeneric(other_def_id) => {
+ tcx.def_path_hash(self_def_id).cmp(&tcx.def_path_hash(other_def_id))
+ }
+ ExportedSymbol::NoDefId(_) => {
+ cmp::Ordering::Less
+ }
+ }
+ }
+ ExportedSymbol::NoDefId(self_symbol_name) => {
+ match *other {
+ ExportedSymbol::NonGeneric(_) => {
+ cmp::Ordering::Greater
+ }
+ ExportedSymbol::NoDefId(ref other_symbol_name) => {
+ self_symbol_name.cmp(other_symbol_name)
+ }
+ }
+ }
+ }
+ }
+}
+
+impl_stable_hash_for!(enum self::ExportedSymbol {
+ NonGeneric(def_id),
+ NoDefId(symbol_name)
+});
+
+pub fn metadata_symbol_name(tcx: ty::TyCtxt) -> String {
+ format!("rust_metadata_{}_{}",
+ tcx.original_crate_name(LOCAL_CRATE),
+ tcx.crate_disambiguator(LOCAL_CRATE).to_fingerprint().to_hex())
+}
// Step 2: Mark all symbols that the symbols on the worklist touch.
fn propagate(&mut self) {
let mut scanned = FxHashSet();
- loop {
- let search_item = match self.worklist.pop() {
- Some(item) => item,
- None => break,
- };
+ while let Some(search_item) = self.worklist.pop() {
if !scanned.insert(search_item) {
continue
}
item_sp: Span, kind: AnnotationKind, visit_children: F)
where F: FnOnce(&mut Self)
{
- if self.tcx.sess.features.borrow().staged_api {
+ if self.tcx.features().staged_api {
// This crate explicitly wants staged API.
debug!("annotate(id = {:?}, attrs = {:?})", id, attrs);
if let Some(..) = attr::find_deprecation(self.tcx.sess.diagnostic(), attrs, item_sp) {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Index<'tcx> {
let is_staged_api =
tcx.sess.opts.debugging_opts.force_unstable_if_unmarked ||
- tcx.sess.features.borrow().staged_api;
+ tcx.features().staged_api;
let mut staged_api = FxHashMap();
staged_api.insert(LOCAL_CRATE, is_staged_api);
let mut index = Index {
active_features: FxHashSet(),
};
- let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features;
+ let ref active_lib_features = tcx.features().declared_lib_features;
// Put the active features into a map for quick lookup
index.active_features = active_lib_features.iter().map(|&(ref s, _)| s.clone()).collect();
// There's no good place to insert stability check for non-Copy unions,
// so semi-randomly perform it here in stability.rs
- hir::ItemUnion(..) if !self.tcx.sess.features.borrow().untagged_unions => {
+ hir::ItemUnion(..) if !self.tcx.features().untagged_unions => {
let def_id = self.tcx.hir.local_def_id(item.id);
let adt_def = self.tcx.adt_def(def_id);
let ty = self.tcx.type_of(def_id);
/// were expected to be library features), and the list of features used from
/// libraries, identify activated features that don't exist and error about them.
pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- let sess = &tcx.sess;
-
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
if tcx.stability().staged_api[&LOCAL_CRATE] {
krate.visit_all_item_likes(&mut missing.as_deep_visitor());
}
- let ref declared_lib_features = sess.features.borrow().declared_lib_features;
+ let ref declared_lib_features = tcx.features().declared_lib_features;
let mut remaining_lib_features: FxHashMap<Symbol, Span>
= declared_lib_features.clone().into_iter().collect();
remaining_lib_features.remove(&Symbol::intern("proc_macro"));
- for &(ref stable_lang_feature, span) in &sess.features.borrow().declared_stable_lang_features {
+ for &(ref stable_lang_feature, span) in &tcx.features().declared_stable_lang_features {
let version = find_lang_feature_accepted_version(&stable_lang_feature.as_str())
.expect("unexpectedly couldn't find version feature was stabilized");
tcx.lint_node(lint::builtin::STABLE_FEATURES,
use middle::allocator::AllocatorKind;
use middle::dependency_format;
use session::search_paths::PathKind;
-use session::config::{BorrowckMode, DebugInfoLevel, OutputType, Epoch};
+use session::config::{DebugInfoLevel, OutputType, Epoch};
use ty::tls;
use util::nodemap::{FxHashMap, FxHashSet};
use util::common::{duration_to_secs_str, ErrorReported};
/// multiple crates with the same name to coexist. See the
/// trans::back::symbol_names module for more information.
pub crate_disambiguator: RefCell<Option<CrateDisambiguator>>,
- pub features: RefCell<feature_gate::Features>,
+
+ features: RefCell<Option<feature_gate::Features>>,
/// The maximum recursion limit for potentially infinitely recursive
/// operations such as auto-dereference and monomorphization.
None => bug!("accessing disambiguator before initialization"),
}
}
+
pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
self.opts.debugging_opts.print_llvm_passes
}
- /// If true, we should use NLL-style region checking instead of
- /// lexical style.
- pub fn nll(&self) -> bool {
- self.features.borrow().nll || self.opts.debugging_opts.nll
+ /// Get the features enabled for the current compilation session.
+ /// DO NOT USE THIS METHOD if there is a TyCtxt available, as it circumvents
+ /// dependency tracking. Use tcx.features() instead.
+ #[inline]
+ pub fn features_untracked(&self) -> cell::Ref<feature_gate::Features> {
+ let features = self.features.borrow();
+
+ if features.is_none() {
+ bug!("Access to Session::features before it is initialized");
+ }
+
+ cell::Ref::map(features, |r| r.as_ref().unwrap())
}
- /// If true, we should use the MIR-based borrowck (we may *also* use
- /// the AST-based borrowck).
- pub fn use_mir(&self) -> bool {
- self.borrowck_mode().use_mir()
+ pub fn init_features(&self, features: feature_gate::Features) {
+ *(self.features.borrow_mut()) = Some(features);
}
/// If true, we should gather causal information during NLL
self.opts.debugging_opts.nll_dump_cause
}
- /// If true, we should enable two-phase borrows checks. This is
- /// done with either `-Ztwo-phase-borrows` or with
- /// `#![feature(nll)]`.
- pub fn two_phase_borrows(&self) -> bool {
- self.features.borrow().nll || self.opts.debugging_opts.two_phase_borrows
- }
-
- /// What mode(s) of borrowck should we run? AST? MIR? both?
- /// (Also considers the `#![feature(nll)]` setting.)
- pub fn borrowck_mode(&self) -> BorrowckMode {
- match self.opts.borrowck_mode {
- mode @ BorrowckMode::Mir |
- mode @ BorrowckMode::Compare => mode,
-
- mode @ BorrowckMode::Ast => {
- if self.nll() {
- BorrowckMode::Mir
- } else {
- mode
- }
- }
-
- }
- }
-
- /// Should we emit EndRegion MIR statements? These are consumed by
- /// MIR borrowck, but not when NLL is used. They are also consumed
- /// by the validation stuff.
- pub fn emit_end_regions(&self) -> bool {
- // FIXME(#46875) -- we should not emit end regions when NLL is enabled,
- // but for now we can't stop doing so because it causes false positives
- self.opts.debugging_opts.emit_end_regions ||
- self.opts.debugging_opts.mir_emit_validate > 0 ||
- self.use_mir()
- }
-
/// Calculates the flavor of LTO to use for this compilation.
pub fn lto(&self) -> config::Lto {
// If our target has codegen requirements ignore the command line
crate_types: RefCell::new(Vec::new()),
dependency_formats: RefCell::new(FxHashMap()),
crate_disambiguator: RefCell::new(None),
- features: RefCell::new(feature_gate::Features::new()),
+ features: RefCell::new(None),
recursion_limit: Cell::new(64),
type_length_limit: Cell::new(1048576),
next_node_id: Cell::new(NodeId::new(1)),
span_bug!(span, "subtype requirement gave wrong error: `{:?}`", predicate)
}
- ty::Predicate::Equate(ref predicate) => {
- let predicate = self.resolve_type_vars_if_possible(predicate);
- let err = self.equality_predicate(&obligation.cause,
- obligation.param_env,
- &predicate).err().unwrap();
- struct_span_err!(self.tcx.sess, span, E0278,
- "the requirement `{}` is not satisfied (`{}`)",
- predicate, err)
- }
-
ty::Predicate::RegionOutlives(ref predicate) => {
let predicate = self.resolve_type_vars_if_possible(predicate);
let err = self.region_outlives_predicate(&obligation.cause,
ObligationCauseCode::MatchExpressionArm { .. } |
ObligationCauseCode::IfExpression |
ObligationCauseCode::IfExpressionWithNoElse |
- ObligationCauseCode::EquatePredicate |
ObligationCauseCode::MainFunctionType |
ObligationCauseCode::StartFunctionType |
ObligationCauseCode::IntrinsicType |
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use infer::{RegionObligation, InferCtxt, InferOk};
+use infer::{RegionObligation, InferCtxt};
use ty::{self, Ty, TypeFoldable, ToPolyTraitRef, ToPredicate};
use ty::error::ExpectedFound;
use rustc_data_structures::obligation_forest::{ObligationForest, Error};
}
}
- ty::Predicate::Equate(ref binder) => {
- match selcx.infcx().equality_predicate(&obligation.cause,
- obligation.param_env,
- binder) {
- Ok(InferOk { obligations, value: () }) => {
- Ok(Some(obligations))
- },
- Err(_) => Err(CodeSelectionError(Unimplemented)),
- }
- }
-
ty::Predicate::RegionOutlives(ref binder) => {
match selcx.infcx().region_outlives_predicate(&obligation.cause, binder) {
Ok(()) => Ok(Some(Vec::new())),
/// Computing common supertype of an if expression with no else counter-part
IfExpressionWithNoElse,
- /// `where a == b`
- EquatePredicate,
-
/// `main` has wrong type
MainFunctionType,
ty::Predicate::RegionOutlives(..) |
ty::Predicate::ClosureKind(..) |
ty::Predicate::Subtype(..) |
- ty::Predicate::Equate(..) |
ty::Predicate::ConstEvaluatable(..) => {
false
}
}
ty::Predicate::Projection(..) |
ty::Predicate::Trait(..) |
- ty::Predicate::Equate(..) |
ty::Predicate::Subtype(..) |
ty::Predicate::RegionOutlives(..) |
ty::Predicate::WellFormed(..) |
self.evaluate_trait_predicate_recursively(previous_stack, obligation)
}
- ty::Predicate::Equate(ref p) => {
- // does this code ever run?
- match self.infcx.equality_predicate(&obligation.cause, obligation.param_env, p) {
- Ok(InferOk { obligations, .. }) => {
- self.evaluate_predicates_recursively(previous_stack, &obligations);
- EvaluatedToOk
- },
- Err(_) => EvaluatedToErr
- }
- }
-
ty::Predicate::Subtype(ref p) => {
// does this code ever run?
match self.infcx.subtype_predicate(&obligation.cause, obligation.param_env, p) {
// The feature gate should prevent introducing new specializations, but not
// taking advantage of upstream ones.
- if !tcx.sess.features.borrow().specialization &&
+ if !tcx.features().specialization &&
(impl1_def_id.is_local() || impl2_def_id.is_local()) {
return false;
}
}
super::IfExpression => Some(super::IfExpression),
super::IfExpressionWithNoElse => Some(super::IfExpressionWithNoElse),
- super::EquatePredicate => Some(super::EquatePredicate),
super::MainFunctionType => Some(super::MainFunctionType),
super::StartFunctionType => Some(super::StartFunctionType),
super::IntrinsicType => Some(super::IntrinsicType),
super::MatchExpressionArm { arm_span: _, source: _ } |
super::IfExpression |
super::IfExpressionWithNoElse |
- super::EquatePredicate |
super::MainFunctionType |
super::StartFunctionType |
super::IntrinsicType |
super::MatchExpressionArm { arm_span: _, source: _ } |
super::IfExpression |
super::IfExpressionWithNoElse |
- super::EquatePredicate |
super::MainFunctionType |
super::StartFunctionType |
super::IntrinsicType |
ty::Predicate::Trait(ref data) =>
ty::Predicate::Trait(tcx.anonymize_late_bound_regions(data)),
- ty::Predicate::Equate(ref data) =>
- ty::Predicate::Equate(tcx.anonymize_late_bound_regions(data)),
-
ty::Predicate::RegionOutlives(ref data) =>
ty::Predicate::RegionOutlives(tcx.anonymize_late_bound_regions(data)),
// Currently, we do not elaborate object-safe
// predicates.
}
- ty::Predicate::Equate(..) => {
- // Currently, we do not "elaborate" predicates like
- // `X == Y`, though conceivably we might. For example,
- // `&X == &Y` implies that `X == Y`.
- }
ty::Predicate::Subtype(..) => {
// Currently, we do not "elaborate" predicates like `X
// <: Y`, though conceivably we might.
use dep_graph::{DepNode, DepConstructor};
use errors::DiagnosticBuilder;
use session::Session;
-use session::config::OutputFilenames;
+use session::config::{BorrowckMode, OutputFilenames};
use middle;
use hir::{TraitCandidate, HirId, ItemLocalId};
use hir::def::{Def, Export};
use ty::maps;
use ty::steal::Steal;
use ty::BindingMode;
-use util::nodemap::{NodeMap, NodeSet, DefIdSet, ItemLocalMap};
+use util::nodemap::{NodeMap, DefIdSet, ItemLocalMap};
use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
use syntax::ast::{self, Name, NodeId};
use syntax::attr;
use syntax::codemap::MultiSpan;
+use syntax::feature_gate;
use syntax::symbol::{Symbol, keywords};
use syntax_pos::Span;
self.all_crate_nums(LOCAL_CRATE)
}
+ pub fn features(self) -> Lrc<feature_gate::Features> {
+ self.features_query(LOCAL_CRATE)
+ }
+
pub fn def_key(self, id: DefId) -> hir_map::DefKey {
if id.is_local() {
self.hir.def_key(id)
self.on_disk_query_result_cache.serialize(self.global_tcx(), encoder)
}
+ /// If true, we should use NLL-style region checking instead of
+ /// lexical style.
+ pub fn nll(self) -> bool {
+ self.features().nll || self.sess.opts.debugging_opts.nll
+ }
+
+ /// If true, we should use the MIR-based borrowck (we may *also* use
+ /// the AST-based borrowck).
+ pub fn use_mir(self) -> bool {
+ self.borrowck_mode().use_mir()
+ }
+
+ /// If true, we should enable two-phase borrows checks. This is
+ /// done with either `-Ztwo-phase-borrows` or with
+ /// `#![feature(nll)]`.
+ pub fn two_phase_borrows(self) -> bool {
+ self.features().nll || self.sess.opts.debugging_opts.two_phase_borrows
+ }
+
+ /// What mode(s) of borrowck should we run? AST? MIR? both?
+ /// (Also considers the `#![feature(nll)]` setting.)
+ pub fn borrowck_mode(&self) -> BorrowckMode {
+ match self.sess.opts.borrowck_mode {
+ mode @ BorrowckMode::Mir |
+ mode @ BorrowckMode::Compare => mode,
+
+ mode @ BorrowckMode::Ast => {
+ if self.nll() {
+ BorrowckMode::Mir
+ } else {
+ mode
+ }
+ }
+
+ }
+ }
+
+ /// Should we emit EndRegion MIR statements? These are consumed by
+ /// MIR borrowck, but not when NLL is used. They are also consumed
+ /// by the validation stuff.
+ pub fn emit_end_regions(self) -> bool {
+ // FIXME(#46875) -- we should not emit end regions when NLL is enabled,
+ // but for now we can't stop doing so because it causes false positives
+ self.sess.opts.debugging_opts.emit_end_regions ||
+ self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
+ self.use_mir()
+ }
}
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
- pub fn encode_metadata(self, link_meta: &LinkMeta, reachable: &NodeSet)
+ pub fn encode_metadata(self, link_meta: &LinkMeta)
-> EncodedMetadata
{
- self.cstore.encode_metadata(self, link_meta, reachable)
+ self.cstore.encode_metadata(self, link_meta)
}
}
}
pub fn mk_diverging_default(self) -> Ty<'tcx> {
- if self.sess.features.borrow().never_type {
+ if self.features().never_type {
self.types.never
} else {
self.intern_tup(&[], true)
};
providers.has_copy_closures = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- tcx.sess.features.borrow().copy_closures
+ tcx.features().copy_closures
};
providers.has_clone_closures = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- tcx.sess.features.borrow().clone_closures
+ tcx.features().clone_closures
};
providers.fully_normalize_monormophic_ty = |tcx, ty| {
tcx.fully_normalize_associated_types_in(&ty)
};
+ providers.features_query = |tcx, cnum| {
+ assert_eq!(cnum, LOCAL_CRATE);
+ Lrc::new(tcx.sess.features_untracked().clone())
+ };
+ providers.is_panic_runtime = |tcx, cnum| {
+ assert_eq!(cnum, LOCAL_CRATE);
+ attr::contains_name(tcx.hir.krate_attrs(), "panic_runtime")
+ };
+ providers.is_compiler_builtins = |tcx, cnum| {
+ assert_eq!(cnum, LOCAL_CRATE);
+ attr::contains_name(tcx.hir.krate_attrs(), "compiler_builtins")
+ };
}
}
let pointee = tcx.normalize_associated_type_in_env(&pointee, param_env);
- if pointee.is_sized(tcx, param_env, DUMMY_SP) {
+ if pointee.is_sized(tcx.at(DUMMY_SP), param_env) {
return Ok(tcx.intern_layout(LayoutDetails::scalar(self, data_ptr)));
}
let param_env = tcx.param_env(def.did);
let last_field = def.variants[v].fields.last().unwrap();
let always_sized = tcx.type_of(last_field.did)
- .is_sized(tcx, param_env, DUMMY_SP);
+ .is_sized(tcx.at(DUMMY_SP), param_env);
if !always_sized { StructKind::MaybeUnsized }
else { StructKind::AlwaysSized }
};
}
}
-impl<'tcx> QueryDescription<'tcx> for queries::is_exported_symbol<'tcx> {
+impl<'tcx> QueryDescription<'tcx> for queries::is_reachable_non_generic<'tcx> {
fn describe(_: TyCtxt, _: DefId) -> String {
- bug!("is_exported_symbol")
+ bug!("is_reachable_non_generic")
}
}
}
}
-impl<'tcx> QueryDescription<'tcx> for queries::exported_symbol_ids<'tcx> {
+impl<'tcx> QueryDescription<'tcx> for queries::reachable_non_generics<'tcx> {
fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
format!("looking up the exported symbols of a crate")
}
}
}
+impl<'tcx> QueryDescription<'tcx> for queries::features_query<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up enabled feature gates")
+ }
+}
+
impl<'tcx> QueryDescription<'tcx> for queries::typeck_tables_of<'tcx> {
#[inline]
fn cache_on_disk(def_id: Self::Key) -> bool {
use middle::resolve_lifetime::{ResolveLifetimes, Region, ObjectLifetimeDefault};
use middle::stability::{self, DeprecationEntry};
use middle::lang_items::{LanguageItems, LangItem};
-use middle::exported_symbols::SymbolExportLevel;
+use middle::exported_symbols::{SymbolExportLevel, ExportedSymbol};
use mir::mono::{CodegenUnit, Stats};
use mir;
use session::{CompileResult, CrateDisambiguator};
use syntax_pos::symbol::InternedString;
use syntax::attr;
use syntax::ast;
+use syntax::feature_gate;
use syntax::symbol::Symbol;
#[macro_use]
[] fn fn_arg_names: FnArgNames(DefId) -> Vec<ast::Name>,
[] fn impl_parent: ImplParent(DefId) -> Option<DefId>,
[] fn trait_of_item: TraitOfItem(DefId) -> Option<DefId>,
- [] fn is_exported_symbol: IsExportedSymbol(DefId) -> bool,
[] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> ExternBodyNestedBodies,
[] fn const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
[] fn rvalue_promotable_map: RvaluePromotableMap(DefId) -> Lrc<ItemLocalSet>,
[] fn lint_levels: lint_levels_node(CrateNum) -> Lrc<lint::LintLevelMap>,
[] fn impl_defaultness: ImplDefaultness(DefId) -> hir::Defaultness,
- [] fn exported_symbol_ids: ExportedSymbolIds(CrateNum) -> Lrc<DefIdSet>,
+
+ // The DefIds of all non-generic functions and statics in the given crate
+ // that can be reached from outside the crate.
+ //
+ // We expect these items to be available for being linked to.
+ //
+ // This query can also be called for LOCAL_CRATE. In this case it will
+ // compute which items will be reachable to other crates, taking into account
+ // the kind of crate that is currently compiled. Crates with only a
+ // C interface have fewer reachable things.
+ //
+ // Does not include external symbols that don't have a corresponding DefId,
+ // like the compiler-generated `main` function and so on.
+ [] fn reachable_non_generics: ReachableNonGenerics(CrateNum) -> Lrc<DefIdSet>,
+ [] fn is_reachable_non_generic: IsReachableNonGeneric(DefId) -> bool,
+
+
[] fn native_libraries: NativeLibraries(CrateNum) -> Lrc<Vec<NativeLibrary>>,
[] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option<DefId>,
[] fn derive_registrar_fn: DeriveRegistrarFn(CrateNum) -> Option<DefId>,
[] fn all_crate_nums: all_crate_nums_node(CrateNum) -> Lrc<Vec<CrateNum>>,
[] fn exported_symbols: ExportedSymbols(CrateNum)
- -> Arc<Vec<(String, Option<DefId>, SymbolExportLevel)>>,
+ -> Arc<Vec<(ExportedSymbol, SymbolExportLevel)>>,
[] fn collect_and_partition_translation_items:
collect_and_partition_translation_items_node(CrateNum)
-> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>),
// Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.
[] fn instance_def_size_estimate: instance_def_size_estimate_dep_node(ty::InstanceDef<'tcx>)
-> usize,
+
+ [] fn features_query: features_node(CrateNum) -> Lrc<feature_gate::Features>,
}
//////////////////////////////////////////////////////////////////////
// These functions are little shims used to find the dep-node for a
// given query when there is not a *direct* mapping:
+
+fn features_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
+ DepConstructor::Features
+}
+
fn erase_regions_ty<'tcx>(ty: Ty<'tcx>) -> DepConstructor<'tcx> {
DepConstructor::EraseRegionsTy { ty }
}
DepKind::RvaluePromotableMap => { force!(rvalue_promotable_map, def_id!()); }
DepKind::ImplParent => { force!(impl_parent, def_id!()); }
DepKind::TraitOfItem => { force!(trait_of_item, def_id!()); }
- DepKind::IsExportedSymbol => { force!(is_exported_symbol, def_id!()); }
+ DepKind::IsReachableNonGeneric => { force!(is_reachable_non_generic, def_id!()); }
DepKind::IsMirAvailable => { force!(is_mir_available, def_id!()); }
DepKind::ItemAttrs => { force!(item_attrs, def_id!()); }
DepKind::FnArgNames => { force!(fn_arg_names, def_id!()); }
DepKind::GetPanicStrategy => { force!(panic_strategy, krate!()); }
DepKind::IsNoBuiltins => { force!(is_no_builtins, krate!()); }
DepKind::ImplDefaultness => { force!(impl_defaultness, def_id!()); }
- DepKind::ExportedSymbolIds => { force!(exported_symbol_ids, krate!()); }
+ DepKind::ReachableNonGenerics => { force!(reachable_non_generics, krate!()); }
DepKind::NativeLibraries => { force!(native_libraries, krate!()); }
DepKind::PluginRegistrarFn => { force!(plugin_registrar_fn, krate!()); }
DepKind::DeriveRegistrarFn => { force!(derive_registrar_fn, krate!()); }
DepKind::TargetFeaturesEnabled => { force!(target_features_enabled, def_id!()); }
DepKind::GetSymbolExportLevel => { force!(symbol_export_level, def_id!()); }
+ DepKind::Features => { force!(features_query, LOCAL_CRATE); }
}
true
/// would be the type parameters.
Trait(PolyTraitPredicate<'tcx>),
- /// where `T1 == T2`.
- Equate(PolyEquatePredicate<'tcx>),
-
/// where 'a : 'b
RegionOutlives(PolyRegionOutlivesPredicate<'tcx>),
match *self {
Predicate::Trait(ty::Binder(ref data)) =>
Predicate::Trait(ty::Binder(data.subst(tcx, substs))),
- Predicate::Equate(ty::Binder(ref data)) =>
- Predicate::Equate(ty::Binder(data.subst(tcx, substs))),
Predicate::Subtype(ty::Binder(ref data)) =>
Predicate::Subtype(ty::Binder(data.subst(tcx, substs))),
Predicate::RegionOutlives(ty::Binder(ref data)) =>
}
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
-pub struct EquatePredicate<'tcx>(pub Ty<'tcx>, pub Ty<'tcx>); // `0 == 1`
-pub type PolyEquatePredicate<'tcx> = ty::Binder<EquatePredicate<'tcx>>;
-
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct OutlivesPredicate<A,B>(pub A, pub B); // `A : B`
pub type PolyOutlivesPredicate<A,B> = ty::Binder<OutlivesPredicate<A,B>>;
}
}
-impl<'tcx> ToPredicate<'tcx> for PolyEquatePredicate<'tcx> {
- fn to_predicate(&self) -> Predicate<'tcx> {
- Predicate::Equate(self.clone())
- }
-}
-
impl<'tcx> ToPredicate<'tcx> for PolyRegionOutlivesPredicate<'tcx> {
fn to_predicate(&self) -> Predicate<'tcx> {
Predicate::RegionOutlives(self.clone())
ty::Predicate::Trait(ref data) => {
data.skip_binder().input_types().collect()
}
- ty::Predicate::Equate(ty::Binder(ref data)) => {
- vec![data.0, data.1]
- }
ty::Predicate::Subtype(ty::Binder(SubtypePredicate { a, b, a_is_expected: _ })) => {
vec![a, b]
}
Some(t.to_poly_trait_ref())
}
Predicate::Projection(..) |
- Predicate::Equate(..) |
Predicate::Subtype(..) |
Predicate::RegionOutlives(..) |
Predicate::WellFormed(..) |
}
Predicate::Trait(..) |
Predicate::Projection(..) |
- Predicate::Equate(..) |
Predicate::Subtype(..) |
Predicate::RegionOutlives(..) |
Predicate::WellFormed(..) |
/// Returns true if the impls are the same polarity and are implementing
/// a trait which contains no items
pub fn impls_are_allowed_to_overlap(self, def_id1: DefId, def_id2: DefId) -> bool {
- if !self.sess.features.borrow().overlapping_marker_traits {
+ if !self.features().overlapping_marker_traits {
return false;
}
let trait1_is_empty = self.impl_trait_ref(def_id1)
}
}
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable)]
pub struct SymbolName {
// FIXME: we don't rely on interning or equality here - better have
// this be a `&'tcx str`.
name
});
+impl SymbolName {
+ pub fn new(name: &str) -> SymbolName {
+ SymbolName {
+ name: Symbol::intern(name).as_str()
+ }
+ }
+}
+
impl Deref for SymbolName {
type Target = str;
fmt::Display::fmt(&self.name, fmt)
}
}
+
+impl fmt::Debug for SymbolName {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(&self.name, fmt)
+ }
+}
}
}
-impl<'a, 'tcx> Lift<'tcx> for ty::EquatePredicate<'a> {
- type Lifted = ty::EquatePredicate<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
- -> Option<ty::EquatePredicate<'tcx>> {
- tcx.lift(&(self.0, self.1)).map(|(a, b)| ty::EquatePredicate(a, b))
- }
-}
-
impl<'a, 'tcx> Lift<'tcx> for ty::SubtypePredicate<'a> {
type Lifted = ty::SubtypePredicate<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
ty::Predicate::Trait(ref binder) => {
tcx.lift(binder).map(ty::Predicate::Trait)
}
- ty::Predicate::Equate(ref binder) => {
- tcx.lift(binder).map(ty::Predicate::Equate)
- }
ty::Predicate::Subtype(ref binder) => {
tcx.lift(binder).map(ty::Predicate::Subtype)
}
match *self {
ty::Predicate::Trait(ref a) =>
ty::Predicate::Trait(a.fold_with(folder)),
- ty::Predicate::Equate(ref binder) =>
- ty::Predicate::Equate(binder.fold_with(folder)),
ty::Predicate::Subtype(ref binder) =>
ty::Predicate::Subtype(binder.fold_with(folder)),
ty::Predicate::RegionOutlives(ref binder) =>
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
match *self {
ty::Predicate::Trait(ref a) => a.visit_with(visitor),
- ty::Predicate::Equate(ref binder) => binder.visit_with(visitor),
ty::Predicate::Subtype(ref binder) => binder.visit_with(visitor),
ty::Predicate::RegionOutlives(ref binder) => binder.visit_with(visitor),
ty::Predicate::TypeOutlives(ref binder) => binder.visit_with(visitor),
}
}
-impl<'tcx> TypeFoldable<'tcx> for ty::EquatePredicate<'tcx> {
- fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- ty::EquatePredicate(self.0.fold_with(folder), self.1.fold_with(folder))
- }
-
- fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
- self.0.visit_with(visitor) || self.1.visit_with(visitor)
- }
-}
-
impl<'tcx> TypeFoldable<'tcx> for ty::SubtypePredicate<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
ty::SubtypePredicate {
use ty::{self, Ty, TyCtxt, TypeFoldable};
use ty::fold::TypeVisitor;
use ty::subst::{Subst, UnpackedKind};
+use ty::maps::TyCtxtAt;
use ty::TypeVariants::*;
use util::common::ErrorReported;
use middle::lang_items;
match predicate {
ty::Predicate::Projection(..) |
ty::Predicate::Trait(..) |
- ty::Predicate::Equate(..) |
ty::Predicate::Subtype(..) |
ty::Predicate::WellFormed(..) |
ty::Predicate::ObjectSafe(..) |
}
pub fn is_sized(&'tcx self,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- span: Span)-> bool
+ tcx_at: TyCtxtAt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>)-> bool
{
- tcx.at(span).is_sized_raw(param_env.and(self))
+ tcx_at.is_sized_raw(param_env.and(self))
}
pub fn is_freeze(&'tcx self,
ty::Predicate::Trait(ref t) => {
wf.compute_trait_ref(&t.skip_binder().trait_ref, Elaborate::None); // (*)
}
- ty::Predicate::Equate(ref t) => {
- wf.compute(t.skip_binder().0);
- wf.compute(t.skip_binder().1);
- }
ty::Predicate::RegionOutlives(..) => {
}
ty::Predicate::TypeOutlives(ref t) => {
('tcx) ty::Binder<ty::TraitRef<'tcx>>,
('tcx) ty::Binder<ty::FnSig<'tcx>>,
('tcx) ty::Binder<ty::TraitPredicate<'tcx>>,
- ('tcx) ty::Binder<ty::EquatePredicate<'tcx>>,
('tcx) ty::Binder<ty::SubtypePredicate<'tcx>>,
('tcx) ty::Binder<ty::ProjectionPredicate<'tcx>>,
('tcx) ty::Binder<ty::OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>>,
}
}
-define_print! {
- ('tcx) ty::EquatePredicate<'tcx>, (self, f, cx) {
- display {
- print!(f, cx, print(self.0), write(" == "), print(self.1))
- }
- }
-}
-
define_print! {
('tcx) ty::SubtypePredicate<'tcx>, (self, f, cx) {
display {
display {
match *self {
ty::Predicate::Trait(ref data) => data.print(f, cx),
- ty::Predicate::Equate(ref predicate) => predicate.print(f, cx),
ty::Predicate::Subtype(ref predicate) => predicate.print(f, cx),
ty::Predicate::RegionOutlives(ref predicate) => predicate.print(f, cx),
ty::Predicate::TypeOutlives(ref predicate) => predicate.print(f, cx),
debug {
match *self {
ty::Predicate::Trait(ref a) => a.print(f, cx),
- ty::Predicate::Equate(ref pair) => pair.print(f, cx),
ty::Predicate::Subtype(ref pair) => pair.print(f, cx),
ty::Predicate::RegionOutlives(ref pair) => pair.print(f, cx),
ty::Predicate::TypeOutlives(ref pair) => pair.print(f, cx),
use serialize::json::{Json, ToJson};
-macro_rules! linker_flavor {
- ($(($variant:ident, $string:expr),)+) => {
- #[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash,
- RustcEncodable, RustcDecodable)]
- pub enum LinkerFlavor {
- $($variant,)+
- }
+#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash,
+ RustcEncodable, RustcDecodable)]
+pub enum LinkerFlavor {
+ Em,
+ Gcc,
+ Ld,
+ Msvc,
+ Lld(LldFlavor),
+}
+
+#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash,
+ RustcEncodable, RustcDecodable)]
+pub enum LldFlavor {
+ Wasm,
+ Ld64,
+ Ld,
+ Link,
+}
+impl ToJson for LinkerFlavor {
+ fn to_json(&self) -> Json {
+ self.desc().to_json()
+ }
+}
+macro_rules! flavor_mappings {
+ ($((($($flavor:tt)*), $string:expr),)*) => (
impl LinkerFlavor {
pub const fn one_of() -> &'static str {
concat!("one of: ", $($string, " ",)+)
pub fn from_str(s: &str) -> Option<Self> {
Some(match s {
- $($string => LinkerFlavor::$variant,)+
+ $($string => $($flavor)*,)+
_ => return None,
})
}
pub fn desc(&self) -> &str {
match *self {
- $(LinkerFlavor::$variant => $string,)+
+ $($($flavor)* => $string,)+
}
}
}
-
- impl ToJson for LinkerFlavor {
- fn to_json(&self) -> Json {
- self.desc().to_json()
- }
- }
- }
+ )
}
-linker_flavor! {
- (Em, "em"),
- (Binaryen, "binaryen"),
- (Gcc, "gcc"),
- (Ld, "ld"),
- (Msvc, "msvc"),
+
+flavor_mappings! {
+ ((LinkerFlavor::Em), "em"),
+ ((LinkerFlavor::Gcc), "gcc"),
+ ((LinkerFlavor::Ld), "ld"),
+ ((LinkerFlavor::Msvc), "msvc"),
+ ((LinkerFlavor::Lld(LldFlavor::Wasm)), "wasm-ld"),
+ ((LinkerFlavor::Lld(LldFlavor::Ld64)), "ld64.lld"),
+ ((LinkerFlavor::Lld(LldFlavor::Ld)), "ld.lld"),
+ ((LinkerFlavor::Lld(LldFlavor::Link)), "lld-link"),
}
#[derive(Clone, Copy, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)]
let mut base = super::cloudabi_base::opts();
base.max_atomic_width = Some(128);
base.abi_blacklist = super::arm_base::abi_blacklist();
- base.linker = "aarch64-unknown-cloudabi-cc".to_string();
+ base.linker = Some("aarch64-unknown-cloudabi-cc".to_string());
Ok(Target {
llvm_target: "aarch64-unknown-cloudabi".to_string(),
base.max_atomic_width = Some(64);
base.features = "+v7,+vfp3,+neon".to_string();
base.abi_blacklist = super::arm_base::abi_blacklist();
- base.linker = "armv7-unknown-cloudabi-eabihf-cc".to_string();
+ base.linker = Some("armv7-unknown-cloudabi-eabihf-cc".to_string());
Ok(Target {
llvm_target: "armv7-unknown-cloudabi-eabihf".to_string(),
use target::{Target, TargetOptions, TargetResult};
pub fn target() -> TargetResult {
- let mut base = super::linux_musl_base::opts();
-
- // Most of these settings are copied from the armv7_unknown_linux_gnueabihf
- // target.
- base.features = "+v7,+vfp3,+neon".to_string();
- base.cpu = "cortex-a8".to_string();
- base.max_atomic_width = Some(64);
+ let base = super::linux_musl_base::opts();
Ok(Target {
// It's important we use "gnueabihf" and not "musleabihf" here. LLVM
// uses it to determine the calling convention and float ABI, and LLVM
target_env: "musl".to_string(),
target_vendor: "unknown".to_string(),
linker_flavor: LinkerFlavor::Gcc,
+
+ // Most of these settings are copied from the armv7_unknown_linux_gnueabihf
+ // target.
options: TargetOptions {
+ features: "+v7,+vfp3,+d16,+thumb2,-neon".to_string(),
+ cpu: "generic".to_string(),
+ max_atomic_width: Some(64),
abi_blacklist: super::arm_base::abi_blacklist(),
.. base
- },
+ }
})
}
use LinkerFlavor;
use super::{LinkArgs, Target, TargetOptions};
-use super::emscripten_base::{cmd};
pub fn target() -> Result<Target, String> {
let mut args = LinkArgs::new();
"ERROR_ON_UNDEFINED_SYMBOLS=1".to_string()]);
let opts = TargetOptions {
- linker: cmd("emcc"),
-
dynamic_linking: false,
executables: true,
exe_suffix: ".js".to_string(),
+++ /dev/null
-// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-pub fn cmd(name: &str) -> String {
- if cfg!(windows) {
- format!("{}.bat", name)
- } else {
- name.to_string()
- }
-}
pub fn opts() -> TargetOptions {
TargetOptions {
- linker: "cc".to_string(),
dynamic_linking: true,
executables: true,
has_rpath: false,
let mut base = super::cloudabi_base::opts();
base.cpu = "pentium4".to_string();
base.max_atomic_width = Some(64);
- base.linker = "i686-unknown-cloudabi-cc".to_string();
+ base.linker = Some("i686-unknown-cloudabi-cc".to_string());
base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
base.stack_probes = true;
has_elf_tls: false,
exe_allocation_crate: None,
panic_strategy: PanicStrategy::Abort,
- linker: "ld".to_string(),
pre_link_args,
post_link_args,
target_family: Some("unix".to_string()),
mod bitrig_base;
mod cloudabi_base;
mod dragonfly_base;
-mod emscripten_base;
mod freebsd_base;
mod haiku_base;
mod linux_base;
/// Whether the target is built-in or loaded from a custom target specification.
pub is_builtin: bool,
- /// Linker to invoke. Defaults to "cc".
- pub linker: String,
+ /// Linker to invoke
+ pub linker: Option<String>,
/// Linker arguments that are unconditionally passed *before* any
/// user-defined libraries.
fn default() -> TargetOptions {
TargetOptions {
is_builtin: false,
- linker: option_env!("CFG_DEFAULT_LINKER").unwrap_or("cc").to_string(),
+ linker: option_env!("CFG_DEFAULT_LINKER").map(|s| s.to_string()),
pre_link_args: LinkArgs::new(),
post_link_args: LinkArgs::new(),
asm_args: Vec::new(),
}
key!(is_builtin, bool);
- key!(linker);
+ key!(linker, optional);
key!(pre_link_args, link_args);
key!(pre_link_objects_exe, list);
key!(pre_link_objects_dll, list);
// to gcc to get object files. For this reason we have a hard
// dependency on this specific gcc.
asm_args: vec!["-mcpu=msp430".to_string()],
- linker: "msp430-elf-gcc".to_string(),
+ linker: Some("msp430-elf-gcc".to_string()),
no_integrated_as: true,
// There are no atomic instructions available in the MSP430
executables: true,
// In 99%+ of cases, we want to use the `arm-none-eabi-gcc` compiler (there aren't many
// options around)
- linker: "arm-none-eabi-gcc".to_string(),
+ linker: Some("arm-none-eabi-gcc".to_string()),
// Because these devices have very little resources having an unwinder is too onerous so we
// default to "abort" because the "unwind" strategy is very rare.
panic_strategy: PanicStrategy::Abort,
use LinkerFlavor;
use super::{LinkArgs, Target, TargetOptions};
-use super::emscripten_base::{cmd};
pub fn target() -> Result<Target, String> {
let mut post_link_args = LinkArgs::new();
"-g3".to_string()]);
let opts = TargetOptions {
- linker: cmd("emcc"),
-
dynamic_linking: false,
executables: true,
// Today emcc emits two files - a .js file to bootstrap and
use LinkerFlavor;
use super::{LinkArgs, Target, TargetOptions};
-use super::emscripten_base::{cmd};
pub fn target() -> Result<Target, String> {
let mut post_link_args = LinkArgs::new();
"ERROR_ON_UNDEFINED_SYMBOLS=1".to_string()]);
let opts = TargetOptions {
- linker: cmd("emcc"),
-
dynamic_linking: false,
executables: true,
// Today emcc emits two files - a .js file to bootstrap and
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// The wasm32-unknown-unknown target is currently a highly experimental version
-// of a wasm-based target which does *not* use the Emscripten toolchain. Instead
-// this is a pretty flavorful (aka hacked up) target right now. The definition
-// and semantics of this target are likely to change and so this shouldn't be
-// relied on just yet.
+// The wasm32-unknown-unknown target is currently an experimental version of a
+// wasm-based target which does *not* use the Emscripten toolchain. Instead
+// this toolchain is based purely on LLVM's own toolchain, using LLVM's native
+// WebAssembly backend as well as LLD for a native linker.
//
-// In general everyone is currently waiting on a linker for wasm code. In the
-// meantime we have no means of actually making use of the traditional separate
-// compilation model. At a high level this means that assembling Rust programs
-// into a WebAssembly program looks like:
-//
-// 1. All intermediate artifacts are LLVM bytecode. We'll be using LLVM as
-// a linker later on.
-// 2. For the final artifact we emit one giant assembly file (WebAssembly
-// doesn't have an object file format). To do this we force LTO to be turned
-// on (`requires_lto` below) to ensure all Rust code is in one module. Any
-// "linked" C library is basically just ignored.
-// 3. Using LLVM we emit a `foo.s` file (assembly) with some... what I can only
-// describe as arcane syntax. From there we need to actually change this
-// into a wasm module. For this step we use the `binaryen` project. This
-// project is mostly intended as a WebAssembly code generator, but for now
-// we're just using its LLVM-assembly-to-wasm-module conversion utilities.
-//
-// And voila, out comes a web assembly module! There's some various tweaks here
-// and there, but that's the high level at least. Note that this will be
-// rethought from the ground up once a linker (lld) is available, so this is all
-// temporary and should improve in the future.
+// There's some trickery below on crate types supported and various defaults
+// (aka panic=abort by default), but otherwise this is in general a relatively
+// standard target.
-use LinkerFlavor;
+use {LinkerFlavor, LldFlavor};
use super::{Target, TargetOptions, PanicStrategy};
pub fn target() -> Result<Target, String> {
let opts = TargetOptions {
- linker: "not-used".to_string(),
-
// we allow dynamic linking, but only cdylibs. Basically we allow a
// final library artifact that exports some symbols (a wasm module) but
// we don't allow intermediate `dylib` crate types
dll_suffix: ".wasm".to_string(),
linker_is_gnu: false,
- // We're storing bitcode for now in all the rlibs
- obj_is_bitcode: true,
-
// A bit of a lie, but "eh"
max_atomic_width: Some(32),
// the future once unwinding is implemented. Don't rely on this.
panic_strategy: PanicStrategy::Abort,
- // There's no linker yet so we're forced to use LLVM as a linker. This
- // means that we must always enable LTO for final artifacts.
- requires_lto: true,
-
// Wasm doesn't have atomics yet, so tell LLVM that we're in a single
// threaded model which will legalize atomics to normal operations.
singlethread: true,
- // Because we're always enabling LTO we can't enable builtin lowering as
- // otherwise we'll lower the definition of the `memcpy` function to
- // memcpy itself. Note that this is specifically because we're
- // performing LTO with compiler-builtins.
- no_builtins: true,
-
// no dynamic linking, no need for default visibility!
default_hidden_visibility: true,
.. Default::default()
};
Ok(Target {
- llvm_target: "wasm32-unknown-unknown".to_string(),
+ llvm_target: "wasm32-unknown-unknown-wasm".to_string(),
target_endian: "little".to_string(),
target_pointer_width: "32".to_string(),
target_c_int_width: "32".to_string(),
target_vendor: "unknown".to_string(),
data_layout: "e-m:e-p:32:32-i64:64-n32:64-S128".to_string(),
arch: "wasm32".to_string(),
- // A bit of a lie, but it gets the job done
- linker_flavor: LinkerFlavor::Binaryen,
+ linker_flavor: LinkerFlavor::Lld(LldFlavor::Wasm),
options: opts,
})
}
TargetOptions {
// FIXME(#13846) this should be enabled for windows
function_sections: false,
- linker: "gcc".to_string(),
+ linker: Some("gcc".to_string()),
dynamic_linking: true,
executables: true,
dll_prefix: "".to_string(),
TargetOptions {
function_sections: true,
- linker: "link.exe".to_string(),
dynamic_linking: true,
executables: true,
dll_prefix: "".to_string(),
let mut base = super::netbsd_base::opts();
base.cpu = "x86-64".to_string();
base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
- base.linker = "x86_64-rumprun-netbsd-gcc".to_string();
+ base.linker = Some("x86_64-rumprun-netbsd-gcc".to_string());
base.max_atomic_width = Some(64);
base.dynamic_linking = false;
let mut base = super::cloudabi_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.linker = "x86_64-unknown-cloudabi-cc".to_string();
+ base.linker = Some("x86_64-unknown-cloudabi-cc".to_string());
base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
base.stack_probes = true;
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// This is a small C API inserted on top of the Binaryen C++ API which we use
-// from Rust. Once we have a real linker for we'll be able to remove all this,
-// and otherwise this is just all on a "as we need it" basis for now.
-
-#include <stdint.h>
-#include <string>
-#include <sstream>
-#include <stdlib.h>
-
-#include "s2wasm.h"
-#include "wasm-binary.h"
-#include "wasm-linker.h"
-
-using namespace wasm;
-
-struct BinaryenRustModule {
- BufferWithRandomAccess buffer;
- std::string sourceMapJSON;
-};
-
-struct BinaryenRustModuleOptions {
- uint64_t globalBase;
- bool debug;
- uint64_t stackAllocation;
- uint64_t initialMem;
- uint64_t maxMem;
- bool importMemory;
- bool ignoreUnknownSymbols;
- bool debugInfo;
- std::string startFunction;
- std::string sourceMapUrl;
-
- BinaryenRustModuleOptions() :
- globalBase(0),
- debug(false),
- stackAllocation(0),
- initialMem(0),
- maxMem(0),
- importMemory(false),
- ignoreUnknownSymbols(false),
- debugInfo(false),
- startFunction(""),
- sourceMapUrl("")
- {}
-
-};
-
-extern "C" BinaryenRustModuleOptions*
-BinaryenRustModuleOptionsCreate() {
- return new BinaryenRustModuleOptions;
-}
-
-extern "C" void
-BinaryenRustModuleOptionsFree(BinaryenRustModuleOptions *options) {
- delete options;
-}
-
-extern "C" void
-BinaryenRustModuleOptionsSetDebugInfo(BinaryenRustModuleOptions *options,
- bool debugInfo) {
- options->debugInfo = debugInfo;
-}
-
-extern "C" void
-BinaryenRustModuleOptionsSetStart(BinaryenRustModuleOptions *options,
- char *start) {
- options->startFunction = start;
-}
-
-extern "C" void
-BinaryenRustModuleOptionsSetSourceMapUrl(BinaryenRustModuleOptions *options,
- char *sourceMapUrl) {
- options->sourceMapUrl = sourceMapUrl;
-}
-
-extern "C" void
-BinaryenRustModuleOptionsSetStackAllocation(BinaryenRustModuleOptions *options,
- uint64_t stack) {
- options->stackAllocation = stack;
-}
-
-extern "C" void
-BinaryenRustModuleOptionsSetImportMemory(BinaryenRustModuleOptions *options,
- bool import) {
- options->importMemory = import;
-}
-
-extern "C" BinaryenRustModule*
-BinaryenRustModuleCreate(const BinaryenRustModuleOptions *options,
- const char *assembly) {
- Linker linker(
- options->globalBase,
- options->stackAllocation,
- options->initialMem,
- options->maxMem,
- options->importMemory,
- options->ignoreUnknownSymbols,
- options->startFunction,
- options->debug);
-
- S2WasmBuilder mainbuilder(assembly, options->debug);
- linker.linkObject(mainbuilder);
- linker.layout();
-
- auto ret = make_unique<BinaryenRustModule>();
- {
- WasmBinaryWriter writer(&linker.getOutput().wasm, ret->buffer, options->debug);
- writer.setNamesSection(options->debugInfo);
-
- std::unique_ptr<std::ostringstream> sourceMapStream = nullptr;
- {
- sourceMapStream = make_unique<std::ostringstream>();
- writer.setSourceMap(sourceMapStream.get(), options->sourceMapUrl);
- }
-
- // FIXME: support symbol maps?
- // writer.setSymbolMap(symbolMap);
- writer.write();
-
- if (sourceMapStream) {
- ret->sourceMapJSON = sourceMapStream->str();
- }
- }
- return ret.release();
-}
-
-extern "C" const uint8_t*
-BinaryenRustModulePtr(const BinaryenRustModule *M) {
- return M->buffer.data();
-}
-
-extern "C" size_t
-BinaryenRustModuleLen(const BinaryenRustModule *M) {
- return M->buffer.size();
-}
-
-extern "C" const char*
-BinaryenRustModuleSourceMapPtr(const BinaryenRustModule *M) {
- return M->sourceMapJSON.data();
-}
-
-extern "C" size_t
-BinaryenRustModuleSourceMapLen(const BinaryenRustModule *M) {
- return M->sourceMapJSON.length();
-}
-
-extern "C" void
-BinaryenRustModuleFree(BinaryenRustModule *M) {
- delete M;
-}
+++ /dev/null
-# Wondering what this crate is? Take a look at the `lib.rs`!
-
-[package]
-name = "rustc_binaryen"
-version = "0.0.0"
-authors = ["The Rust Project Developers"]
-
-[lib]
-path = "lib.rs"
-
-[dependencies]
-libc = "0.2"
-
-[build-dependencies]
-cmake = "0.1"
-cc = "1.0"
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-extern crate cc;
-extern crate cmake;
-
-use std::env;
-
-use cmake::Config;
-
-fn main() {
- let target = env::var("TARGET").unwrap();
-
- // Bring in `__emutls_get_address` which is apparently needed for now
- if target.contains("pc-windows-gnu") {
- println!("cargo:rustc-link-lib=gcc_eh");
- println!("cargo:rustc-link-lib=pthread");
- }
-
- Config::new("../binaryen")
- .define("BUILD_STATIC_LIB", "ON")
- .build_target("binaryen")
- .build();
-
- // I couldn't figure out how to link just one of these, so link everything.
- println!("cargo:rustc-link-lib=static=asmjs");
- println!("cargo:rustc-link-lib=static=binaryen");
- println!("cargo:rustc-link-lib=static=cfg");
- println!("cargo:rustc-link-lib=static=emscripten-optimizer");
- println!("cargo:rustc-link-lib=static=ir");
- println!("cargo:rustc-link-lib=static=passes");
- println!("cargo:rustc-link-lib=static=support");
- println!("cargo:rustc-link-lib=static=wasm");
-
- let out_dir = env::var("OUT_DIR").unwrap();
- println!("cargo:rustc-link-search=native={}/build/lib", out_dir);
-
- // Add in our own little shim along with some extra files that weren't
- // included in the main build.
- let mut cfg = cc::Build::new();
- cfg.file("BinaryenWrapper.cpp")
- .file("../binaryen/src/wasm-linker.cpp")
- .file("../binaryen/src/wasm-emscripten.cpp")
- .include("../binaryen/src")
- .cpp_link_stdlib(None)
- .warnings(false)
- .cpp(true);
-
- if !target.contains("msvc") {
- cfg.flag("-std=c++11");
- }
- cfg.compile("binaryen_wrapper");
-}
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Rustc bindings to the binaryen project.
-//!
-//! This crate is a small shim around the binaryen project which provides us the
-//! ability to take LLVM's output and generate a wasm module. Specifically this
-//! only supports one operation, creating a module from LLVM's assembly format
-//! and then serializing that module to a wasm module.
-
-extern crate libc;
-
-use std::slice;
-use std::ffi::{CString, CStr};
-
-/// In-memory representation of a serialized wasm module.
-pub struct Module {
- ptr: *mut BinaryenRustModule,
-}
-
-impl Module {
- /// Creates a new wasm module from the LLVM-assembly provided (in a C string
- /// format).
- ///
- /// The actual module creation can be tweaked through the various options in
- /// `ModuleOptions` as well. Any errors are just returned as a bland string.
- pub fn new(assembly: &CStr, opts: &ModuleOptions) -> Result<Module, String> {
- unsafe {
- let ptr = BinaryenRustModuleCreate(opts.ptr, assembly.as_ptr());
- if ptr.is_null() {
- Err(format!("failed to create binaryen module"))
- } else {
- Ok(Module { ptr })
- }
- }
- }
-
- /// Returns the data of the serialized wasm module. This is a `foo.wasm`
- /// file contents.
- pub fn data(&self) -> &[u8] {
- unsafe {
- let ptr = BinaryenRustModulePtr(self.ptr);
- let len = BinaryenRustModuleLen(self.ptr);
- slice::from_raw_parts(ptr, len)
- }
- }
-
- /// Returns the data of the source map JSON.
- pub fn source_map(&self) -> &[u8] {
- unsafe {
- let ptr = BinaryenRustModuleSourceMapPtr(self.ptr);
- let len = BinaryenRustModuleSourceMapLen(self.ptr);
- slice::from_raw_parts(ptr, len)
- }
- }
-}
-
-impl Drop for Module {
- fn drop(&mut self) {
- unsafe {
- BinaryenRustModuleFree(self.ptr);
- }
- }
-}
-
-pub struct ModuleOptions {
- ptr: *mut BinaryenRustModuleOptions,
-}
-
-impl ModuleOptions {
- pub fn new() -> ModuleOptions {
- unsafe {
- let ptr = BinaryenRustModuleOptionsCreate();
- ModuleOptions { ptr }
- }
- }
-
- /// Turns on or off debug info.
- ///
- /// From what I can tell this just creates a "names" section of the wasm
- /// module which contains a table of the original function names.
- pub fn debuginfo(&mut self, debug: bool) -> &mut Self {
- unsafe {
- BinaryenRustModuleOptionsSetDebugInfo(self.ptr, debug);
- }
- self
- }
-
- /// Configures a `start` function for the module, to be executed when it's
- /// loaded.
- pub fn start(&mut self, func: &str) -> &mut Self {
- let func = CString::new(func).unwrap();
- unsafe {
- BinaryenRustModuleOptionsSetStart(self.ptr, func.as_ptr());
- }
- self
- }
-
- /// Configures a `sourceMappingURL` custom section value for the module.
- pub fn source_map_url(&mut self, url: &str) -> &mut Self {
- let url = CString::new(url).unwrap();
- unsafe {
- BinaryenRustModuleOptionsSetSourceMapUrl(self.ptr, url.as_ptr());
- }
- self
- }
-
- /// Configures how much stack is initially allocated for the module. 1MB is
- /// probably good enough for now.
- pub fn stack(&mut self, amt: u64) -> &mut Self {
- unsafe {
- BinaryenRustModuleOptionsSetStackAllocation(self.ptr, amt);
- }
- self
- }
-
- /// Flags whether the initial memory should be imported or exported. So far
- /// we export it by default.
- pub fn import_memory(&mut self, import: bool) -> &mut Self {
- unsafe {
- BinaryenRustModuleOptionsSetImportMemory(self.ptr, import);
- }
- self
- }
-}
-
-impl Drop for ModuleOptions {
- fn drop(&mut self) {
- unsafe {
- BinaryenRustModuleOptionsFree(self.ptr);
- }
- }
-}
-
-enum BinaryenRustModule {}
-enum BinaryenRustModuleOptions {}
-
-extern {
- fn BinaryenRustModuleCreate(opts: *const BinaryenRustModuleOptions,
- assembly: *const libc::c_char)
- -> *mut BinaryenRustModule;
- fn BinaryenRustModulePtr(module: *const BinaryenRustModule) -> *const u8;
- fn BinaryenRustModuleLen(module: *const BinaryenRustModule) -> usize;
- fn BinaryenRustModuleSourceMapPtr(module: *const BinaryenRustModule) -> *const u8;
- fn BinaryenRustModuleSourceMapLen(module: *const BinaryenRustModule) -> usize;
- fn BinaryenRustModuleFree(module: *mut BinaryenRustModule);
-
- fn BinaryenRustModuleOptionsCreate()
- -> *mut BinaryenRustModuleOptions;
- fn BinaryenRustModuleOptionsSetDebugInfo(module: *mut BinaryenRustModuleOptions,
- debuginfo: bool);
- fn BinaryenRustModuleOptionsSetStart(module: *mut BinaryenRustModuleOptions,
- start: *const libc::c_char);
- fn BinaryenRustModuleOptionsSetSourceMapUrl(module: *mut BinaryenRustModuleOptions,
- sourceMapUrl: *const libc::c_char);
- fn BinaryenRustModuleOptionsSetStackAllocation(
- module: *mut BinaryenRustModuleOptions,
- stack: u64,
- );
- fn BinaryenRustModuleOptionsSetImportMemory(
- module: *mut BinaryenRustModuleOptions,
- import: bool,
- );
- fn BinaryenRustModuleOptionsFree(module: *mut BinaryenRustModuleOptions);
-}
o: Origin)
-> DiagnosticBuilder<'a>
{
- if !o.should_emit_errors(self.tcx.sess.borrowck_mode()) {
+ if !o.should_emit_errors(self.tcx.borrowck_mode()) {
self.tcx.sess.diagnostic().cancel(&mut diag);
}
diag
}
fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool {
- if self.tcx.sess.features.borrow().never_type {
+ if self.tcx.features().never_type {
self.tcx.is_ty_uninhabited_from(self.module, ty)
} else {
false
substs: &'tcx ty::subst::Substs<'tcx>)
-> bool
{
- if self.tcx.sess.features.borrow().never_type {
+ if self.tcx.features().never_type {
self.tcx.is_enum_variant_uninhabited_from(self.module, variant, substs)
} else {
false
let pat_ty = self.tables.node_id_to_type(scrut.hir_id);
let module = self.tcx.hir.get_module_parent(scrut.id);
if inlined_arms.is_empty() {
- let scrutinee_is_uninhabited = if self.tcx.sess.features.borrow().never_type {
+ let scrutinee_is_uninhabited = if self.tcx.features().never_type {
self.tcx.is_ty_uninhabited_from(module, pat_ty)
} else {
self.conservative_is_uninhabited(pat_ty)
}).collect::<Result<Vec<_>, _>>()?))))
}
hir::ExprIndex(ref arr, ref idx) => {
- if !tcx.sess.features.borrow().const_indexing {
+ if !tcx.features().const_indexing {
signal!(e, IndexOpFeatureGated);
}
let arr = cx.eval(arr)?;
}
}
- loop {
- // non-standard `while let` to bypass #6393
- let i = match error_stack.pop() {
- Some(i) => i,
- None => break
- };
-
+ while let Some(i) = error_stack.pop() {
let node = &self.nodes[i];
match node.state.get() {
use bitvec::BitMatrix;
use fx::FxHashMap;
+use sync::Lock;
use rustc_serialize::{Encodable, Encoder, Decodable, Decoder};
use stable_hasher::{HashStable, StableHasher, StableHasherResult};
-use std::cell::RefCell;
use std::fmt::Debug;
use std::hash::Hash;
use std::mem;
#[derive(Clone, Debug)]
-pub struct TransitiveRelation<T: Clone + Debug + Eq + Hash + Clone> {
+pub struct TransitiveRelation<T: Clone + Debug + Eq + Hash> {
// List of elements. This is used to map from a T to a usize.
elements: Vec<T>,
// This is a cached transitive closure derived from the edges.
// Currently, we build it lazilly and just throw out any existing
- // copy whenever a new edge is added. (The RefCell is to permit
+ // copy whenever a new edge is added. (The Lock is to permit
// the lazy computation.) This is kind of silly, except for the
// fact its size is tied to `self.elements.len()`, so I wanted to
// wait before building it up to avoid reallocating as new edges
// are added with new elements. Perhaps better would be to ask the
// user for a batch of edges to minimize this effect, but I
// already wrote the code this way. :P -nmatsakis
- closure: RefCell<Option<BitMatrix>>,
+ closure: Lock<Option<BitMatrix>>,
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable, Debug)]
target: Index,
}
-impl<T: Clone + Debug + Eq + Hash + Clone> TransitiveRelation<T> {
+impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
pub fn new() -> TransitiveRelation<T> {
TransitiveRelation {
elements: vec![],
map: FxHashMap(),
edges: vec![],
- closure: RefCell::new(None),
+ closure: Lock::new(None),
}
}
fn add_index(&mut self, a: T) -> Index {
let &mut TransitiveRelation {
ref mut elements,
- ref closure,
+ ref mut closure,
ref mut map,
..
} = self;
elements.push(a);
// if we changed the dimensions, clear the cache
- *closure.borrow_mut() = None;
+ *closure.get_mut() = None;
Index(elements.len() - 1)
})
self.edges.push(edge);
// added an edge, clear the cache
- *self.closure.borrow_mut() = None;
+ *self.closure.get_mut() = None;
}
}
.enumerate()
.map(|(index, elem)| (elem.clone(), Index(index)))
.collect();
- Ok(TransitiveRelation { elements, edges, map, closure: RefCell::new(None) })
+ Ok(TransitiveRelation { elements, edges, map, closure: Lock::new(None) })
})
}
}
arena = { path = "../libarena" }
graphviz = { path = "../libgraphviz" }
log = "0.4"
-env_logger = { version = "0.4", default-features = false }
+env_logger = { version = "0.5", default-features = false }
rustc = { path = "../librustc" }
rustc_allocator = { path = "../librustc_allocator" }
rustc_back = { path = "../librustc_back" }
let (mut krate, features) = syntax::config::features(krate, &sess.parse_sess, sess.opts.test);
// these need to be set "early" so that expansion sees `quote` if enabled.
- *sess.features.borrow_mut() = features;
+ sess.init_features(features);
*sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs);
let mut registry = registry.unwrap_or(Registry::new(sess, krate.span));
time(time_passes, "plugin registration", || {
- if sess.features.borrow().rustc_diagnostic_macros {
+ if sess.features_untracked().rustc_diagnostic_macros {
registry.register_macro("__diagnostic_used",
diagnostics::plugin::expand_diagnostic_used);
registry.register_macro("__register_diagnostic",
crate_loader,
&resolver_arenas);
resolver.whitelisted_legacy_custom_derives = whitelisted_legacy_custom_derives;
- syntax_ext::register_builtins(&mut resolver, syntax_exts, sess.features.borrow().quote);
+ syntax_ext::register_builtins(&mut resolver, syntax_exts, sess.features_untracked().quote);
krate = time(time_passes, "expansion", || {
// Windows dlls do not have rpaths, so they don't know how to find their
.filter(|p| env::join_paths(iter::once(p)).is_ok()))
.unwrap());
}
- let features = sess.features.borrow();
+ let features = sess.features_untracked();
let cfg = syntax::ext::expand::ExpansionConfig {
features: Some(&features),
recursion_limit: sess.recursion_limit.get(),
sess.opts.test,
krate,
sess.diagnostic(),
- &sess.features.borrow())
+ &sess.features_untracked())
});
// If we're actually rustdoc then there's no need to actually compile
sess.track_errors(|| {
syntax::feature_gate::check_crate(&krate,
&sess.parse_sess,
- &sess.features.borrow(),
+ &sess.features_untracked(),
&attributes,
sess.opts.unstable_features);
})
let sysroot = sysroot_candidates.iter()
.map(|sysroot| {
let libdir = filesearch::relative_target_lib_path(&sysroot, &target);
- sysroot.join(libdir).with_file_name("codegen-backends")
+ sysroot.join(libdir)
+ .with_file_name(option_env!("CFG_CODEGEN_BACKENDS_DIR")
+ .unwrap_or("codegen-backends"))
})
.filter(|f| {
info!("codegen backend candidate: {}", f.display());
}
pub fn main() {
- env_logger::init().unwrap();
+ env_logger::init();
let result = run(|| {
let args = env::args_os().enumerate()
.map(|(i, arg)| arg.into_string().unwrap_or_else(|arg| {
// if the `rustc_attrs` feature is not enabled, then the
// attributes we are interested in cannot be present anyway, so
// skip the walk.
- if !tcx.sess.features.borrow().rustc_attrs {
+ if !tcx.features().rustc_attrs {
return;
}
pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
// can't add `#[rustc_dirty]` etc without opting in to this feature
- if !tcx.sess.features.borrow().rustc_attrs {
+ if !tcx.features().rustc_attrs {
return;
}
// visibility is token at start of declaration (can be macro
// variable rather than literal `pub`)
let pub_span = cx.tcx.sess.codemap().span_until_char(def_span, ' ');
- let replacement = if cx.tcx.sess.features.borrow().crate_visibility_modifier {
+ let replacement = if cx.tcx.features().crate_visibility_modifier {
"crate"
} else {
"pub(crate)"
// Detect literal value out of range [min, max] inclusive
// avoiding use of -min to prevent overflow/panic
- if (negative && v > max + 1) ||
- (!negative && v > max) {
- cx.span_lint(OVERFLOWING_LITERALS,
- e.span,
- &format!("literal out of range for {:?}", t));
+ if (negative && v > max + 1) || (!negative && v > max) {
+ if let Some(repr_str) = get_bin_hex_repr(cx, lit) {
+ report_bin_hex_error(
+ cx,
+ e,
+ ty::TyInt(t),
+ repr_str,
+ v,
+ negative,
+ );
+ return;
+ }
+ cx.span_lint(
+ OVERFLOWING_LITERALS,
+ e.span,
+ &format!("literal out of range for {:?}", t),
+ );
return;
}
}
let mut err = cx.struct_span_lint(
OVERFLOWING_LITERALS,
parent_expr.span,
- "only u8 can be casted into char");
+ "only u8 can be cast into char");
err.span_suggestion(parent_expr.span,
&"use a char literal instead",
format!("'\\u{{{:X}}}'", lit_val));
}
}
}
- cx.span_lint(OVERFLOWING_LITERALS,
- e.span,
- &format!("literal out of range for {:?}", t));
+ if let Some(repr_str) = get_bin_hex_repr(cx, lit) {
+ report_bin_hex_error(
+ cx,
+ e,
+ ty::TyUint(t),
+ repr_str,
+ lit_val,
+ false,
+ );
+ return;
+ }
+ cx.span_lint(
+ OVERFLOWING_LITERALS,
+ e.span,
+ &format!("literal out of range for {:?}", t),
+ );
}
}
ty::TyFloat(t) => {
_ => false,
}
}
+
+ fn get_bin_hex_repr(cx: &LateContext, lit: &ast::Lit) -> Option<String> {
+ let src = cx.sess().codemap().span_to_snippet(lit.span).ok()?;
+ let firstch = src.chars().next()?;
+
+ if firstch == '0' {
+ match src.chars().nth(1) {
+ Some('x') | Some('b') => return Some(src),
+ _ => return None,
+ }
+ }
+
+ None
+ }
+
+ // This function finds the next fitting type and generates a suggestion string.
+ // It searches for fitting types in the following way (`X < Y`):
+ // - `iX`: if literal fits in `uX` => `uX`, else => `iY`
+ // - `-iX` => `iY`
+ // - `uX` => `uY`
+ //
+ // No suggestion for: `isize`, `usize`.
+ fn get_type_suggestion<'a>(
+ t: &ty::TypeVariants,
+ val: u128,
+ negative: bool,
+ ) -> Option<String> {
+ use syntax::ast::IntTy::*;
+ use syntax::ast::UintTy::*;
+ macro_rules! find_fit {
+ ($ty:expr, $val:expr, $negative:expr,
+ $($type:ident => [$($utypes:expr),*] => [$($itypes:expr),*]),+) => {
+ {
+ let _neg = if negative { 1 } else { 0 };
+ match $ty {
+ $($type => {
+ $(if !negative && val <= uint_ty_range($utypes).1 {
+ return Some(format!("{:?}", $utypes))
+ })*
+ $(if val <= int_ty_range($itypes).1 as u128 + _neg {
+ return Some(format!("{:?}", $itypes))
+ })*
+ None
+ },)*
+ _ => None
+ }
+ }
+ }
+ }
+ match t {
+ &ty::TyInt(i) => find_fit!(i, val, negative,
+ I8 => [U8] => [I16, I32, I64, I128],
+ I16 => [U16] => [I32, I64, I128],
+ I32 => [U32] => [I64, I128],
+ I64 => [U64] => [I128],
+ I128 => [U128] => []),
+ &ty::TyUint(u) => find_fit!(u, val, negative,
+ U8 => [U8, U16, U32, U64, U128] => [],
+ U16 => [U16, U32, U64, U128] => [],
+ U32 => [U32, U64, U128] => [],
+ U64 => [U64, U128] => [],
+ U128 => [U128] => []),
+ _ => None,
+ }
+ }
+
+ fn report_bin_hex_error(
+ cx: &LateContext,
+ expr: &hir::Expr,
+ ty: ty::TypeVariants,
+ repr_str: String,
+ val: u128,
+ negative: bool,
+ ) {
+ let (t, actually) = match ty {
+ ty::TyInt(t) => {
+ let bits = int_ty_bits(t, cx.sess().target.isize_ty);
+ let actually = (val << (128 - bits)) as i128 >> (128 - bits);
+ (format!("{:?}", t), actually.to_string())
+ }
+ ty::TyUint(t) => {
+ let bits = uint_ty_bits(t, cx.sess().target.usize_ty);
+ let actually = (val << (128 - bits)) >> (128 - bits);
+ (format!("{:?}", t), actually.to_string())
+ }
+ _ => bug!(),
+ };
+ let mut err = cx.struct_span_lint(
+ OVERFLOWING_LITERALS,
+ expr.span,
+ &format!("literal out of range for {}", t),
+ );
+ err.note(&format!(
+ "the literal `{}` (decimal `{}`) does not fit into \
+ an `{}` and will become `{}{}`",
+ repr_str, val, t, actually, t
+ ));
+ if let Some(sugg_ty) =
+ get_type_suggestion(&cx.tables.node_id_to_type(expr.hir_id).sty, val, negative)
+ {
+ if let Some(pos) = repr_str.chars().position(|c| c == 'i' || c == 'u') {
+ let (sans_suffix, _) = repr_str.split_at(pos);
+ err.span_suggestion(
+ expr.span,
+ &format!("consider using `{}` instead", sugg_ty),
+ format!("{}{}", sans_suffix, sugg_ty),
+ );
+ } else {
+ err.help(&format!("consider using `{}` instead", sugg_ty));
+ }
+ }
+
+ err.emit();
+ }
}
}
let mut fn_warned = false;
let mut op_warned = false;
- if cx.tcx.sess.features.borrow().fn_must_use {
+ if cx.tcx.features().fn_must_use {
let maybe_def = match expr.node {
hir::ExprCall(ref callee, _) => {
match callee.node {
crate_root.def_path_table.decode((&metadata, self.sess))
});
- let exported_symbols = crate_root.exported_symbols
- .decode((&metadata, self.sess))
- .collect();
let trait_impls = crate_root
.impls
.decode((&metadata, self.sess))
name,
extern_crate: Cell::new(None),
def_path_table: Lrc::new(def_path_table),
- exported_symbols,
trait_impls,
proc_macros: crate_root.macro_derive_registrar.map(|_| {
self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span)
/// compilation support.
pub def_path_table: Lrc<DefPathTable>,
- pub exported_symbols: FxHashSet<DefIndex>,
-
pub trait_impls: FxHashMap<(u32, DefIndex), schema::LazySeq<DefIndex>>,
pub dep_kind: Cell<DepKind>,
use rustc::middle::cstore::{CrateStore, DepKind,
MetadataLoader, LinkMeta,
LoadedMacro, EncodedMetadata, NativeLibraryKind};
+use rustc::middle::exported_symbols::ExportedSymbol;
use rustc::middle::stability::DeprecationEntry;
use rustc::hir::def;
use rustc::session::{CrateDisambiguator, Session};
use rustc::hir::map::{DefKey, DefPath, DefPathHash};
use rustc::hir::map::blocks::FnLikeNode;
use rustc::hir::map::definitions::DefPathTable;
-use rustc::util::nodemap::{NodeSet, DefIdMap};
+use rustc::util::nodemap::DefIdMap;
use std::any::Any;
use rustc_data_structures::sync::Lrc;
+use std::sync::Arc;
use syntax::ast;
use syntax::attr;
fn_arg_names => { cdata.get_fn_arg_names(def_id.index) }
impl_parent => { cdata.get_parent_impl(def_id.index) }
trait_of_item => { cdata.get_trait_of_item(def_id.index) }
- is_exported_symbol => {
- cdata.exported_symbols.contains(&def_id.index)
- }
item_body_nested_bodies => { cdata.item_body_nested_bodies(def_id.index) }
const_is_rvalue_promotable_to_static => {
cdata.const_is_rvalue_promotable_to_static(def_id.index)
extern_crate => { Lrc::new(cdata.extern_crate.get()) }
is_no_builtins => { cdata.is_no_builtins(tcx.sess) }
impl_defaultness => { cdata.get_impl_defaultness(def_id.index) }
- exported_symbol_ids => { Lrc::new(cdata.get_exported_symbols()) }
+ reachable_non_generics => {
+ let reachable_non_generics = tcx
+ .exported_symbols(cdata.cnum)
+ .iter()
+ .filter_map(|&(exported_symbol, _)| {
+ if let ExportedSymbol::NonGeneric(def_id) = exported_symbol {
+ return Some(def_id)
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ Lrc::new(reachable_non_generics)
+ }
native_libraries => { Lrc::new(cdata.get_native_libraries(tcx.sess)) }
plugin_registrar_fn => {
cdata.root.plugin_registrar_fn.map(|index| {
has_copy_closures => { cdata.has_copy_closures(tcx.sess) }
has_clone_closures => { cdata.has_clone_closures(tcx.sess) }
+
+ exported_symbols => {
+ let cnum = cdata.cnum;
+ assert!(cnum != LOCAL_CRATE);
+
+ // If this crate is a custom derive crate, then we're not even going to
+ // link those in so we skip those crates.
+ if cdata.root.macro_derive_registrar.is_some() {
+ return Arc::new(Vec::new())
+ }
+
+ Arc::new(cdata.exported_symbols())
+ }
}
pub fn provide<'tcx>(providers: &mut Providers<'tcx>) {
fn encode_metadata<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- link_meta: &LinkMeta,
- reachable: &NodeSet)
+ link_meta: &LinkMeta)
-> EncodedMetadata
{
- encoder::encode_metadata(tcx, link_meta, reachable)
+ encoder::encode_metadata(tcx, link_meta)
}
fn metadata_encoding_version(&self) -> &[u8]
use rustc::hir;
use rustc::middle::cstore::{LinkagePreference, ExternConstBody,
ExternBodyNestedBodies};
+use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel};
use rustc::hir::def::{self, Def, CtorKind};
use rustc::hir::def_id::{CrateNum, DefId, DefIndex,
CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::session::Session;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::codec::TyDecoder;
-use rustc::util::nodemap::DefIdSet;
use rustc::mir::Mir;
use std::cell::Ref;
arg_names.decode(self).collect()
}
- pub fn get_exported_symbols(&self) -> DefIdSet {
- self.exported_symbols
- .iter()
- .map(|&index| self.local_def_id(index))
+ pub fn exported_symbols(&self) -> Vec<(ExportedSymbol, SymbolExportLevel)> {
+ self.root
+ .exported_symbols
+ .decode(self)
.collect()
}
use rustc::hir::map::definitions::DefPathTable;
use rustc::ich::Fingerprint;
use rustc::middle::dependency_format::Linkage;
+use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel,
+ metadata_symbol_name};
use rustc::middle::lang_items;
use rustc::mir;
use rustc::traits::specialization_graph;
-use rustc::ty::{self, Ty, TyCtxt, ReprOptions};
+use rustc::ty::{self, Ty, TyCtxt, ReprOptions, SymbolName};
use rustc::ty::codec::{self as ty_codec, TyEncoder};
use rustc::session::config::{self, CrateTypeProcMacro};
-use rustc::util::nodemap::{FxHashMap, NodeSet};
+use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
opaque: opaque::Encoder<'a>,
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &'a LinkMeta,
- exported_symbols: &'a NodeSet,
lazy_state: LazyState,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
// Encode exported symbols info.
i = self.position();
+ let exported_symbols = self.tcx.exported_symbols(LOCAL_CRATE);
let exported_symbols = self.tracked(
IsolatedEncoder::encode_exported_symbols,
- self.exported_symbols);
+ &exported_symbols);
let exported_symbols_bytes = self.position() - i;
// Encode and index the items.
// middle::reachable module but filters out items that either don't have a
// symbol associated with them (they weren't translated) or if they're an FFI
// definition (as that's not defined in this crate).
- fn encode_exported_symbols(&mut self, exported_symbols: &NodeSet) -> LazySeq<DefIndex> {
- let tcx = self.tcx;
- self.lazy_seq(exported_symbols.iter().map(|&id| tcx.hir.local_def_id(id).index))
+ fn encode_exported_symbols(&mut self,
+ exported_symbols: &[(ExportedSymbol, SymbolExportLevel)])
+ -> LazySeq<(ExportedSymbol, SymbolExportLevel)> {
+
+ // The metadata symbol name is special. It should not show up in
+ // downstream crates.
+ let metadata_symbol_name = SymbolName::new(&metadata_symbol_name(self.tcx));
+
+ self.lazy_seq(exported_symbols
+ .iter()
+ .filter(|&&(ref exported_symbol, _)| {
+ match *exported_symbol {
+ ExportedSymbol::NoDefId(symbol_name) => {
+ symbol_name != metadata_symbol_name
+ },
+ _ => true,
+ }
+ })
+ .cloned())
}
fn encode_dylib_dependency_formats(&mut self, _: ()) -> LazySeq<Option<LinkagePreference>> {
// generated regardless of trailing bytes that end up in it.
pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- link_meta: &LinkMeta,
- exported_symbols: &NodeSet)
+ link_meta: &LinkMeta)
-> EncodedMetadata
{
let mut cursor = Cursor::new(vec![]);
opaque: opaque::Encoder::new(&mut cursor),
tcx,
link_meta,
- exported_symbols,
lazy_state: LazyState::NoNode,
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
None => self.tcx.sess.err(msg),
}
}
- if lib.cfg.is_some() && !self.tcx.sess.features.borrow().link_cfg {
+ if lib.cfg.is_some() && !self.tcx.features().link_cfg {
feature_gate::emit_feature_err(&self.tcx.sess.parse_sess,
"link_cfg",
span.unwrap(),
"is feature gated");
}
if lib.kind == cstore::NativeStaticNobundle &&
- !self.tcx.sess.features.borrow().static_nobundle {
+ !self.tcx.features().static_nobundle {
feature_gate::emit_feature_err(&self.tcx.sess.parse_sess,
"static_nobundle",
span.unwrap(),
use rustc::hir::def_id::{DefIndex, DefId, CrateNum};
use rustc::ich::StableHashingContext;
use rustc::middle::cstore::{DepKind, LinkagePreference, NativeLibrary};
+use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel};
use rustc::middle::lang_items;
use rustc::mir;
use rustc::session::CrateDisambiguator;
pub codemap: LazySeq<syntax_pos::FileMap>,
pub def_path_table: Lazy<hir::map::definitions::DefPathTable>,
pub impls: LazySeq<TraitImpls>,
- pub exported_symbols: LazySeq<DefIndex>,
+ pub exported_symbols: LazySeq<(ExportedSymbol, SymbolExportLevel)>,
+
pub index: LazySeq<index::Index>,
}
use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::sync::Lrc;
-use super::{MirBorrowckCtxt, Context};
+use super::{Context, MirBorrowckCtxt};
use super::{InitializationRequiringAction, PrefixSet};
use dataflow::{ActiveBorrows, BorrowData, FlowAtLocation, MovingOutStatements};
use dataflow::move_paths::MovePathIndex;
} else {
true
}
- },
+ }
_ => true,
};
None => "value".to_owned(),
};
- err.note(&format!("move occurs because {} has type `{}`, \
- which does not implement the `Copy` trait",
- note_msg, ty));
+ err.note(&format!(
+ "move occurs because {} has type `{}`, \
+ which does not implement the `Copy` trait",
+ note_msg, ty
+ ));
}
}
span,
&self.describe_place(place).unwrap_or("_".to_owned()),
self.retrieve_borrow_span(borrow),
- &self.describe_place(&borrow.borrowed_place).unwrap_or("_".to_owned()),
+ &self.describe_place(&borrow.borrowed_place)
+ .unwrap_or("_".to_owned()),
Origin::Mir,
);
use rustc::hir::ExprClosure;
use rustc::mir::AggregateKind;
- let local = match self.mir[location.block].statements.get(location.statement_index) {
- Some(&Statement { kind: StatementKind::Assign(Place::Local(local), _), .. }) => local,
+ let local = match self.mir[location.block]
+ .statements
+ .get(location.statement_index)
+ {
+ Some(&Statement {
+ kind: StatementKind::Assign(Place::Local(local), _),
+ ..
+ }) => local,
_ => return None,
};
.with_freevars(node_id, |freevars| {
for (v, place) in freevars.iter().zip(places) {
match *place {
- Operand::Copy(Place::Local(l)) |
- Operand::Move(Place::Local(l)) if local == l =>
+ Operand::Copy(Place::Local(l))
+ | Operand::Move(Place::Local(l)) if local == l =>
{
debug!(
"find_closure_span: found captured local {:?}",
context: Context,
(place, span): (&Place<'tcx>, Span),
gen_borrow_kind: BorrowKind,
- issued_borrow: &BorrowData,
+ issued_borrow: &BorrowData<'tcx>,
end_issued_loan_span: Option<Span>,
) {
let issued_span = self.retrieve_borrow_span(issued_borrow);
"immutable",
"mutable",
) {
- (BorrowKind::Shared, lft, _, BorrowKind::Mut { .. }, _, rgt) |
- (BorrowKind::Mut { .. }, _, lft, BorrowKind::Shared, rgt, _) => self.tcx
+ (BorrowKind::Shared, lft, _, BorrowKind::Mut { .. }, _, rgt)
+ | (BorrowKind::Mut { .. }, _, lft, BorrowKind::Shared, rgt, _) => self.tcx
.cannot_reborrow_already_borrowed(
span,
&desc_place,
context: Context,
borrow: &BorrowData<'tcx>,
drop_span: Span,
- borrows: &ActiveBorrows<'cx, 'gcx, 'tcx>
+ borrows: &ActiveBorrows<'cx, 'gcx, 'tcx>,
) {
let end_span = borrows.opt_region_end_span(&borrow.region);
let scope_tree = borrows.0.scope_tree();
- let root_place = self.prefixes(&borrow.borrowed_place, PrefixSet::All).last().unwrap();
+ let root_place = self.prefixes(&borrow.borrowed_place, PrefixSet::All)
+ .last()
+ .unwrap();
let borrow_span = self.mir.source_info(borrow.location).span;
let proper_span = match *root_place {
_ => drop_span,
};
- if self.access_place_error_reported.contains(&(root_place.clone(), borrow_span)) {
- debug!("suppressing access_place error when borrow doesn't live long enough for {:?}",
- borrow_span);
+ if self.access_place_error_reported
+ .contains(&(root_place.clone(), borrow_span))
+ {
+ debug!(
+ "suppressing access_place error when borrow doesn't live long enough for {:?}",
+ borrow_span
+ );
return;
}
- self.access_place_error_reported.insert((root_place.clone(), borrow_span));
+ self.access_place_error_reported
+ .insert((root_place.clone(), borrow_span));
match (borrow.region, &self.describe_place(&borrow.borrowed_place)) {
(RegionKind::ReScope(_), Some(name)) => {
drop_span,
borrow_span,
proper_span,
- end_span
+ end_span,
);
- },
+ }
(RegionKind::ReScope(_), None) => {
self.report_scoped_temporary_value_does_not_live_long_enough(
context,
drop_span,
borrow_span,
proper_span,
- end_span
+ end_span,
);
- },
- (RegionKind::ReEarlyBound(_), Some(name)) |
- (RegionKind::ReFree(_), Some(name)) |
- (RegionKind::ReStatic, Some(name)) |
- (RegionKind::ReEmpty, Some(name)) |
- (RegionKind::ReVar(_), Some(name)) => {
+ }
+ (RegionKind::ReEarlyBound(_), Some(name))
+ | (RegionKind::ReFree(_), Some(name))
+ | (RegionKind::ReStatic, Some(name))
+ | (RegionKind::ReEmpty, Some(name))
+ | (RegionKind::ReVar(_), Some(name)) => {
self.report_unscoped_local_value_does_not_live_long_enough(
context,
name,
proper_span,
end_span,
);
- },
- (RegionKind::ReEarlyBound(_), None) |
- (RegionKind::ReFree(_), None) |
- (RegionKind::ReStatic, None) |
- (RegionKind::ReEmpty, None) |
- (RegionKind::ReVar(_), None) => {
+ }
+ (RegionKind::ReEarlyBound(_), None)
+ | (RegionKind::ReFree(_), None)
+ | (RegionKind::ReStatic, None)
+ | (RegionKind::ReEmpty, None)
+ | (RegionKind::ReVar(_), None) => {
self.report_unscoped_temporary_value_does_not_live_long_enough(
context,
&scope_tree,
proper_span,
end_span,
);
- },
- (RegionKind::ReLateBound(_, _), _) |
- (RegionKind::ReSkolemized(_, _), _) |
- (RegionKind::ReClosureBound(_), _) |
- (RegionKind::ReErased, _) => {
+ }
+ (RegionKind::ReLateBound(_, _), _)
+ | (RegionKind::ReSkolemized(_, _), _)
+ | (RegionKind::ReClosureBound(_), _)
+ | (RegionKind::ReErased, _) => {
span_bug!(drop_span, "region does not make sense in this context");
- },
+ }
}
}
_proper_span: Span,
end_span: Option<Span>,
) {
- let mut err = self.tcx.path_does_not_live_long_enough(borrow_span,
- &format!("`{}`", name),
- Origin::Mir);
+ let mut err = self.tcx.path_does_not_live_long_enough(
+ borrow_span,
+ &format!("`{}`", name),
+ Origin::Mir,
+ );
err.span_label(borrow_span, "borrowed value does not live long enough");
- err.span_label(drop_span, format!("`{}` dropped here while still borrowed", name));
+ err.span_label(
+ drop_span,
+ format!("`{}` dropped here while still borrowed", name),
+ );
if let Some(end) = end_span {
err.span_label(end, "borrowed value needs to live until here");
}
proper_span: Span,
end_span: Option<Span>,
) {
- let mut err = self.tcx.path_does_not_live_long_enough(proper_span,
- "borrowed value",
- Origin::Mir);
+ let mut err =
+ self.tcx
+ .path_does_not_live_long_enough(proper_span, "borrowed value", Origin::Mir);
err.span_label(proper_span, "temporary value does not live long enough");
- err.span_label(drop_span, "temporary value dropped here while still borrowed");
+ err.span_label(
+ drop_span,
+ "temporary value dropped here while still borrowed",
+ );
err.note("consider using a `let` binding to increase its lifetime");
if let Some(end) = end_span {
err.span_label(end, "temporary value needs to live until here");
_proper_span: Span,
_end_span: Option<Span>,
) {
- let mut err = self.tcx.path_does_not_live_long_enough(borrow_span,
- &format!("`{}`", name),
- Origin::Mir);
+ debug!(
+ "report_unscoped_local_value_does_not_live_long_enough(\
+ {:?}, {:?}, {:?}, {:?}, {:?}, {:?}\
+ )",
+ context, name, scope_tree, borrow, drop_span, borrow_span
+ );
+
+ let mut err = self.tcx.path_does_not_live_long_enough(
+ borrow_span,
+ &format!("`{}`", name),
+ Origin::Mir,
+ );
err.span_label(borrow_span, "borrowed value does not live long enough");
err.span_label(drop_span, "borrowed value only lives until here");
- self.tcx.note_and_explain_region(scope_tree, &mut err,
- "borrowed value must be valid for ",
- borrow.region, "...");
+
+ if !self.tcx.nll() {
+ self.tcx.note_and_explain_region(
+ scope_tree,
+ &mut err,
+ "borrowed value must be valid for ",
+ borrow.region,
+ "...",
+ );
+ }
+
self.explain_why_borrow_contains_point(context, borrow, &mut err);
err.emit();
}
drop_span: Span,
_borrow_span: Span,
proper_span: Span,
- _end_span: Option<Span>
+ _end_span: Option<Span>,
) {
- let mut err = self.tcx.path_does_not_live_long_enough(proper_span,
- "borrowed value",
- Origin::Mir);
+ debug!(
+ "report_unscoped_temporary_value_does_not_live_long_enough(\
+ {:?}, {:?}, {:?}, {:?}, {:?}\
+ )",
+ context, scope_tree, borrow, drop_span, proper_span
+ );
+
+ let mut err =
+ self.tcx
+ .path_does_not_live_long_enough(proper_span, "borrowed value", Origin::Mir);
err.span_label(proper_span, "temporary value does not live long enough");
err.span_label(drop_span, "temporary value only lives until here");
- self.tcx.note_and_explain_region(scope_tree, &mut err,
- "borrowed value must be valid for ",
- borrow.region, "...");
+
+ if !self.tcx.nll() {
+ self.tcx.note_and_explain_region(
+ scope_tree,
+ &mut err,
+ "borrowed value must be valid for ",
+ borrow.region,
+ "...",
+ );
+ }
+
self.explain_why_borrow_contains_point(context, borrow, &mut err);
err.emit();
}
&mut self,
context: Context,
(place, span): (&Place<'tcx>, Span),
- loan: &BorrowData,
+ loan: &BorrowData<'tcx>,
) {
let mut err = self.tcx.cannot_assign_to_borrowed(
span,
ProjectionElem::Field(_, field_type) => {
self.describe_field_from_ty(&field_type, field)
}
- ProjectionElem::Index(..) |
- ProjectionElem::ConstantIndex { .. } |
- ProjectionElem::Subslice { .. } => {
+ ProjectionElem::Index(..)
+ | ProjectionElem::ConstantIndex { .. }
+ | ProjectionElem::Subslice { .. } => {
format!("{}", self.describe_field(&proj.base, field))
}
},
Place::Local(local) => {
let local = &self.mir.local_decls[*local];
Some(local.ty)
- },
+ }
Place::Static(ref st) => Some(st.ty),
- Place::Projection(ref proj) => {
- match proj.elem {
- ProjectionElem::Field(_, ty) => Some(ty),
- _ => None,
- }
+ Place::Projection(ref proj) => match proj.elem {
+ ProjectionElem::Field(_, ty) => Some(ty),
+ _ => None,
},
}
}
let input_mir = tcx.mir_validated(def_id);
debug!("run query mir_borrowck: {}", tcx.item_path_str(def_id));
- if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.sess.use_mir() {
+ if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir() {
return None;
}
// contain non-lexical lifetimes. It will have a lifetime tied
// to the inference context.
let mut mir: Mir<'tcx> = input_mir.clone();
- let free_regions = if !tcx.sess.nll() {
+ let free_regions = if !tcx.nll() {
None
} else {
let mir = &mut mir;
);
(Some(Rc::new(regioncx)), opt_closure_req)
} else {
- assert!(!tcx.sess.nll());
+ assert!(!tcx.nll());
(None, None)
};
let flow_inits = flow_inits; // remove mut
/// allowed to be split into separate Reservation and
/// Activation phases.
fn allow_two_phase_borrow(&self, kind: BorrowKind) -> bool {
- self.tcx.sess.two_phase_borrows() &&
+ self.tcx.two_phase_borrows() &&
(kind.allows_two_phase_borrow() ||
self.tcx.sess.opts.debugging_opts.two_phase_beyond_autoref)
}
span: Span,
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
- if !self.tcx.sess.two_phase_borrows() {
+ if !self.tcx.two_phase_borrows() {
return;
}
pub(in borrow_check) fn explain_why_borrow_contains_point(
&self,
context: Context,
- borrow: &BorrowData<'_>,
+ borrow: &BorrowData<'tcx>,
err: &mut DiagnosticBuilder<'_>,
) {
if let Some(regioncx) = &self.nonlexical_regioncx {
}
}
- _ => {
- cause.label_diagnostic(mir, err);
+ Cause::UniversalRegion(region_vid) => {
+ if let Some(region) = regioncx.to_error_region(region_vid) {
+ self.tcx.note_and_explain_free_region(
+ err,
+ "borrowed value must be valid for ",
+ region,
+ "...",
+ );
+ }
}
+
+ _ => {}
}
}
}
use rustc::util::common::ErrorReported;
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::IndexVec;
-use rustc_errors::DiagnosticBuilder;
use std::fmt;
use std::rc::Rc;
use syntax::ast;
self.check_type_tests(infcx, mir, mir_def_id, outlives_requirements.as_mut());
- self.check_universal_regions(infcx, mir, mir_def_id, outlives_requirements.as_mut());
+ self.check_universal_regions(infcx, mir_def_id, outlives_requirements.as_mut());
let outlives_requirements = outlives_requirements.unwrap_or(vec![]);
// an error that multiple bounds are required.
tcx.sess.span_err(
type_test.span,
- &format!(
- "`{}` does not live long enough",
- type_test.generic_kind,
- ),
+ &format!("`{}` does not live long enough", type_test.generic_kind,),
);
}
}
/// existentially bound, then we check its inferred value and try
/// to find a good name from that. Returns `None` if we can't find
/// one (e.g., this is just some random part of the CFG).
- fn to_error_region(&self, r: RegionVid) -> Option<ty::Region<'tcx>> {
+ pub fn to_error_region(&self, r: RegionVid) -> Option<ty::Region<'tcx>> {
if self.universal_regions.is_universal_region(r) {
return self.definitions[r].external_name;
} else {
let inferred_values = self.inferred_values
- .as_ref()
- .expect("region values not yet inferred");
+ .as_ref()
+ .expect("region values not yet inferred");
let upper_bound = self.universal_upper_bound(r);
if inferred_values.contains(r, upper_bound) {
self.to_error_region(upper_bound)
) -> bool {
debug!(
"eval_region_test(point={:?}, lower_bound={:?}, test={:?})",
- point,
- lower_bound,
- test
+ point, lower_bound, test
);
match test {
) -> bool {
debug!(
"eval_outlives({:?}: {:?} @ {:?})",
- sup_region,
- sub_region,
- point
+ sup_region, sub_region, point
);
// Roughly speaking, do a DFS of all region elements reachable
fn check_universal_regions<'gcx>(
&self,
infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- mir: &Mir<'tcx>,
mir_def_id: DefId,
mut propagated_outlives_requirements: Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
) {
for (fr, _) in universal_definitions {
self.check_universal_region(
infcx,
- mir,
mir_def_id,
fr,
&mut propagated_outlives_requirements,
fn check_universal_region<'gcx>(
&self,
infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- mir: &Mir<'tcx>,
mir_def_id: DefId,
longer_fr: RegionVid,
propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
debug!(
"check_universal_region: fr={:?} does not outlive shorter_fr={:?}",
- longer_fr,
- shorter_fr,
+ longer_fr, shorter_fr,
);
let blame_span = self.blame_span(longer_fr, shorter_fr);
// Note: in this case, we use the unapproximated regions
// to report the error. This gives better error messages
// in some cases.
- self.report_error(infcx, mir, mir_def_id, longer_fr, shorter_fr, blame_span);
+ self.report_error(infcx, mir_def_id, longer_fr, shorter_fr, blame_span);
}
}
fn report_error(
&self,
infcx: &InferCtxt<'_, '_, 'tcx>,
- mir: &Mir<'tcx>,
mir_def_id: DefId,
fr: RegionVid,
outlived_fr: RegionVid,
&format!("{} does not outlive {}", fr_string, outlived_fr_string,),
);
- // Find out why `fr` had to outlive `outlived_fr`...
- let inferred_values = self.inferred_values.as_ref().unwrap();
- if let Some(cause) = inferred_values.cause(fr, outlived_fr) {
- cause.label_diagnostic(mir, &mut diag);
- }
-
diag.emit();
}
write!(
formatter,
"({:?}: {:?} @ {:?}) due to {:?}",
- self.sup,
- self.sub,
- self.point,
- self.span
+ self.sup, self.sub, self.point, self.span
)
}
}
debug!(
"apply_requirements(location={:?}, closure_def_id={:?}, closure_substs={:?})",
- location,
- closure_def_id,
- closure_substs
+ location, closure_def_id, closure_substs
);
// Get Tu.
"apply_requirements: region={:?} \
outlived_region={:?} \
outlives_requirement={:?}",
- region,
- outlived_region,
- outlives_requirement,
+ region, outlived_region, outlives_requirement,
);
infcx.sub_regions(origin, outlived_region, region);
}
"apply_requirements: ty={:?} \
outlived_region={:?} \
outlives_requirement={:?}",
- ty,
- outlived_region,
- outlives_requirement,
+ ty, outlived_region, outlives_requirement,
);
infcx.register_region_obligation(
body_id,
}
impl Cause {
- pub(crate) fn label_diagnostic(&self, mir: &Mir<'_>, diag: &mut DiagnosticBuilder<'_>) {
- // The cause information is pretty messy. Only dump it as an
- // internal debugging aid if -Znll-dump-cause is given.
- let nll_dump_cause = ty::tls::with(|tcx| tcx.sess.nll_dump_cause());
- if !nll_dump_cause {
- return;
- }
-
- let mut string = String::new();
- self.push_diagnostic_string(mir, &mut string);
- diag.note(&string);
- }
-
- fn push_diagnostic_string(&self, mir: &Mir<'_>, string: &mut String) {
- match self {
- Cause::LiveVar(local, location) => {
- string.push_str(&format!("because `{:?}` is live at {:?}", local, location));
- }
-
- Cause::DropVar(local, location) => {
- string.push_str(&format!(
- "because `{:?}` is dropped at {:?}",
- local,
- location
- ));
- }
-
- Cause::LiveOther(location) => {
- string.push_str(&format!(
- "because of a general liveness constraint at {:?}",
- location
- ));
- }
-
- Cause::UniversalRegion(region_vid) => {
- string.push_str(&format!(
- "because `{:?}` is universally quantified",
- region_vid
- ));
- }
-
- Cause::Outlives {
- original_cause,
- constraint_location,
- constraint_span: _,
- } => {
- string.push_str(&format!(
- "because of an outlives relation created at `{:?}`\n",
- constraint_location
- ));
-
- original_cause.push_diagnostic_string(mir, string);
- }
- }
- }
-
pub(crate) fn root_cause(&self) -> &Cause {
match self {
- Cause::LiveVar(..) |
- Cause::DropVar(..) |
- Cause::LiveOther(..) |
- Cause::UniversalRegion(..) => {
- self
- }
+ Cause::LiveVar(..)
+ | Cause::DropVar(..)
+ | Cause::LiveOther(..)
+ | Cause::UniversalRegion(..) => self,
- Cause::Outlives {
- original_cause,
- ..
- } => {
- original_cause.root_cause()
- }
+ Cause::Outlives { original_cause, .. } => original_cause.root_cause(),
}
}
}
// shouldn't affect `is_sized`.
let gcx = self.tcx().global_tcx();
let erased_ty = gcx.lift(&self.tcx().erase_regions(&ty)).unwrap();
- if !erased_ty.is_sized(gcx, self.param_env, span) {
+ if !erased_ty.is_sized(gcx.at(span), self.param_env) {
// in current MIR construction, all non-control-flow rvalue
// expressions evaluate through `as_temp` or `into` a return
// slot or local, so to find all unsized rvalues it is enough
// When NLL is enabled, the borrow checker runs the typeck
// itself, so we don't need this MIR pass anymore.
- if tcx.sess.nll() {
+ if tcx.nll() {
return;
}
block: BasicBlock,
source_info: SourceInfo,
region_scope: region::Scope) {
- if tcx.sess.emit_end_regions() {
+ if tcx.emit_end_regions() {
if let region::ScopeData::CallSite(_) = region_scope.data() {
// The CallSite scope (aka the root scope) is sort of weird, in that it is
// supposed to "separate" the "interior" and "exterior" of a closure. Being
PatternKind::Variant { adt_def, substs, variant_index, ref subpatterns } => {
let irrefutable = adt_def.variants.iter().enumerate().all(|(i, v)| {
i == variant_index || {
- self.hir.tcx().sess.features.borrow().never_type &&
+ self.hir.tcx().features().never_type &&
self.hir.tcx().is_variant_uninhabited_from_all_modules(v, substs)
}
});
}
pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
- ty.is_sized(self.tcx, self.param_env, DUMMY_SP)
+ ty.is_sized(self.tcx.at(DUMMY_SP), self.param_env)
}
pub fn load_mir(
}
Some(_) => true,
None => {
- if tcx.is_exported_symbol(def_id) ||
+ if tcx.is_reachable_non_generic(def_id) ||
tcx.is_foreign_item(def_id)
{
// We can link to the item in question, no instance needed
let ptr_vtable = |inner_source: Ty<'tcx>, inner_target: Ty<'tcx>| {
let type_has_metadata = |ty: Ty<'tcx>| -> bool {
use syntax_pos::DUMMY_SP;
- if ty.is_sized(tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP) {
+ if ty.is_sized(tcx.at(DUMMY_SP), ty::ParamEnv::empty(traits::Reveal::All)) {
return false;
}
let tail = tcx.struct_tail(ty);
}
MonoItemCollectionMode::Lazy => {
self.entry_fn == Some(def_id) ||
- self.tcx.is_exported_symbol(def_id) ||
+ self.tcx.is_reachable_non_generic(def_id) ||
attr::contains_name(&self.tcx.get_attrs(def_id),
"rustc_std_internal_symbol")
}
can_be_internalized = false;
Visibility::Hidden
} else if def_id.is_local() {
- if tcx.is_exported_symbol(def_id) {
+ if tcx.is_reachable_non_generic(def_id) {
can_be_internalized = false;
default_visibility(def_id)
} else {
(Linkage::External, visibility)
}
MonoItem::Static(def_id) => {
- let visibility = if tcx.is_exported_symbol(def_id) {
+ let visibility = if tcx.is_reachable_non_generic(def_id) {
can_be_internalized = false;
default_visibility(def_id)
} else {
}
MonoItem::GlobalAsm(node_id) => {
let def_id = tcx.hir.local_def_id(node_id);
- let visibility = if tcx.is_exported_symbol(def_id) {
+ let visibility = if tcx.is_reachable_non_generic(def_id) {
can_be_internalized = false;
default_visibility(def_id)
} else {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
_source: MirSource,
mir: &mut Mir<'tcx>) {
- if !tcx.sess.emit_end_regions() { return; }
+ if !tcx.emit_end_regions() { return; }
let mut gather = GatherBorrowedRegions {
seen_regions: FxHashSet()
};
let bin_statement = block.statements.pop().unwrap();
- let (source_info, place, lhs, mut rhs) = match bin_statement {
- Statement {
- source_info,
- kind: StatementKind::Assign(
- place,
- Rvalue::BinaryOp(_, lhs, rhs))
- } => (source_info, place, lhs, rhs),
- Statement {
- source_info,
- kind: StatementKind::Assign(
- place,
- Rvalue::CheckedBinaryOp(_, lhs, rhs))
- } => (source_info, place, lhs, rhs),
+ let source_info = bin_statement.source_info;
+ let (place, lhs, mut rhs) = match bin_statement.kind {
+ StatementKind::Assign(place, Rvalue::BinaryOp(_, lhs, rhs))
+ | StatementKind::Assign(place, Rvalue::CheckedBinaryOp(_, lhs, rhs)) => {
+ (place, lhs, rhs)
+ }
_ => bug!("Statement doesn't match pattern any more?"),
};
if self.mode != Mode::Fn &&
// feature-gate is not enabled,
- !self.tcx.sess.features.borrow()
+ !self.tcx.features()
.declared_lib_features
.iter()
.any(|&(ref sym, _)| sym == feature_name) &&
o: Origin)
-> DiagnosticBuilder<'a>
{
- if !o.should_emit_errors(self.sess.borrowck_mode()) {
+ if !o.should_emit_errors(self.borrowck_mode()) {
self.sess.diagnostic().cancel(&mut diag);
}
diag
// do not report any error now. since crate attributes are
// not touched by expansion, every use of plugin without
// the feature enabled will result in an error later...
- if sess.features.borrow().plugin {
+ if sess.features_untracked().plugin {
for attr in &krate.attrs {
if !attr.check_name("plugin") {
continue;
};
let ext = Lrc::new(macro_rules::compile(&self.session.parse_sess,
- &self.session.features,
+ &self.session.features_untracked(),
¯o_def));
self.macro_map.insert(def_id, ext.clone());
ext
invocations.insert(Mark::root(),
arenas.alloc_invocation_data(InvocationData::root(graph_root)));
- let features = session.features.borrow();
+ let features = session.features_untracked();
let mut macro_defs = FxHashMap();
macro_defs.insert(Mark::root(), root_def_id);
let prim = self.primitive_type_table.primitive_types[&path[0].node.name];
match prim {
TyUint(UintTy::U128) | TyInt(IntTy::I128) => {
- if !self.session.features.borrow().i128_type {
+ if !self.session.features_untracked().i128_type {
emit_feature_err(&self.session.parse_sess,
"i128_type", span, GateIssue::Language,
"128-bit type is unstable");
let prev_name = path[0].node.name;
if prev_name == keywords::Extern.name() ||
prev_name == keywords::CrateRoot.name() &&
- self.session.features.borrow().extern_absolute_paths {
+ self.session.features_untracked().extern_absolute_paths {
// `::extern_crate::a::b`
let crate_id = self.crate_loader.resolve_crate_from_path(name, ident.span);
let crate_root =
let def_id = self.definitions.local_def_id(item.id);
let ext = Lrc::new(macro_rules::compile(&self.session.parse_sess,
- &self.session.features,
+ &self.session.features_untracked(),
item));
self.macro_map.insert(def_id, ext);
}
fn gate_legacy_custom_derive(&mut self, name: Symbol, span: Span) {
- if !self.session.features.borrow().custom_derive {
+ if !self.session.features_untracked().custom_derive {
let sess = &self.session.parse_sess;
let explain = feature_gate::EXPLAIN_CUSTOM_DERIVE;
emit_feature_err(sess, "custom_derive", span, GateIssue::Language, explain);
if module_path.len() == 1 && (module_path[0].node.name == keywords::CrateRoot.name() ||
module_path[0].node.name == keywords::Extern.name()) {
let is_extern = module_path[0].node.name == keywords::Extern.name() ||
- self.session.features.borrow().extern_absolute_paths;
+ self.session.features_untracked().extern_absolute_paths;
match directive.subclass {
GlobImport { .. } if is_extern => {
return Some((directive.span,
if !self.span.filter_generated(sub_span, ex.span) {
let span =
self.span_from_span(sub_span.expect("No span found for var ref"));
- let ref_id =
- ::id_from_def_id(def.non_enum_variant().fields[idx.node].did);
- self.dumper.dump_ref(Ref {
- kind: RefKind::Variable,
- span,
- ref_id,
- });
+ if let Some(field) = def.non_enum_variant().fields.get(idx.node) {
+ let ref_id = ::id_from_def_id(field.did);
+ self.dumper.dump_ref(Ref {
+ kind: RefKind::Variable,
+ span,
+ ref_id,
+ });
+ } else {
+ return;
+ }
}
}
ty::TyTuple(..) => {}
- _ => span_bug!(ex.span, "Expected struct or tuple type, found {:?}", ty),
+ _ => {
+ debug!("Expected struct or tuple type, found {:?}", ty);
+ return;
+ }
}
}
ast::ExprKind::Closure(_, _, ref decl, ref body, _fn_decl_span) => {
[dependencies]
bitflags = "1.0"
+cc = "1.0.1"
flate2 = "1.0"
jobserver = "0.1.5"
libc = "0.2"
rustc_allocator = { path = "../librustc_allocator" }
rustc_apfloat = { path = "../librustc_apfloat" }
rustc_back = { path = "../librustc_back" }
-rustc_binaryen = { path = "../librustc_binaryen" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
syntax_pos = { path = "../libsyntax_pos" }
tempdir = "0.3"
-[target."cfg(windows)".dependencies]
-cc = "1.0.1"
+# not actually used but needed to make sure we enable the same feature set as
+# winapi used in librustc
+env_logger = { version = "0.5", default-features = false }
[features]
# Used to communicate the feature to `rustc_back` in the same manner that the
use std::mem;
use std::process::{self, Output};
+use rustc_back::LldFlavor;
+
#[derive(Clone)]
pub struct Command {
program: Program,
enum Program {
Normal(OsString),
CmdBatScript(OsString),
+ Lld(OsString, LldFlavor)
}
impl Command {
Command::_new(Program::CmdBatScript(program.as_ref().to_owned()))
}
+ pub fn lld<P: AsRef<OsStr>>(program: P, flavor: LldFlavor) -> Command {
+ Command::_new(Program::Lld(program.as_ref().to_owned(), flavor))
+ }
+
fn _new(program: Program) -> Command {
Command {
program,
self
}
- pub fn envs<I, K, V>(&mut self, envs: I) -> &mut Command
- where I: IntoIterator<Item=(K, V)>,
- K: AsRef<OsStr>,
- V: AsRef<OsStr>
- {
- for (key, value) in envs {
- self._env(key.as_ref(), value.as_ref());
- }
- self
- }
-
fn _env(&mut self, key: &OsStr, value: &OsStr) {
self.env.push((key.to_owned(), value.to_owned()));
}
c.arg("/c").arg(p);
c
}
+ Program::Lld(ref p, flavor) => {
+ let mut c = process::Command::new(p);
+ c.arg("-flavor").arg(match flavor {
+ LldFlavor::Wasm => "wasm",
+ LldFlavor::Ld => "gnu",
+ LldFlavor::Link => "link",
+ LldFlavor::Ld64 => "darwin",
+ });
+ c
+ }
};
ret.args(&self.args);
ret.envs(self.env.clone());
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use cc::windows_registry;
use super::archive::{ArchiveBuilder, ArchiveConfig};
use super::bytecode::RLIB_BYTECODE_EXTENSION;
use super::linker::Linker;
use super::rpath::RPathConfig;
use super::rpath;
use metadata::METADATA_FILENAME;
+use rustc_back::LinkerFlavor;
use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType, PrintRequest};
use rustc::session::config::{RUST_CGU_EXT, Lto};
use rustc::session::filesearch;
use rustc::util::fs::fix_windows_verbatim_for_gcc;
use rustc::hir::def_id::CrateNum;
use tempdir::TempDir;
-use rustc_back::{PanicStrategy, RelroLevel, LinkerFlavor};
+use rustc_back::{PanicStrategy, RelroLevel};
use context::get_reloc_model;
use llvm;
use std::ascii;
use std::char;
use std::env;
-use std::ffi::OsString;
use std::fmt;
use std::fs;
use std::io;
// The third parameter is for env vars, used on windows to set up the
// path for MSVC to find its DLLs, and gcc to find its bundled
// toolchain
-pub fn get_linker(sess: &Session) -> (PathBuf, Command, Vec<(OsString, OsString)>) {
- let envs = vec![("PATH".into(), command_path(sess))];
-
+pub fn get_linker(sess: &Session) -> (PathBuf, Command) {
// If our linker looks like a batch script on Windows then to execute this
// we'll need to spawn `cmd` explicitly. This is primarily done to handle
// emscripten where the linker is `emcc.bat` and needs to be spawned as
return Command::bat_script(linker)
}
}
- Command::new(linker)
- };
+ match sess.linker_flavor() {
+ LinkerFlavor::Lld(f) => Command::lld(linker, f),
+ _ => Command::new(linker),
- if let Some(ref linker) = sess.opts.cg.linker {
- (linker.clone(), cmd(linker), envs)
- } else if sess.target.target.options.is_like_msvc {
- let (cmd, envs) = msvc_link_exe_cmd(sess);
- (PathBuf::from("link.exe"), cmd, envs)
- } else {
- let linker = PathBuf::from(&sess.target.target.options.linker);
- let cmd = cmd(&linker);
- (linker, cmd, envs)
- }
-}
-
-#[cfg(windows)]
-pub fn msvc_link_exe_cmd(sess: &Session) -> (Command, Vec<(OsString, OsString)>) {
- use cc::windows_registry;
+ }
+ };
- let target = &sess.opts.target_triple;
- let tool = windows_registry::find_tool(target, "link.exe");
+ let msvc_tool = windows_registry::find_tool(&sess.opts.target_triple, "link.exe");
- if let Some(tool) = tool {
- let mut cmd = Command::new(tool.path());
- cmd.args(tool.args());
- for &(ref k, ref v) in tool.env() {
- cmd.env(k, v);
- }
- let envs = tool.env().to_vec();
- (cmd, envs)
- } else {
- debug!("Failed to locate linker.");
- (Command::new("link.exe"), vec![])
- }
-}
+ let linker_path = sess.opts.cg.linker.as_ref().map(|s| &**s)
+ .or(sess.target.target.options.linker.as_ref().map(|s| s.as_ref()))
+ .unwrap_or(match sess.linker_flavor() {
+ LinkerFlavor::Msvc => {
+ msvc_tool.as_ref().map(|t| t.path()).unwrap_or("link.exe".as_ref())
+ }
+ LinkerFlavor::Em if cfg!(windows) => "emcc.bat".as_ref(),
+ LinkerFlavor::Em => "emcc".as_ref(),
+ LinkerFlavor::Gcc => "cc".as_ref(),
+ LinkerFlavor::Ld => "ld".as_ref(),
+ LinkerFlavor::Lld(_) => "lld".as_ref(),
+ });
-#[cfg(not(windows))]
-pub fn msvc_link_exe_cmd(_sess: &Session) -> (Command, Vec<(OsString, OsString)>) {
- (Command::new("link.exe"), vec![])
-}
+ let mut cmd = cmd(linker_path);
-fn command_path(sess: &Session) -> OsString {
// The compiler's sysroot often has some bundled tools, so add it to the
// PATH for the child.
let mut new_path = sess.host_filesearch(PathKind::All)
.get_tools_search_paths();
- if let Some(path) = env::var_os("PATH") {
- new_path.extend(env::split_paths(&path));
+ let mut msvc_changed_path = false;
+ if sess.target.target.options.is_like_msvc {
+ if let Some(ref tool) = msvc_tool {
+ cmd.args(tool.args());
+ for &(ref k, ref v) in tool.env() {
+ if k == "PATH" {
+ new_path.extend(env::split_paths(v));
+ msvc_changed_path = true;
+ } else {
+ cmd.env(k, v);
+ }
+ }
+ }
+ }
+
+ if !msvc_changed_path {
+ if let Some(path) = env::var_os("PATH") {
+ new_path.extend(env::split_paths(&path));
+ }
}
- env::join_paths(new_path).unwrap()
+ cmd.env("PATH", env::join_paths(new_path).unwrap());
+
+ (linker_path.to_path_buf(), cmd)
}
pub fn remove(sess: &Session, path: &Path) {
info!("preparing {:?} to {:?}", crate_type, out_filename);
let flavor = sess.linker_flavor();
- // The "binaryen linker" is massively special, so skip everything below.
- if flavor == LinkerFlavor::Binaryen {
- return link_binaryen(sess, crate_type, out_filename, trans, tmpdir);
- }
-
// The invocations of cc share some flags across platforms
- let (pname, mut cmd, envs) = get_linker(sess);
- // This will set PATH on windows
- cmd.envs(envs);
+ let (pname, mut cmd) = get_linker(sess);
let root = sess.target_filesearch(PathKind::Native).get_lib_path();
if let Some(args) = sess.target.target.options.pre_link_args.get(&flavor) {
}
}
-/// For now "linking with binaryen" is just "move the one module we generated in
-/// the backend to the final output"
-///
-/// That is, all the heavy lifting happens during the `back::write` phase. Here
-/// we just clean up after that.
-///
-/// Note that this is super temporary and "will not survive the night", this is
-/// guaranteed to get removed as soon as a linker for wasm exists. This should
-/// not be used for anything other than wasm.
-fn link_binaryen(sess: &Session,
- _crate_type: config::CrateType,
- out_filename: &Path,
- trans: &CrateTranslation,
- _tmpdir: &Path) {
- assert!(trans.allocator_module.is_none());
- assert_eq!(trans.modules.len(), 1);
-
- let object = trans.modules[0].object.as_ref().expect("object must exist");
- let res = fs::hard_link(object, out_filename)
- .or_else(|_| fs::copy(object, out_filename).map(|_| ()));
- if let Err(e) = res {
- sess.fatal(&format!("failed to create `{}`: {}",
- out_filename.display(),
- e));
- }
-}
-
fn is_full_lto_enabled(sess: &Session) -> bool {
match sess.lto() {
Lto::Yes |
use rustc::session::Session;
use rustc::session::config::{self, CrateType, OptLevel, DebugInfoLevel};
use rustc::ty::TyCtxt;
-use rustc_back::LinkerFlavor;
+use rustc_back::{LinkerFlavor, LldFlavor};
use serialize::{json, Encoder};
/// For all the linkers we support, and information they might
cmd: Command,
sess: &'a Session) -> Box<Linker+'a> {
match sess.linker_flavor() {
+ LinkerFlavor::Lld(LldFlavor::Link) |
LinkerFlavor::Msvc => {
Box::new(MsvcLinker {
cmd,
is_ld: false,
}) as Box<Linker>
}
+
+ LinkerFlavor::Lld(LldFlavor::Ld) |
+ LinkerFlavor::Lld(LldFlavor::Ld64) |
LinkerFlavor::Ld => {
Box::new(GccLinker {
cmd,
is_ld: true,
}) as Box<Linker>
}
- LinkerFlavor::Binaryen => {
- panic!("can't instantiate binaryen linker")
+
+ LinkerFlavor::Lld(LldFlavor::Wasm) => {
+ Box::new(WasmLd {
+ cmd,
+ }) as Box<Linker>
}
}
}
let mut symbols = Vec::new();
let export_threshold = symbol_export::crates_export_threshold(&[crate_type]);
- for &(ref name, _, level) in tcx.exported_symbols(LOCAL_CRATE).iter() {
+ for &(symbol, level) in tcx.exported_symbols(LOCAL_CRATE).iter() {
if level.is_below_threshold(export_threshold) {
- symbols.push(name.clone());
+ symbols.push(symbol.symbol_name(tcx).to_string());
}
}
// For each dependency that we are linking to statically ...
if *dep_format == Linkage::Static {
// ... we add its symbol list to our export list.
- for &(ref name, _, level) in tcx.exported_symbols(cnum).iter() {
+ for &(symbol, level) in tcx.exported_symbols(cnum).iter() {
if level.is_below_threshold(export_threshold) {
- symbols.push(name.clone());
+ symbols.push(symbol.symbol_name(tcx).to_string());
}
}
}
symbols
}
+
+pub struct WasmLd {
+ cmd: Command,
+}
+
+impl Linker for WasmLd {
+ fn link_dylib(&mut self, lib: &str) {
+ self.cmd.arg("-l").arg(lib);
+ }
+
+ fn link_staticlib(&mut self, lib: &str) {
+ self.cmd.arg("-l").arg(lib);
+ }
+
+ fn link_rlib(&mut self, lib: &Path) {
+ self.cmd.arg(lib);
+ }
+
+ fn include_path(&mut self, path: &Path) {
+ self.cmd.arg("-L").arg(path);
+ }
+
+ fn framework_path(&mut self, _path: &Path) {
+ panic!("frameworks not supported")
+ }
+
+ fn output_filename(&mut self, path: &Path) {
+ self.cmd.arg("-o").arg(path);
+ }
+
+ fn add_object(&mut self, path: &Path) {
+ self.cmd.arg(path);
+ }
+
+ fn position_independent_executable(&mut self) {
+ }
+
+ fn partial_relro(&mut self) {
+ }
+
+ fn full_relro(&mut self) {
+ }
+
+ fn build_static_executable(&mut self) {
+ }
+
+ fn args(&mut self, args: &[String]) {
+ self.cmd.args(args);
+ }
+
+ fn link_rust_dylib(&mut self, lib: &str, _path: &Path) {
+ self.cmd.arg("-l").arg(lib);
+ }
+
+ fn link_framework(&mut self, _framework: &str) {
+ panic!("frameworks not supported")
+ }
+
+ fn link_whole_staticlib(&mut self, lib: &str, _search_path: &[PathBuf]) {
+ self.cmd.arg("-l").arg(lib);
+ }
+
+ fn link_whole_rlib(&mut self, lib: &Path) {
+ self.cmd.arg(lib);
+ }
+
+ fn gc_sections(&mut self, _keep_metadata: bool) {
+ }
+
+ fn optimize(&mut self) {
+ }
+
+ fn debuginfo(&mut self) {
+ }
+
+ fn no_default_libraries(&mut self) {
+ }
+
+ fn build_dylib(&mut self, _out_filename: &Path) {
+ }
+
+ fn export_symbols(&mut self, _tmpdir: &Path, _crate_type: CrateType) {
+ }
+
+ fn subsystem(&mut self, _subsystem: &str) {
+ }
+
+ fn no_position_independent_executable(&mut self) {
+ }
+
+ fn finalize(&mut self) -> Command {
+ self.cmd.arg("--threads");
+
+ // FIXME we probably shouldn't pass this but instead pass an explicit
+ // whitelist of symbols we'll allow to be undefined. Unfortunately
+ // though we can't handle symbols like `log10` that LLVM injects at a
+ // super late date without actually parsing object files. For now let's
+ // stick to this and hopefully fix it before stabilization happens.
+ self.cmd.arg("--allow-undefined");
+
+ // For now we just never have an entry symbol
+ self.cmd.arg("--no-entry");
+
+ let mut cmd = Command::new("");
+ ::std::mem::swap(&mut cmd, &mut self.cmd);
+ cmd
+ }
+}
Lto::No => panic!("didn't request LTO but we're doing LTO"),
};
- let symbol_filter = &|&(ref name, _, level): &(String, _, SymbolExportLevel)| {
+ let symbol_filter = &|&(ref name, level): &(String, SymbolExportLevel)| {
if level.is_below_threshold(export_threshold) {
let mut bytes = Vec::with_capacity(name.len() + 1);
bytes.extend(name.bytes());
use rustc_data_structures::sync::Lrc;
use std::sync::Arc;
-use base;
use monomorphize::Instance;
+use rustc::hir;
use rustc::hir::def_id::CrateNum;
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
-use rustc::middle::exported_symbols::SymbolExportLevel;
+use rustc::middle::exported_symbols::{SymbolExportLevel, ExportedSymbol, metadata_symbol_name};
use rustc::session::config;
-use rustc::ty::TyCtxt;
+use rustc::ty::{TyCtxt, SymbolName};
use rustc::ty::maps::Providers;
-use rustc::util::nodemap::FxHashMap;
+use rustc::util::nodemap::{FxHashMap, DefIdSet};
use rustc_allocator::ALLOCATOR_METHODS;
-use rustc_back::LinkerFlavor;
use syntax::attr;
pub type ExportedSymbols = FxHashMap<
CrateNum,
- Arc<Vec<(String, Option<DefId>, SymbolExportLevel)>>,
+ Arc<Vec<(String, SymbolExportLevel)>>,
>;
pub fn threshold(tcx: TyCtxt) -> SymbolExportLevel {
crates_export_threshold(&tcx.sess.crate_types.borrow())
}
-pub fn metadata_symbol_name(tcx: TyCtxt) -> String {
- format!("rust_metadata_{}_{}",
- tcx.crate_name(LOCAL_CRATE),
- tcx.crate_disambiguator(LOCAL_CRATE).to_fingerprint().to_hex())
-}
-
fn crate_export_threshold(crate_type: config::CrateType) -> SymbolExportLevel {
match crate_type {
config::CrateTypeExecutable |
}
}
-pub fn provide(providers: &mut Providers) {
- providers.exported_symbol_ids = |tcx, cnum| {
- let export_threshold = threshold(tcx);
- Lrc::new(tcx.exported_symbols(cnum)
- .iter()
- .filter_map(|&(_, id, level)| {
- id.and_then(|id| {
- if level.is_below_threshold(export_threshold) {
- Some(id)
+fn reachable_non_generics_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ cnum: CrateNum)
+ -> Lrc<DefIdSet>
+{
+ assert_eq!(cnum, LOCAL_CRATE);
+
+ if !tcx.sess.opts.output_types.should_trans() {
+ return Lrc::new(DefIdSet())
+ }
+
+ let export_threshold = threshold(tcx);
+
+ // We already collect all potentially reachable non-generic items for
+ // `exported_symbols`. Now we just filter them down to what is actually
+ // exported for the given crate we are compiling.
+ let reachable_non_generics = tcx
+ .exported_symbols(LOCAL_CRATE)
+ .iter()
+ .filter_map(|&(exported_symbol, level)| {
+ if let ExportedSymbol::NonGeneric(def_id) = exported_symbol {
+ if level.is_below_threshold(export_threshold) {
+ return Some(def_id)
+ }
+ }
+
+ None
+ })
+ .collect();
+
+ Lrc::new(reachable_non_generics)
+}
+
+fn is_reachable_non_generic_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> bool {
+ tcx.reachable_non_generics(def_id.krate).contains(&def_id)
+}
+
+fn exported_symbols_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ cnum: CrateNum)
+ -> Arc<Vec<(ExportedSymbol,
+ SymbolExportLevel)>>
+{
+ assert_eq!(cnum, LOCAL_CRATE);
+
+ if !tcx.sess.opts.output_types.should_trans() {
+ return Arc::new(vec![])
+ }
+
+ // Check to see if this crate is a "special runtime crate". These
+ // crates, implementation details of the standard library, typically
+ // have a bunch of `pub extern` and `#[no_mangle]` functions as the
+ // ABI between them. We don't want their symbols to have a `C`
+ // export level, however, as they're just implementation details.
+ // Down below we'll hardwire all of the symbols to the `Rust` export
+ // level instead.
+ let special_runtime_crate = tcx.is_panic_runtime(LOCAL_CRATE) ||
+ tcx.is_compiler_builtins(LOCAL_CRATE);
+
+ let reachable_non_generics: DefIdSet = tcx.reachable_set(LOCAL_CRATE).0
+ .iter()
+ .filter_map(|&node_id| {
+ // We want to ignore some FFI functions that are not exposed from
+ // this crate. Reachable FFI functions can be lumped into two
+ // categories:
+ //
+ // 1. Those that are included statically via a static library
+ // 2. Those included otherwise (e.g. dynamically or via a framework)
+ //
+ // Although our LLVM module is not literally emitting code for the
+ // statically included symbols, it's an export of our library which
+ // needs to be passed on to the linker and encoded in the metadata.
+ //
+ // As a result, if this id is an FFI item (foreign item) then we only
+ // let it through if it's included statically.
+ match tcx.hir.get(node_id) {
+ hir::map::NodeForeignItem(..) => {
+ let def_id = tcx.hir.local_def_id(node_id);
+ if tcx.is_statically_included_foreign_item(def_id) {
+ Some(def_id)
} else {
None
}
- })
- })
- .collect())
- };
-
- providers.is_exported_symbol = |tcx, id| {
- tcx.exported_symbol_ids(id.krate).contains(&id)
- };
-
- providers.exported_symbols = |tcx, cnum| {
- assert_eq!(cnum, LOCAL_CRATE);
- let local_exported_symbols = base::find_exported_symbols(tcx);
-
- let mut local_crate: Vec<_> = local_exported_symbols
- .iter()
- .map(|&node_id| {
- tcx.hir.local_def_id(node_id)
- })
- .map(|def_id| {
- let name = tcx.symbol_name(Instance::mono(tcx, def_id));
- let export_level = export_level(tcx, def_id);
- debug!("EXPORTED SYMBOL (local): {} ({:?})", name, export_level);
- (str::to_owned(&name), Some(def_id), export_level)
- })
- .collect();
-
- if let Some(_) = *tcx.sess.entry_fn.borrow() {
- local_crate.push(("main".to_string(),
- None,
- SymbolExportLevel::C));
- }
+ }
+
+ // Only consider nodes that actually have exported symbols.
+ hir::map::NodeItem(&hir::Item {
+ node: hir::ItemStatic(..),
+ ..
+ }) |
+ hir::map::NodeItem(&hir::Item {
+ node: hir::ItemFn(..), ..
+ }) |
+ hir::map::NodeImplItem(&hir::ImplItem {
+ node: hir::ImplItemKind::Method(..),
+ ..
+ }) => {
+ let def_id = tcx.hir.local_def_id(node_id);
+ let generics = tcx.generics_of(def_id);
+ if (generics.parent_types == 0 && generics.types.is_empty()) &&
+ // Functions marked with #[inline] are only ever translated
+ // with "internal" linkage and are never exported.
+ !Instance::mono(tcx, def_id).def.requires_local(tcx) {
+ Some(def_id)
+ } else {
+ None
+ }
+ }
- if tcx.sess.allocator_kind.get().is_some() {
- for method in ALLOCATOR_METHODS {
- local_crate.push((format!("__rust_{}", method.name),
- None,
- SymbolExportLevel::Rust));
+ _ => None
}
- }
+ })
+ .collect();
- if let Some(id) = tcx.sess.derive_registrar_fn.get() {
- let def_id = tcx.hir.local_def_id(id);
- let disambiguator = tcx.sess.local_crate_disambiguator();
- let registrar = tcx.sess.generate_derive_registrar_symbol(disambiguator);
- local_crate.push((registrar, Some(def_id), SymbolExportLevel::C));
- }
+ let mut symbols: Vec<_> = reachable_non_generics
+ .iter()
+ .map(|&def_id| {
+ let export_level = if special_runtime_crate {
+ let name = tcx.symbol_name(Instance::mono(tcx, def_id));
+ // We can probably do better here by just ensuring that
+ // it has hidden visibility rather than public
+ // visibility, as this is primarily here to ensure it's
+ // not stripped during LTO.
+ //
+ // In general though we won't link right if these
+ // symbols are stripped, and LTO currently strips them.
+ if &*name == "rust_eh_personality" ||
+ &*name == "rust_eh_register_frames" ||
+ &*name == "rust_eh_unregister_frames" {
+ SymbolExportLevel::C
+ } else {
+ SymbolExportLevel::Rust
+ }
+ } else {
+ tcx.symbol_export_level(def_id)
+ };
+ debug!("EXPORTED SYMBOL (local): {} ({:?})",
+ tcx.symbol_name(Instance::mono(tcx, def_id)),
+ export_level);
+ (ExportedSymbol::NonGeneric(def_id), export_level)
+ })
+ .collect();
- if tcx.sess.crate_types.borrow().contains(&config::CrateTypeDylib) {
- local_crate.push((metadata_symbol_name(tcx),
- None,
- SymbolExportLevel::Rust));
+ if let Some(id) = tcx.sess.derive_registrar_fn.get() {
+ let def_id = tcx.hir.local_def_id(id);
+ symbols.push((ExportedSymbol::NonGeneric(def_id), SymbolExportLevel::C));
+ }
+
+ if let Some(id) = tcx.sess.plugin_registrar_fn.get() {
+ let def_id = tcx.hir.local_def_id(id);
+ symbols.push((ExportedSymbol::NonGeneric(def_id), SymbolExportLevel::C));
+ }
+
+ if let Some(_) = *tcx.sess.entry_fn.borrow() {
+ let symbol_name = "main".to_string();
+ let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(&symbol_name));
+
+ symbols.push((exported_symbol, SymbolExportLevel::C));
+ }
+
+ if tcx.sess.allocator_kind.get().is_some() {
+ for method in ALLOCATOR_METHODS {
+ let symbol_name = format!("__rust_{}", method.name);
+ let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(&symbol_name));
+
+ symbols.push((exported_symbol, SymbolExportLevel::Rust));
}
+ }
- // Sort so we get a stable incr. comp. hash.
- local_crate.sort_unstable_by(|&(ref name1, ..), &(ref name2, ..)| {
- name1.cmp(name2)
- });
+ if tcx.sess.crate_types.borrow().contains(&config::CrateTypeDylib) {
+ let symbol_name = metadata_symbol_name(tcx);
+ let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(&symbol_name));
- Arc::new(local_crate)
- };
+ symbols.push((exported_symbol, SymbolExportLevel::Rust));
+ }
+
+ // Sort so we get a stable incr. comp. hash.
+ symbols.sort_unstable_by(|&(ref symbol1, ..), &(ref symbol2, ..)| {
+ symbol1.compare_stable(tcx, symbol2)
+ });
- providers.symbol_export_level = export_level;
+ Arc::new(symbols)
}
-pub fn provide_extern(providers: &mut Providers) {
- providers.exported_symbols = |tcx, cnum| {
- // If this crate is a plugin and/or a custom derive crate, then
- // we're not even going to link those in so we skip those crates.
- if tcx.plugin_registrar_fn(cnum).is_some() ||
- tcx.derive_registrar_fn(cnum).is_some() {
- return Arc::new(Vec::new())
- }
+pub fn provide(providers: &mut Providers) {
+ providers.reachable_non_generics = reachable_non_generics_provider;
+ providers.is_reachable_non_generic = is_reachable_non_generic_provider;
+ providers.exported_symbols = exported_symbols_provider_local;
+ providers.symbol_export_level = symbol_export_level_provider;
+}
- // Check to see if this crate is a "special runtime crate". These
- // crates, implementation details of the standard library, typically
- // have a bunch of `pub extern` and `#[no_mangle]` functions as the
- // ABI between them. We don't want their symbols to have a `C`
- // export level, however, as they're just implementation details.
- // Down below we'll hardwire all of the symbols to the `Rust` export
- // level instead.
- let special_runtime_crate =
- tcx.is_panic_runtime(cnum) || tcx.is_compiler_builtins(cnum);
-
- // Dealing with compiler-builtins and wasm right now is super janky.
- // There's no linker! As a result we need all of the compiler-builtins
- // exported symbols to make their way through all the way to the end of
- // compilation. We want to make sure that LLVM doesn't remove them as
- // well because we may or may not need them in the final output
- // artifact. For now just force them to always get exported at the C
- // layer, and we'll worry about gc'ing them later.
- let compiler_builtins_and_binaryen =
- tcx.is_compiler_builtins(cnum) &&
- tcx.sess.linker_flavor() == LinkerFlavor::Binaryen;
-
- let mut crate_exports: Vec<_> = tcx
- .exported_symbol_ids(cnum)
- .iter()
- .map(|&def_id| {
- let name = tcx.symbol_name(Instance::mono(tcx, def_id));
- let export_level = if compiler_builtins_and_binaryen &&
- tcx.contains_extern_indicator(def_id) {
- SymbolExportLevel::C
- } else if special_runtime_crate {
- // We can probably do better here by just ensuring that
- // it has hidden visibility rather than public
- // visibility, as this is primarily here to ensure it's
- // not stripped during LTO.
- //
- // In general though we won't link right if these
- // symbols are stripped, and LTO currently strips them.
- if &*name == "rust_eh_personality" ||
- &*name == "rust_eh_register_frames" ||
- &*name == "rust_eh_unregister_frames" {
- SymbolExportLevel::C
- } else {
- SymbolExportLevel::Rust
- }
- } else {
- export_level(tcx, def_id)
- };
- debug!("EXPORTED SYMBOL (re-export): {} ({:?})", name, export_level);
- (str::to_owned(&name), Some(def_id), export_level)
- })
- .collect();
-
- // Sort so we get a stable incr. comp. hash.
- crate_exports.sort_unstable_by(|&(ref name1, ..), &(ref name2, ..)| {
- name1.cmp(name2)
- });
-
- Arc::new(crate_exports)
- };
- providers.symbol_export_level = export_level;
+pub fn provide_extern(providers: &mut Providers) {
+ providers.is_reachable_non_generic = is_reachable_non_generic_provider;
+ providers.symbol_export_level = symbol_export_level_provider;
}
-fn export_level(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {
+fn symbol_export_level_provider(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {
// We export anything that's not mangled at the "C" layer as it probably has
// to do with ABI concerns. We do not, however, apply such treatment to
// special symbols in the standard library for various plumbing between
AllPasses, Sanitizer, Lto};
use rustc::session::Session;
use rustc::util::nodemap::FxHashMap;
-use rustc_back::LinkerFlavor;
use time_graph::{self, TimeGraph, Timeline};
use llvm;
use llvm::{ModuleRef, TargetMachineRef, PassManagerRef, DiagnosticInfoRef};
pub tm_factory: Arc<Fn() -> Result<TargetMachineRef, String> + Send + Sync>,
pub msvc_imps_needed: bool,
pub target_pointer_width: String,
- binaryen_linker: bool,
debuginfo: config::DebugInfoLevel,
- wasm_import_memory: bool,
// Number of cgus excluding the allocator/metadata modules
pub total_cgus: usize,
f(cpm)
}
- // If we're going to generate wasm code from the assembly that llvm
- // generates then we'll be transitively affecting a ton of options below.
- // This only happens on the wasm target now.
- let asm2wasm = cgcx.binaryen_linker &&
- !cgcx.crate_types.contains(&config::CrateTypeRlib) &&
- mtrans.kind == ModuleKind::Regular;
-
// If we don't have the integrated assembler, then we need to emit asm
// from LLVM and use `gcc` to create the object file.
let asm_to_obj = config.emit_obj && config.no_integrated_as;
// just llvm bitcode. In that case write bitcode, and possibly
// delete the bitcode if it wasn't requested. Don't generate the
// machine code, instead copy the .o file from the .bc
- let write_bc = config.emit_bc || (config.obj_is_bitcode && !asm2wasm);
- let rm_bc = !config.emit_bc && config.obj_is_bitcode && !asm2wasm;
- let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm && !asm_to_obj;
- let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode && !asm2wasm;
+ let write_bc = config.emit_bc || config.obj_is_bitcode;
+ let rm_bc = !config.emit_bc && config.obj_is_bitcode;
+ let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm_to_obj;
+ let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode;
let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, module_name);
timeline.record("ir");
}
- if config.emit_asm || (asm2wasm && config.emit_obj) || asm_to_obj {
+ if config.emit_asm || asm_to_obj {
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
// We can't use the same module for asm and binary output, because that triggers
// various errors like invalid IR or broken binaries, so we might have to clone the
// module to produce the asm output
- let llmod = if config.emit_obj && !asm2wasm {
+ let llmod = if config.emit_obj {
llvm::LLVMCloneModule(llmod)
} else {
llmod
write_output_file(diag_handler, tm, cpm, llmod, &path,
llvm::FileType::AssemblyFile)
})?;
- if config.emit_obj && !asm2wasm {
+ if config.emit_obj {
llvm::LLVMDisposeModule(llmod);
}
timeline.record("asm");
}
- if asm2wasm && config.emit_obj {
- let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
- let suffix = ".wasm.map"; // FIXME use target suffix
- let map = cgcx.output_filenames.path(OutputType::Exe)
- .with_extension(&suffix[1..]);
- binaryen_assemble(cgcx, diag_handler, &assembly, &obj_out, &map);
- timeline.record("binaryen");
-
- if !config.emit_asm {
- drop(fs::remove_file(&assembly));
- }
- } else if write_obj {
+ if write_obj {
with_codegen(tm, llmod, config.no_builtins, |cpm| {
write_output_file(diag_handler, tm, cpm, llmod, &obj_out,
llvm::FileType::ObjectFile)
&cgcx.output_filenames))
}
-/// Translates the LLVM-generated `assembly` on the filesystem into a wasm
-/// module using binaryen, placing the output at `object`.
-///
-/// In this case the "object" is actually a full and complete wasm module. We
-/// won't actually be doing anything else to the output for now. This is all
-/// pretty janky and will get removed as soon as a linker for wasm exists.
-fn binaryen_assemble(cgcx: &CodegenContext,
- handler: &Handler,
- assembly: &Path,
- object: &Path,
- map: &Path) {
- use rustc_binaryen::{Module, ModuleOptions};
-
- let input = fs::read(&assembly).and_then(|contents| {
- Ok(CString::new(contents)?)
- });
- let mut options = ModuleOptions::new();
- if cgcx.debuginfo != config::NoDebugInfo {
- options.debuginfo(true);
- let map_file_name = map.file_name().unwrap();
- options.source_map_url(map_file_name.to_str().unwrap());
- }
-
- options.stack(1024 * 1024);
- options.import_memory(cgcx.wasm_import_memory);
- let assembled = input.and_then(|input| {
- Module::new(&input, &options)
- .map_err(|e| io::Error::new(io::ErrorKind::Other, e))
- });
- let err = assembled.and_then(|binary| {
- fs::write(&object, binary.data()).and_then(|()| {
- if cgcx.debuginfo != config::NoDebugInfo {
- fs::write(map, binary.source_map())
- } else {
- Ok(())
- }
- })
- });
- if let Err(e) = err {
- handler.err(&format!("failed to run binaryen assembler: {}", e));
- }
-}
-
pub(crate) struct CompiledModules {
pub modules: Vec<CompiledModule>,
pub metadata_module: CompiledModule,
let coordinator_send = tcx.tx_to_llvm_workers.clone();
let sess = tcx.sess;
- let exported_symbols = match sess.lto() {
- Lto::No => None,
- Lto::ThinLocal => {
- let mut exported_symbols = FxHashMap();
- exported_symbols.insert(LOCAL_CRATE, tcx.exported_symbols(LOCAL_CRATE));
- Some(Arc::new(exported_symbols))
- }
- Lto::Yes | Lto::Fat | Lto::Thin => {
- let mut exported_symbols = FxHashMap();
- exported_symbols.insert(LOCAL_CRATE, tcx.exported_symbols(LOCAL_CRATE));
- for &cnum in tcx.crates().iter() {
- exported_symbols.insert(cnum, tcx.exported_symbols(cnum));
+ // Compute the set of symbols we need to retain when doing LTO (if we need to)
+ let exported_symbols = {
+ let mut exported_symbols = FxHashMap();
+
+ let copy_symbols = |cnum| {
+ let symbols = tcx.exported_symbols(cnum)
+ .iter()
+ .map(|&(s, lvl)| (s.symbol_name(tcx).to_string(), lvl))
+ .collect();
+ Arc::new(symbols)
+ };
+
+ match sess.lto() {
+ Lto::No => None,
+ Lto::ThinLocal => {
+ exported_symbols.insert(LOCAL_CRATE, copy_symbols(LOCAL_CRATE));
+ Some(Arc::new(exported_symbols))
+ }
+ Lto::Yes | Lto::Fat | Lto::Thin => {
+ exported_symbols.insert(LOCAL_CRATE, copy_symbols(LOCAL_CRATE));
+ for &cnum in tcx.crates().iter() {
+ exported_symbols.insert(cnum, copy_symbols(cnum));
+ }
+ Some(Arc::new(exported_symbols))
}
- Some(Arc::new(exported_symbols))
}
};
each_linked_rlib_for_lto.push((cnum, path.to_path_buf()));
}));
- let wasm_import_memory =
- attr::contains_name(&tcx.hir.krate().attrs, "wasm_import_memory");
-
let assembler_cmd = if modules_config.no_integrated_as {
// HACK: currently we use linker (gcc) as our assembler
- let (name, mut cmd, _) = get_linker(sess);
+ let (name, mut cmd) = get_linker(sess);
cmd.args(&sess.target.target.options.asm_args);
Some(Arc::new(AssemblerCommand {
name,
total_cgus,
msvc_imps_needed: msvc_imps_needed(tcx),
target_pointer_width: tcx.sess.target.target.target_pointer_width.clone(),
- binaryen_linker: tcx.sess.linker_flavor() == LinkerFlavor::Binaryen,
debuginfo: tcx.sess.opts.debuginfo,
- wasm_import_memory,
assembler_cmd,
};
use abi;
use back::link;
-use back::symbol_export;
use back::write::{self, OngoingCrateTranslation, create_target_machine};
use llvm::{ContextRef, ModuleRef, ValueRef, Vector, get_param};
use llvm;
use rustc::dep_graph::{DepNode, DepConstructor};
use rustc::ty::subst::Kind;
use rustc::middle::cstore::{self, LinkMeta, LinkagePreference};
+use rustc::middle::exported_symbols;
use rustc::util::common::{time, print_time_passes_entry};
use rustc::session::config::{self, NoDebugInfo};
use rustc::session::Session;
use trans_item::{MonoItem, BaseMonoItemExt, MonoItemExt, DefPathBasedNames};
use type_::Type;
use type_of::LayoutLlvmExt;
-use rustc::util::nodemap::{NodeSet, FxHashMap, FxHashSet, DefIdSet};
+use rustc::util::nodemap::{FxHashMap, FxHashSet, DefIdSet};
use CrateInfo;
use std::any::Any;
use mir::operand::OperandValue;
-pub use rustc_trans_utils::{find_exported_symbols, check_for_rustc_errors_attr};
+pub use rustc_trans_utils::check_for_rustc_errors_attr;
pub use rustc_mir::monomorphize::item::linkage_by_name;
pub struct StatRecorder<'a, 'tcx: 'a> {
fn write_metadata<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>,
llmod_id: &str,
- link_meta: &LinkMeta,
- exported_symbols: &NodeSet)
+ link_meta: &LinkMeta)
-> (ContextRef, ModuleRef, EncodedMetadata) {
use std::io::Write;
use flate2::Compression;
EncodedMetadata::new());
}
- let metadata = tcx.encode_metadata(link_meta, exported_symbols);
+ let metadata = tcx.encode_metadata(link_meta);
if kind == MetadataKind::Uncompressed {
return (metadata_llcx, metadata_llmod, metadata);
}
let llmeta = C_bytes_in_context(metadata_llcx, &compressed);
let llconst = C_struct_in_context(metadata_llcx, &[llmeta], false);
- let name = symbol_export::metadata_symbol_name(tcx);
+ let name = exported_symbols::metadata_symbol_name(tcx);
let buf = CString::new(name).unwrap();
let llglobal = unsafe {
llvm::LLVMAddGlobal(metadata_llmod, val_ty(llconst).to_ref(), buf.as_ptr())
let crate_hash = tcx.crate_hash(LOCAL_CRATE);
let link_meta = link::build_link_meta(crate_hash);
- let exported_symbol_node_ids = find_exported_symbols(tcx);
// Translate the metadata.
let llmod_id = "metadata";
let (metadata_llcx, metadata_llmod, metadata) =
time(tcx.sess.time_passes(), "write metadata", || {
- write_metadata(tcx, llmod_id, &link_meta, &exported_symbol_node_ids)
+ write_metadata(tcx, llmod_id, &link_meta)
});
let metadata_module = ModuleTranslation {
if cx.tcx.is_translated_item(instance_def_id) {
if instance_def_id.is_local() {
- if !cx.tcx.is_exported_symbol(instance_def_id) {
+ if !cx.tcx.is_reachable_non_generic(instance_def_id) {
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
} else {
}
pub fn type_is_sized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
- ty.is_sized(tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
+ ty.is_sized(tcx.at(DUMMY_SP), ty::ParamEnv::empty(traits::Reveal::All))
}
pub fn type_is_freeze<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
let g = declare::define_global(cx, &sym[..], llty).unwrap();
- if !cx.tcx.is_exported_symbol(def_id) {
+ if !cx.tcx.is_reachable_non_generic(def_id) {
unsafe {
llvm::LLVMRustSetVisibility(g, llvm::Visibility::Hidden);
}
pub fn type_has_metadata(&self, ty: Ty<'tcx>) -> bool {
use syntax_pos::DUMMY_SP;
- if ty.is_sized(self.tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP) {
+ if ty.is_sized(self.tcx.at(DUMMY_SP), ty::ParamEnv::empty(traits::Reveal::All)) {
return false;
}
ifn!("llvm.bswap.i64", fn(t_i64) -> t_i64);
ifn!("llvm.bswap.i128", fn(t_i128) -> t_i128);
+ ifn!("llvm.bitreverse.i8", fn(t_i8) -> t_i8);
+ ifn!("llvm.bitreverse.i16", fn(t_i16) -> t_i16);
+ ifn!("llvm.bitreverse.i32", fn(t_i32) -> t_i32);
+ ifn!("llvm.bitreverse.i64", fn(t_i64) -> t_i64);
+ ifn!("llvm.bitreverse.i128", fn(t_i128) -> t_i128);
+
ifn!("llvm.sadd.with.overflow.i8", fn(t_i8, t_i8) -> mk_struct!{t_i8, i1});
ifn!("llvm.sadd.with.overflow.i16", fn(t_i16, t_i16) -> mk_struct!{t_i16, i1});
ifn!("llvm.sadd.with.overflow.i32", fn(t_i32, t_i32) -> mk_struct!{t_i32, i1});
// visible). It might better to use the `exported_items` set from
// `driver::CrateAnalysis` in the future, but (atm) this set is not
// available in the translation pass.
- !cx.tcx.is_exported_symbol(def_id)
+ !cx.tcx.is_reachable_non_generic(def_id)
}
#[allow(non_snake_case)]
], None)
},
"ctlz" | "ctlz_nonzero" | "cttz" | "cttz_nonzero" | "ctpop" | "bswap" |
- "add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" |
- "overflowing_add" | "overflowing_sub" | "overflowing_mul" |
+ "bitreverse" | "add_with_overflow" | "sub_with_overflow" |
+ "mul_with_overflow" | "overflowing_add" | "overflowing_sub" | "overflowing_mul" |
"unchecked_div" | "unchecked_rem" | "unchecked_shl" | "unchecked_shr" => {
let ty = arg_tys[0];
match int_type_width_signed(ty, cx) {
&[args[0].immediate()], None)
}
}
+ "bitreverse" => {
+ bx.call(cx.get_intrinsic(&format!("llvm.bitreverse.i{}", width)),
+ &[args[0].immediate()], None)
+ }
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
let intrinsic = format!("llvm.{}{}.with.overflow.i{}",
if signed { 's' } else { 'u' },
extern crate rustc_allocator;
extern crate rustc_apfloat;
extern crate rustc_back;
-extern crate rustc_binaryen;
extern crate rustc_const_math;
extern crate rustc_data_structures;
extern crate rustc_demangle;
extern crate syntax_pos;
extern crate rustc_errors as errors;
extern crate serialize;
-#[cfg(windows)]
extern crate cc; // Used to locate MSVC
extern crate tempdir;
pub extern crate rustc as __rustc;
-use rustc::ty::{TyCtxt, Instance};
-use rustc::hir;
-use rustc::hir::def_id::LOCAL_CRATE;
-use rustc::hir::map as hir_map;
-use rustc::util::nodemap::NodeSet;
+use rustc::ty::TyCtxt;
pub mod diagnostics;
pub mod link;
}
}
-/// The context provided lists a set of reachable ids as calculated by
-/// middle::reachable, but this contains far more ids and symbols than we're
-/// actually exposing from the object file. This function will filter the set in
-/// the context to the set of ids which correspond to symbols that are exposed
-/// from the object file being generated.
-///
-/// This list is later used by linkers to determine the set of symbols needed to
-/// be exposed from a dynamic library and it's also encoded into the metadata.
-pub fn find_exported_symbols<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> NodeSet {
- tcx.reachable_set(LOCAL_CRATE).0.iter().cloned().filter(|&id| {
- // Next, we want to ignore some FFI functions that are not exposed from
- // this crate. Reachable FFI functions can be lumped into two
- // categories:
- //
- // 1. Those that are included statically via a static library
- // 2. Those included otherwise (e.g. dynamically or via a framework)
- //
- // Although our LLVM module is not literally emitting code for the
- // statically included symbols, it's an export of our library which
- // needs to be passed on to the linker and encoded in the metadata.
- //
- // As a result, if this id is an FFI item (foreign item) then we only
- // let it through if it's included statically.
- match tcx.hir.get(id) {
- hir_map::NodeForeignItem(..) => {
- let def_id = tcx.hir.local_def_id(id);
- tcx.is_statically_included_foreign_item(def_id)
- }
-
- // Only consider nodes that actually have exported symbols.
- hir_map::NodeItem(&hir::Item {
- node: hir::ItemStatic(..), .. }) |
- hir_map::NodeItem(&hir::Item {
- node: hir::ItemFn(..), .. }) |
- hir_map::NodeImplItem(&hir::ImplItem {
- node: hir::ImplItemKind::Method(..), .. }) => {
- let def_id = tcx.hir.local_def_id(id);
- let generics = tcx.generics_of(def_id);
- (generics.parent_types == 0 && generics.types.is_empty()) &&
- // Functions marked with #[inline] are only ever translated
- // with "internal" linkage and are never exported.
- !Instance::mono(tcx, def_id).def.requires_local(tcx)
- }
-
- _ => false
- }
- }).collect()
-}
-
__build_diagnostic_array! { librustc_trans_utils, DIAGNOSTICS }
// if the `rustc_attrs` feature is not enabled, then the
// attributes we are interested in cannot be present anyway, so
// skip the walk.
- if !tcx.sess.features.borrow().rustc_attrs {
+ if !tcx.features().rustc_attrs {
return;
}
tcx.sess.abort_if_errors();
let link_meta = build_link_meta(tcx.crate_hash(LOCAL_CRATE));
- let exported_symbols = ::find_exported_symbols(tcx);
- let metadata = tcx.encode_metadata(&link_meta, &exported_symbols);
+ let metadata = tcx.encode_metadata(&link_meta);
box OngoingCrateTranslation {
metadata: metadata,
let trait_def = self.tcx().trait_def(trait_def_id);
- if !self.tcx().sess.features.borrow().unboxed_closures &&
+ if !self.tcx().features().unboxed_closures &&
trait_segment.with_parameters(|p| p.parenthesized) != trait_def.paren_sugar {
// For now, require that parenthetical notation be used only with `Fn()` etc.
let msg = if trait_def.paren_sugar {
}
};
if pat_adjustments.len() > 0 {
- if tcx.sess.features.borrow().match_default_bindings {
+ if tcx.features().match_default_bindings {
debug!("default binding mode is now {:?}", def_bm);
self.inh.tables.borrow_mut()
.pat_adjustments_mut()
let opt_trait_ref = match obligation.predicate {
ty::Predicate::Projection(ref data) => Some(data.to_poly_trait_ref(self.tcx)),
ty::Predicate::Trait(ref data) => Some(data.to_poly_trait_ref()),
- ty::Predicate::Equate(..) => None,
ty::Predicate::Subtype(..) => None,
ty::Predicate::RegionOutlives(..) => None,
ty::Predicate::TypeOutlives(..) => None,
}
}
- if has_unsized_tuple_coercion && !self.tcx.sess.features.borrow().unsized_tuple_coercion {
+ if has_unsized_tuple_coercion && !self.tcx.features().unsized_tuple_coercion {
feature_gate::emit_feature_err(&self.tcx.sess.parse_sess,
"unsized_tuple_coercion",
self.cause.span,
"volatile_store" =>
(1, vec![ tcx.mk_mut_ptr(param(0)), param(0) ], tcx.mk_nil()),
- "ctpop" | "ctlz" | "ctlz_nonzero" | "cttz" | "cttz_nonzero" | "bswap" =>
+ "ctpop" | "ctlz" | "ctlz_nonzero" | "cttz" | "cttz_nonzero" |
+ "bswap" | "bitreverse" =>
(1, vec![param(0)], param(0)),
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" =>
// possible that there will be multiple applicable methods.
if !is_suggestion.0 {
if reached_raw_pointer
- && !self.tcx.sess.features.borrow().arbitrary_self_types {
+ && !self.tcx.features().arbitrary_self_types {
// this case used to be allowed by the compiler,
// so we do a future-compat lint here for the 2015 epoch
// (see https://github.com/rust-lang/rust/issues/46906)
_ => None,
}
}
- ty::Predicate::Equate(..) |
ty::Predicate::Subtype(..) |
ty::Predicate::Projection(..) |
ty::Predicate::RegionOutlives(..) |
}
fcx.demand_suptype(span, ret_ty, actual_return_ty);
- if fcx.tcx.sess.features.borrow().termination_trait {
+ if fcx.tcx.features().termination_trait {
// If the termination trait language item is activated, check that the main return type
// implements the termination trait.
if let Some(term_id) = fcx.tcx.lang_items().termination() {
let repr_type_ty = def.repr.discr_type().to_ty(tcx);
if repr_type_ty == tcx.types.i128 || repr_type_ty == tcx.types.u128 {
- if !tcx.sess.features.borrow().repr128 {
+ if !tcx.features().repr128 {
emit_feature_err(&tcx.sess.parse_sess,
"repr128",
sp,
let is_self_ty = |ty| fcx.infcx.can_eq(fcx.param_env, self_ty, ty).is_ok();
let self_kind = ExplicitSelf::determine(self_arg_ty, is_self_ty);
- if !fcx.tcx.sess.features.borrow().arbitrary_self_types {
+ if !fcx.tcx.features().arbitrary_self_types {
match self_kind {
ExplicitSelf::ByValue |
ExplicitSelf::ByReference(_, _) |
return;
}
- if tcx.sess.features.borrow().unboxed_closures {
+ if tcx.features().unboxed_closures {
// the feature gate allows all Fn traits
return;
}
};
let paren_sugar = tcx.has_attr(def_id, "rustc_paren_sugar");
- if paren_sugar && !tcx.sess.features.borrow().unboxed_closures {
+ if paren_sugar && !tcx.features().unboxed_closures {
let mut err = tcx.sess.struct_span_err(
item.span,
"the `#[rustc_paren_sugar]` attribute is a temporary means of controlling \
}
if !allow_defaults && p.default.is_some() {
- if !tcx.sess.features.borrow().default_type_parameter_fallback {
+ if !tcx.features().default_type_parameter_fallback {
tcx.lint_node(
lint::builtin::INVALID_TYPE_PARAM_DEFAULT,
p.id,
// feature gate SIMD types in FFI, since I (huonw) am not sure the
// ABIs are handled at all correctly.
if abi != abi::Abi::RustIntrinsic && abi != abi::Abi::PlatformIntrinsic
- && !tcx.sess.features.borrow().simd_ffi {
+ && !tcx.features().simd_ffi {
let check = |ast_ty: &hir::Ty, ty: Ty| {
if ty.is_simd() {
tcx.sess.struct_span_err(ast_ty.span,
let actual = tcx.fn_sig(main_def_id);
let expected_return_type = if tcx.lang_items().termination().is_some()
- && tcx.sess.features.borrow().termination_trait {
+ && tcx.features().termination_trait {
// we take the return type of the given main function, the real check is done
// in `check_fn`
actual.output().skip_binder()
--- /dev/null
+# The walking tour of rustdoc
+
+Rustdoc is implemented entirely within the crate `librustdoc`. After partially compiling a crate to
+get its AST (technically the HIR map) from rustc, librustdoc performs two major steps past that to
+render a set of documentation:
+
+* "Clean" the AST into a form that's more suited to creating documentation (and slightly more
+ resistant to churn in the compiler).
+* Use this cleaned AST to render a crate's documentation, one page at a time.
+
+Naturally, there's more than just this, and those descriptions simplify out lots of details, but
+that's the high-level overview.
+
+(Side note: this is a library crate! The `rustdoc` binary is created using the project in
+`src/tools/rustdoc`. Note that literally all that does is call the `main()` that's in this crate's
+`lib.rs`, though.)
+
+## Cheat sheet
+
+* Use `x.py build --stage 1 src/libstd src/tools/rustdoc` to make a usable rustdoc you can run on
+ other projects.
+ * Add `src/libtest` to be able to use `rustdoc --test`.
+ * If you've used `rustup toolchain link local /path/to/build/$TARGET/stage1` previously, then
+ after the previous build command, `cargo +local doc` will Just Work.
+* Use `x.py doc --stage 1 src/libstd` to use this rustdoc to generate the standard library docs.
+ * The completed docs will be available in `build/$TARGET/doc/std`, though the bundle is meant to
+ be used as though you would copy out the `doc` folder to a web server, since that's where the
+ CSS/JS and landing page are.
+* Most of the HTML printing code is in `html/format.rs` and `html/render.rs`. It's in a bunch of
+ `fmt::Display` implementations and supplementary functions.
+* The types that got `Display` impls above are defined in `clean/mod.rs`, right next to the custom
+ `Clean` trait used to process them out of the rustc HIR.
+* The bits specific to using rustdoc as a test harness are in `test.rs`.
+* The Markdown renderer is loaded up in `html/markdown.rs`, including functions for extracting
+ doctests from a given block of Markdown.
+* The tests on rustdoc *output* are located in `src/test/rustdoc`, where they're handled by the test
+ runner of rustbuild and the supplementary script `src/etc/htmldocck.py`.
+* Tests on search index generation are located in `src/test/rustdoc-js`, as a series of JavaScript
+ files that encode queries on the standard library search index and expected results.
+
+## From crate to clean
+
+In `core.rs` are two central items: the `DocContext` struct, and the `run_core` function. The latter
+is where rustdoc calls out to rustc to compile a crate to the point where rustdoc can take over. The
+former is a state container used when crawling through a crate to gather its documentation.
+
+The main process of crate crawling is done in `clean/mod.rs` through several implementations of the
+`Clean` trait defined within. This is a conversion trait, which defines one method:
+
+```rust
+pub trait Clean<T> {
+ fn clean(&self, cx: &DocContext) -> T;
+}
+```
+
+`clean/mod.rs` also defines the types for the "cleaned" AST used later on to render documentation
+pages. Each usually accompanies an implementation of `Clean` that takes some AST or HIR type from
+rustc and converts it into the appropriate "cleaned" type. "Big" items like modules or associated
+items may have some extra processing in their `Clean` implementations, but for the most part these
+impls are straightforward conversions. The "entry point" to this module is the `impl Clean<Crate>
+for visit_ast::RustdocVisitor`, which is called by `run_core` above.
+
+You see, I actually lied a little earlier: There's another AST transformation that happens before
+the events in `clean/mod.rs`. In `visit_ast.rs` is the type `RustdocVisitor`, which *actually*
+crawls a `hir::Crate` to get the first intermediate representation, defined in `doctree.rs`. This
+pass is mainly to get a few intermediate wrappers around the HIR types and to process visibility
+and inlining. This is where `#[doc(inline)]`, `#[doc(no_inline)]`, and `#[doc(hidden)]` are
+processed, as well as the logic for whether a `pub use` should get the full page or a "Reexport"
+line in the module page.
+
+The other major thing that happens in `clean/mod.rs` is the collection of doc comments and
+`#[doc=""]` attributes into a separate field of the Attributes struct, present on anything that gets
+hand-written documentation. This makes it easier to collect this documentation later in the process.
+
+The primary output of this process is a `clean::Crate` with a tree of `Item`s which describe the
+publicly-documentable items in the target crate.
+
+### Hot potato
+
+Before moving on to the next major step, a few important "passes" occur over the documentation.
+These do things like combine the separate "attributes" into a single string and strip leading
+whitespace to make the document easier on the markdown parser, or drop items that are not public or
+deliberately hidden with `#[doc(hidden)]`. These are all implemented in the `passes/` directory, one
+file per pass. By default, all of these passes are run on a crate, but the ones regarding dropping
+private/hidden items can be bypassed by passing `--document-private-items` to rustdoc.
+
+(Strictly speaking, you can fine-tune the passes run and even add your own, but [we're trying to
+deprecate that][44136]. If you need finer-grain control over these passes, please let us know!)
+
+[44136]: https://github.com/rust-lang/rust/issues/44136
+
+## From clean to crate
+
+This is where the "second phase" in rustdoc begins. This phase primarily lives in the `html/`
+folder, and it all starts with `run()` in `html/render.rs`. This code is responsible for setting up
+the `Context`, `SharedContext`, and `Cache` which are used during rendering, copying out the static
+files which live in every rendered set of documentation (things like the fonts, CSS, and JavaScript
+that live in `html/static/`), creating the search index, and printing out the source code rendering,
+before beginning the process of rendering all the documentation for the crate.
+
+Several functions implemented directly on `Context` take the `clean::Crate` and set up some state
+between rendering items or recursing on a module's child items. From here the "page rendering"
+begins, via an enormous `write!()` call in `html/layout.rs`. The parts that actually generate HTML
+from the items and documentation occurs within a series of `std::fmt::Display` implementations and
+functions that pass around a `&mut std::fmt::Formatter`. The top-level implementation that writes
+out the page body is the `impl<'a> fmt::Display for Item<'a>` in `html/render.rs`, which switches
+out to one of several `item_*` functions based on the kind of `Item` being rendered.
+
+Depending on what kind of rendering code you're looking for, you'll probably find it either in
+`html/render.rs` for major items like "what sections should I print for a struct page" or
+`html/format.rs` for smaller component pieces like "how should I print a where clause as part of
+some other item".
+
+Whenever rustdoc comes across an item that should print hand-written documentation alongside, it
+calls out to `html/markdown.rs` which interfaces with the Markdown parser. This is exposed as a
+series of types that wrap a string of Markdown, and implement `fmt::Display` to emit HTML text. It
+takes special care to enable certain features like footnotes and tables and add syntax highlighting
+to Rust code blocks (via `html/highlight.rs`) before running the Markdown parser. There's also a
+function in here (`find_testable_code`) that specifically scans for Rust code blocks so the
+test-runner code can find all the doctests in the crate.
+
+### From soup to nuts
+
+(alternate title: ["An unbroken thread that stretches from those first `Cell`s to us"][video])
+
+[video]: https://www.youtube.com/watch?v=hOLAGYmUQV0
+
+It's important to note that the AST cleaning can ask the compiler for information (crucially,
+`DocContext` contains a `TyCtxt`), but page rendering cannot. The `clean::Crate` created within
+`run_core` is passed outside the compiler context before being handed to `html::render::run`. This
+means that a lot of the "supplementary data" that isn't immediately available inside an item's
+definition, like which trait is the `Deref` trait used by the language, needs to be collected during
+cleaning, stored in the `DocContext`, and passed along to the `SharedContext` during HTML rendering.
+This manifests as a bunch of shared state, context variables, and `RefCell`s.
+
+Also of note is that some items that come from "asking the compiler" don't go directly into the
+`DocContext` - for example, when loading items from a foreign crate, rustdoc will ask about trait
+implementations and generate new `Item`s for the impls based on that information. This goes directly
+into the returned `Crate` rather than roundabout through the `DocContext`. This way, these
+implementations can be collected alongside the others, right before rendering the HTML.
+
+## Other tricks up its sleeve
+
+All this describes the process for generating HTML documentation from a Rust crate, but there are
+a couple of other major modes that rustdoc runs in. It can also be run on a standalone Markdown file, or
+it can run doctests on Rust code or standalone Markdown files. For the former, it shortcuts straight
+to `html/markdown.rs`, optionally including a mode which inserts a Table of Contents to the output
+HTML.
+
+For the latter, rustdoc runs a similar partial-compilation to get relevant documentation in
+`test.rs`, but instead of going through the full clean and render process, it runs a much simpler
+crate walk to grab *just* the hand-written documentation. Combined with the aforementioned
+"`find_testable_code`" in `html/markdown.rs`, it builds up a collection of tests to run before
+handing them off to the libtest test runner. One notable location in `test.rs` is the function
+`make_test`, which is where hand-written doctests get transformed into something that can be
+executed.
+
+## Dotting i's and crossing t's
+
+So that's rustdoc's code in a nutshell, but there are more things in the repo that deal with it. Since
+we have the full `compiletest` suite at hand, there's a set of tests in `src/test/rustdoc` that make
+sure the final HTML is what we expect in various situations. These tests also use a supplementary
+script, `src/etc/htmldocck.py`, that allows them to look through the final HTML using XPath notation
+to get a precise look at the output. The full description of all the commands available to rustdoc
+tests is in `htmldocck.py`.
+
+In addition, there are separate tests for the search index and rustdoc's ability to query it. The
+files in `src/test/rustdoc-js` each contain a different search query and the expected results,
+broken out by search tab. These files are processed by a script in `src/tools/rustdoc-js` and the
+Node.js runtime. These tests don't have as thorough a writeup, but a broad example that features
+results in all tabs can be found in `basic.js`. The basic idea is that you match a given `QUERY`
+with a set of `EXPECTED` results, complete with the full item path of each item.
match *self {
Predicate::Trait(ref pred) => pred.clean(cx),
- Predicate::Equate(ref pred) => pred.clean(cx),
Predicate::Subtype(ref pred) => pred.clean(cx),
Predicate::RegionOutlives(ref pred) => pred.clean(cx),
Predicate::TypeOutlives(ref pred) => pred.clean(cx),
}
}
-impl<'tcx> Clean<WherePredicate> for ty::EquatePredicate<'tcx> {
- fn clean(&self, cx: &DocContext) -> WherePredicate {
- let ty::EquatePredicate(ref lhs, ref rhs) = *self;
- WherePredicate::EqPredicate {
- lhs: lhs.clean(cx),
- rhs: rhs.clean(cx)
- }
- }
-}
-
impl<'tcx> Clean<WherePredicate> for ty::SubtypePredicate<'tcx> {
fn clean(&self, _cx: &DocContext) -> WherePredicate {
panic!("subtype predicates are an internal rustc artifact \
pub root_path: &'a str,
pub description: &'a str,
pub keywords: &'a str,
+ pub resource_suffix: &'a str,
}
pub fn render<T: fmt::Display, S: fmt::Display>(
<title>{title}</title>
- <link rel="stylesheet" type="text/css" href="{root_path}normalize.css">
- <link rel="stylesheet" type="text/css" href="{root_path}rustdoc.css" id="mainThemeStyle">
+ <link rel="stylesheet" type="text/css" href="{root_path}normalize{suffix}.css">
+ <link rel="stylesheet" type="text/css" href="{root_path}rustdoc{suffix}.css"
+ id="mainThemeStyle">
{themes}
- <link rel="stylesheet" type="text/css" href="{root_path}dark.css">
- <link rel="stylesheet" type="text/css" href="{root_path}main.css" id="themeStyle">
- <script src="{root_path}storage.js"></script>
+ <link rel="stylesheet" type="text/css" href="{root_path}dark{suffix}.css">
+ <link rel="stylesheet" type="text/css" href="{root_path}main{suffix}.css" id="themeStyle">
+ <script src="{root_path}storage{suffix}.js"></script>
{css_extension}
{favicon}
<div class="theme-picker">
<button id="theme-picker" aria-label="Pick another theme!">
- <img src="{root_path}brush.svg" width="18" alt="Pick another theme!">
+ <img src="{root_path}brush{suffix}.svg" width="18" alt="Pick another theme!">
</button>
<div id="theme-choices"></div>
</div>
- <script src="{root_path}theme.js"></script>
+ <script src="{root_path}theme{suffix}.js"></script>
<nav class="sub">
<form class="search-form js-only">
<div class="search-container">
window.rootPath = "{root_path}";
window.currentCrate = "{krate}";
</script>
- <script src="{root_path}main.js"></script>
+ <script src="{root_path}main{suffix}.js"></script>
<script defer src="{root_path}search-index.js"></script>
</body>
</html>"##,
css_extension = if css_file_extension {
- format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}theme.css\">",
- root_path = page.root_path)
+ format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{root_path}theme{suffix}.css\">",
+ root_path = page.root_path,
+ suffix=page.resource_suffix)
} else {
"".to_owned()
},
.filter_map(|t| t.file_stem())
.filter_map(|t| t.to_str())
.map(|t| format!(r#"<link rel="stylesheet" type="text/css" href="{}{}">"#,
- page.root_path, t))
+ page.root_path,
+ t.replace(".css", &format!("{}.css", page.resource_suffix))))
.collect::<String>(),
+ suffix=page.resource_suffix,
)
}
//! for creating the corresponding search index and source file renderings.
//! These threads are not parallelized (they haven't been a bottleneck yet), and
//! both occur before the crate is rendered.
+
pub use self::ExternalLocation::*;
use std::borrow::Cow;
pub sort_modules_alphabetically: bool,
/// Additional themes to be added to the generated docs.
pub themes: Vec<PathBuf>,
+ /// Suffix to be added on resource files (if suffix is "-v2" then "main.css" becomes
+ /// "main-v2.css").
+ pub resource_suffix: String,
}
impl SharedContext {
external_html: &ExternalHtml,
playground_url: Option<String>,
dst: PathBuf,
+ resource_suffix: String,
passes: FxHashSet<String>,
css_file_extension: Option<PathBuf>,
renderinfo: RenderInfo,
created_dirs: RefCell::new(FxHashSet()),
sort_modules_alphabetically,
themes,
+ resource_suffix,
};
// If user passed in `--playground-url` arg, we fill in crate name here
// Add all the static files. These may already exist, but we just
// overwrite them anyway to make sure that they're fresh and up-to-date.
- write(cx.dst.join("rustdoc.css"),
+ write(cx.dst.join(&format!("rustdoc{}.css", cx.shared.resource_suffix)),
include_bytes!("static/rustdoc.css"))?;
// To avoid "main.css" to be overwritten, we'll first run over the received themes and only
let mut f = try_err!(File::open(&entry), &entry);
try_err!(f.read_to_end(&mut content), &entry);
- write(cx.dst.join(try_none!(entry.file_name(), &entry)), content.as_slice())?;
- themes.insert(try_none!(try_none!(entry.file_stem(), &entry).to_str(), &entry).to_owned());
+ let theme = try_none!(try_none!(entry.file_stem(), &entry).to_str(), &entry);
+ let extension = try_none!(try_none!(entry.extension(), &entry).to_str(), &entry);
+ write(cx.dst.join(format!("{}{}.{}", theme, cx.shared.resource_suffix, extension)),
+ content.as_slice())?;
+ themes.insert(theme.to_owned());
}
- write(cx.dst.join("brush.svg"),
+ write(cx.dst.join(&format!("brush{}.svg", cx.shared.resource_suffix)),
include_bytes!("static/brush.svg"))?;
- write(cx.dst.join("main.css"),
+ write(cx.dst.join(&format!("main{}.css", cx.shared.resource_suffix)),
include_bytes!("static/themes/main.css"))?;
themes.insert("main".to_owned());
- write(cx.dst.join("dark.css"),
+ write(cx.dst.join(&format!("dark{}.css", cx.shared.resource_suffix)),
include_bytes!("static/themes/dark.css"))?;
themes.insert("dark".to_owned());
themes.sort();
// To avoid theme switch latencies as much as possible, we put everything theme related
// at the beginning of the html files into another js file.
- write(cx.dst.join("theme.js"), format!(
+ write(cx.dst.join(&format!("theme{}.js", cx.shared.resource_suffix)),
+ format!(
r#"var themes = document.getElementById("theme-choices");
var themePicker = document.getElementById("theme-picker");
themePicker.onclick = function() {{
}};
themes.appendChild(but);
}});
-"#, themes.iter()
- .map(|s| format!("\"{}\"", s))
- .collect::<Vec<String>>()
- .join(",")).as_bytes())?;
+"#,
+ themes.iter()
+ .map(|s| format!("\"{}\"", s))
+ .collect::<Vec<String>>()
+ .join(",")).as_bytes(),
+ )?;
+
+ write(cx.dst.join(&format!("main{}.js", cx.shared.resource_suffix)),
+ include_bytes!("static/main.js"))?;
- write(cx.dst.join("main.js"), include_bytes!("static/main.js"))?;
- write(cx.dst.join("storage.js"), include_bytes!("static/storage.js"))?;
+ {
+ let mut data = format!("var resourcesSuffix = \"{}\";\n",
+ cx.shared.resource_suffix).into_bytes();
+ data.extend_from_slice(include_bytes!("static/storage.js"));
+ write(cx.dst.join(&format!("storage{}.js", cx.shared.resource_suffix)), &data)?;
+ }
if let Some(ref css) = cx.shared.css_file_extension {
- let out = cx.dst.join("theme.css");
+ let out = cx.dst.join(&format!("theme{}.css", cx.shared.resource_suffix));
try_err!(fs::copy(css, out), css);
}
- write(cx.dst.join("normalize.css"),
+ write(cx.dst.join(&format!("normalize{}.css", cx.shared.resource_suffix)),
include_bytes!("static/normalize.css"))?;
write(cx.dst.join("FiraSans-Regular.woff"),
include_bytes!("static/FiraSans-Regular.woff"))?;
root_path: &root_path,
description: &desc,
keywords: BASIC_KEYWORDS,
+ resource_suffix: &self.scx.resource_suffix,
};
layout::render(&mut w, &self.scx.layout,
&page, &(""), &Source(contents),
title: &title,
description: &desc,
keywords: &keywords,
+ resource_suffix: &self.shared.resource_suffix,
};
reset_ids(true);
}
function switchTheme(styleElem, mainStyleElem, newTheme) {
- var newHref = mainStyleElem.href.replace("rustdoc.css", newTheme + ".css");
+ var fullBasicCss = "rustdoc" + resourcesSuffix + ".css";
+ var fullNewTheme = newTheme + resourcesSuffix + ".css";
+ var newHref = mainStyleElem.href.replace(fullBasicCss, fullNewTheme);
var found = false;
if (savedHref.length === 0) {
pub fn main() {
const STACK_SIZE: usize = 32_000_000; // 32MB
- env_logger::init().unwrap();
+ env_logger::init();
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
get_args().map(|args| main_args(&args)).unwrap_or(1)
}).unwrap().join().unwrap_or(101);
"check if given theme is valid",
"FILES")
}),
+ unstable("resource-suffix", |o| {
+ o.optopt("",
+ "resource-suffix",
+ "suffix to add to CSS and JavaScript files, e.g. \"main.css\" will become \
+ \"main-suffix.css\"",
+ "PATH")
+ }),
]
}
let display_warnings = matches.opt_present("display-warnings");
let linker = matches.opt_str("linker").map(PathBuf::from);
let sort_modules_alphabetically = !matches.opt_present("sort-modules-by-appearance");
+ let resource_suffix = matches.opt_str("resource-suffix");
match (should_test, markdown_input) {
(true, true) => {
Some("html") | None => {
html::render::run(krate, &external_html, playground_url,
output.unwrap_or(PathBuf::from("doc")),
+ resource_suffix.unwrap_or(String::new()),
passes.into_iter().collect(),
css_file_extension,
renderinfo,
nested: F) {
let mut attrs = Attributes::from_ast(self.sess.diagnostic(), attrs);
if let Some(ref cfg) = attrs.cfg {
- if !cfg.matches(&self.sess.parse_sess, Some(&self.sess.features.borrow())) {
+ if !cfg.matches(&self.sess.parse_sess, Some(&self.sess.features_untracked())) {
return;
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ExactSizeIterator for EscapeDefault {}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for EscapeDefault {}
#[stable(feature = "std_debug", since = "1.16.0")]
///
/// ```
/// use std::collections::HashMap;
- /// let mut map: HashMap<&str, isize> = HashMap::new();
+ /// let mut map: HashMap<&str, i32> = HashMap::new();
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
///
/// ```
/// use std::collections::HashMap;
- /// let mut map: HashMap<&str, isize> = HashMap::with_capacity(10);
+ /// let mut map: HashMap<&str, i32> = HashMap::with_capacity(10);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
/// use std::collections::hash_map::RandomState;
///
/// let hasher = RandomState::new();
- /// let map: HashMap<isize, isize> = HashMap::with_hasher(hasher);
+ /// let map: HashMap<i32, i32> = HashMap::with_hasher(hasher);
/// let hasher: &RandomState = map.hasher();
/// ```
#[stable(feature = "hashmap_public_hasher", since = "1.9.0")]
///
/// ```
/// use std::collections::HashMap;
- /// let map: HashMap<isize, isize> = HashMap::with_capacity(100);
+ /// let map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// assert!(map.capacity() >= 100);
/// ```
#[inline]
///
/// ```
/// use std::collections::HashMap;
- /// let mut map: HashMap<&str, isize> = HashMap::new();
+ /// let mut map: HashMap<&str, i32> = HashMap::new();
/// map.reserve(10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
/// ```
/// use std::collections::HashMap;
///
- /// let mut map: HashMap<isize, isize> = HashMap::with_capacity(100);
+ /// let mut map: HashMap<i32, i32> = HashMap::with_capacity(100);
/// map.insert(1, 2);
/// map.insert(3, 4);
/// assert!(map.capacity() >= 100);
/// ```
/// use std::collections::HashMap;
///
- /// let mut map: HashMap<isize, isize> = (0..8).map(|x|(x, x*10)).collect();
+ /// let mut map: HashMap<i32, i32> = (0..8).map(|x|(x, x*10)).collect();
/// map.retain(|&k, _| k % 2 == 0);
/// assert_eq!(map.len(), 4);
/// ```
/// map.insert("c", 3);
///
/// // Not possible with .iter()
- /// let vec: Vec<(&str, isize)> = map.into_iter().collect();
+ /// let vec: Vec<(&str, i32)> = map.into_iter().collect();
/// ```
fn into_iter(self) -> IntoIter<K, V> {
IntoIter { inner: self.table.into_iter() }
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for Iter<'a, K, V> {}
#[stable(feature = "rust1", since = "1.0.0")]
self.inner.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {}
#[stable(feature = "std_debug", since = "1.16.0")]
self.inner.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<K, V> FusedIterator for IntoIter<K, V> {}
#[stable(feature = "std_debug", since = "1.16.0")]
self.inner.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for Keys<'a, K, V> {}
#[stable(feature = "rust1", since = "1.0.0")]
self.inner.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for Values<'a, K, V> {}
#[stable(feature = "map_values_mut", since = "1.10.0")]
self.inner.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for ValuesMut<'a, K, V> {}
#[stable(feature = "std_debug", since = "1.16.0")]
self.inner.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K, V> FusedIterator for Drain<'a, K, V> {}
#[stable(feature = "std_debug", since = "1.16.0")]
assert_eq!(m2.len(), 2);
}
- thread_local! { static DROP_VECTOR: RefCell<Vec<isize>> = RefCell::new(Vec::new()) }
+ thread_local! { static DROP_VECTOR: RefCell<Vec<i32>> = RefCell::new(Vec::new()) }
#[derive(Hash, PartialEq, Eq)]
- struct Dropable {
+ struct Droppable {
k: usize,
}
- impl Dropable {
- fn new(k: usize) -> Dropable {
+ impl Droppable {
+ fn new(k: usize) -> Droppable {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[k] += 1;
});
- Dropable { k: k }
+ Droppable { k: k }
}
}
- impl Drop for Dropable {
+ impl Drop for Droppable {
fn drop(&mut self) {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[self.k] -= 1;
}
}
- impl Clone for Dropable {
- fn clone(&self) -> Dropable {
- Dropable::new(self.k)
+ impl Clone for Droppable {
+ fn clone(&self) -> Droppable {
+ Droppable::new(self.k)
}
}
});
for i in 0..100 {
- let d1 = Dropable::new(i);
- let d2 = Dropable::new(i + 100);
+ let d1 = Droppable::new(i);
+ let d2 = Droppable::new(i + 100);
m.insert(d1, d2);
}
});
for i in 0..50 {
- let k = Dropable::new(i);
+ let k = Droppable::new(i);
let v = m.remove(&k);
assert!(v.is_some());
});
for i in 0..100 {
- let d1 = Dropable::new(i);
- let d2 = Dropable::new(i + 100);
+ let d1 = Droppable::new(i);
+ let d2 = Droppable::new(i + 100);
hm.insert(d1, d2);
}
#[test]
fn test_empty_remove() {
- let mut m: HashMap<isize, bool> = HashMap::new();
+ let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.remove(&0), None);
}
#[test]
fn test_empty_entry() {
- let mut m: HashMap<isize, bool> = HashMap::new();
+ let mut m: HashMap<i32, bool> = HashMap::new();
match m.entry(0) {
Occupied(_) => panic!(),
Vacant(_) => {}
#[test]
fn test_empty_iter() {
- let mut m: HashMap<isize, bool> = HashMap::new();
+ let mut m: HashMap<i32, bool> = HashMap::new();
assert_eq!(m.drain().next(), None);
assert_eq!(m.keys().next(), None);
assert_eq!(m.values().next(), None);
fn test_entry_take_doesnt_corrupt() {
#![allow(deprecated)] //rand
// Test for #19292
- fn check(m: &HashMap<isize, ()>) {
+ fn check(m: &HashMap<i32, ()>) {
for k in m.keys() {
assert!(m.contains_key(k),
"{} is in keys() but not in the map?", k);
#[test]
fn test_retain() {
- let mut map: HashMap<isize, isize> = (0..100).map(|x|(x, x*10)).collect();
+ let mut map: HashMap<i32, i32> = (0..100).map(|x|(x, x*10)).collect();
map.retain(|&k, _| k % 2 == 0);
assert_eq!(map.len(), 50);
/// use std::collections::HashSet;
///
/// let xs = [1,2,3,4,5,6];
- /// let mut set: HashSet<isize> = xs.iter().cloned().collect();
+ /// let mut set: HashSet<i32> = xs.iter().cloned().collect();
/// set.retain(|&k| k % 2 == 0);
/// assert_eq!(set.len(), 3);
/// ```
self.iter.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K> FusedIterator for Iter<'a, K> {}
#[stable(feature = "std_debug", since = "1.16.0")]
self.iter.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<K> FusedIterator for IntoIter<K> {}
#[stable(feature = "std_debug", since = "1.16.0")]
self.iter.len()
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, K> FusedIterator for Drain<'a, K> {}
#[stable(feature = "std_debug", since = "1.16.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T, S> FusedIterator for Intersection<'a, T, S>
where T: Eq + Hash,
S: BuildHasher
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T, S> FusedIterator for Difference<'a, T, S>
where T: Eq + Hash,
S: BuildHasher
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T, S> FusedIterator for SymmetricDifference<'a, T, S>
where T: Eq + Hash,
S: BuildHasher
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a, T, S> FusedIterator for Union<'a, T, S>
where T: Eq + Hash,
S: BuildHasher
#[test]
fn test_retain() {
let xs = [1, 2, 3, 4, 5, 6];
- let mut set: HashSet<isize> = xs.iter().cloned().collect();
+ let mut set: HashSet<i32> = xs.iter().cloned().collect();
set.retain(|&k| k % 2 == 0);
assert_eq!(set.len(), 3);
assert!(set.contains(&2));
/// `ptr`.
/// * There is no guarantee that the memory pointed to by `ptr` contains a
/// valid nul terminator byte at the end of the string.
+ /// * It is not guaranteed that the memory pointed by `ptr` won't change
+ /// before the `CStr` has been destroyed.
///
/// > **Note**: This operation is intended to be a 0-cost cast but it is
/// > currently implemented with an up-front calculation of the length of
#![feature(float_from_str_radix)]
#![feature(fn_traits)]
#![feature(fnbox)]
-#![feature(fused)]
#![feature(generic_param_attrs)]
#![feature(hashmap_hasher)]
#![feature(heap_api)]
#![feature(rand)]
#![feature(raw)]
#![feature(rustc_attrs)]
+#![feature(stdsimd)]
#![feature(sip_hash_13)]
#![feature(slice_bytes)]
#![feature(slice_concat_ext)]
// compiler
pub mod rt;
+// Pull in the the `stdsimd` crate directly into libstd. This is the same as
+// libcore's arch/simd modules where the source of truth here is in a different
+// repository, but we pull things in here manually to get it into libstd.
+//
+// Note that the #[cfg] here is intended to do two things. First it allows us to
+// change the rustc implementation of intrinsics in stage0 by not compiling simd
+// intrinsics in stage0. Next it doesn't compile anything in test mode as
+// stdsimd has tons of its own tests which we don't want to run.
+#[path = "../stdsimd/stdsimd/mod.rs"]
+#[allow(missing_debug_implementations, missing_docs, dead_code)]
+#[unstable(feature = "stdsimd", issue = "48556")]
+#[cfg(all(not(stage0), not(test)))]
+mod stdsimd;
+
+// A "fake" module needed by the `stdsimd` module to compile, not actually
+// exported though.
+#[cfg(not(stage0))]
+mod coresimd {
+ pub use core::arch;
+ pub use core::simd;
+}
+
+#[unstable(feature = "stdsimd", issue = "48556")]
+#[cfg(all(not(stage0), not(test)))]
+pub use stdsimd::simd;
+#[unstable(feature = "stdsimd", issue = "48556")]
+#[cfg(all(not(stage0), not(test)))]
+pub use stdsimd::arch;
+
// Include a number of private modules that exist solely to provide
// the rustdoc documentation for primitive types. Using `include!`
// because rustdoc only looks for these modules at the crate level.
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for Iter<'a> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for Components<'a> {}
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[unstable(feature = "path_ancestors", issue = "48581")]
impl<'a> FusedIterator for Ancestors<'a> {}
////////////////////////////////////////////////////////////////////////////////
}
}
-/// This is ridiculously unstable, as it's a completely-punted-upon part
-/// of the `?`-in-`main` RFC. It's here only to allow experimenting with
-/// returning a code directly from main. It will definitely change
-/// drastically before being stabilized, if it doesn't just get deleted.
-#[doc(hidden)]
+/// This type represents the status code a process can return to its
+/// parent under normal termination.
+///
+/// Numeric values used in this type don't have portable meanings, and
+/// different platforms may mask different amounts of them.
+///
+/// For the platform's canonical successful and unsuccessful codes, see
+/// the [`SUCCESS`] and [`FAILURE`] associated items.
+///
+/// [`SUCCESS`]: #associatedconstant.SUCCESS
+/// [`FAILURE`]: #associatedconstant.FAILURE
+///
+/// **Warning**: While various forms of this were discussed in [RFC #1937],
+/// it was ultimately cut from that RFC, and thus this type is more subject
+/// to change even than the usual unstable item churn.
+///
+/// [RFC #1937]: https://github.com/rust-lang/rfcs/pull/1937
#[derive(Clone, Copy, Debug)]
-#[unstable(feature = "process_exitcode_placeholder", issue = "43301")]
-pub struct ExitCode(pub i32);
+#[unstable(feature = "process_exitcode_placeholder", issue = "48711")]
+pub struct ExitCode(imp::ExitCode);
+
+#[unstable(feature = "process_exitcode_placeholder", issue = "48711")]
+impl ExitCode {
+ /// The canonical ExitCode for successful termination on this platform.
+ ///
+ /// Note that a `()`-returning `main` implicitly results in a successful
+ /// termination, so there's no need to return this from `main` unless
+ /// you're also returning other possible codes.
+ #[unstable(feature = "process_exitcode_placeholder", issue = "48711")]
+ pub const SUCCESS: ExitCode = ExitCode(imp::ExitCode::SUCCESS);
+
+ /// The canonical ExitCode for unsuccessful termination on this platform.
+ ///
+ /// If you're only returning this and `SUCCESS` from `main`, consider
+ /// instead returning `Err(_)` and `Ok(())` respectively, which will
+ /// return the same codes (but will also `eprintln!` the error).
+ #[unstable(feature = "process_exitcode_placeholder", issue = "48711")]
+ pub const FAILURE: ExitCode = ExitCode(imp::ExitCode::FAILURE);
+}
impl Child {
/// Forces the child to exit. This is equivalent to sending a
::sys::os::getpid()
}
-#[cfg(target_arch = "wasm32")]
-mod exit {
- pub const SUCCESS: i32 = 0;
- pub const FAILURE: i32 = 1;
-}
-#[cfg(not(target_arch = "wasm32"))]
-mod exit {
- use libc;
- pub const SUCCESS: i32 = libc::EXIT_SUCCESS;
- pub const FAILURE: i32 = libc::EXIT_FAILURE;
-}
-
/// A trait for implementing arbitrary return types in the `main` function.
///
/// The c-main function only supports to return integers as return type.
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl Termination for () {
- fn report(self) -> i32 { exit::SUCCESS }
+ fn report(self) -> i32 { ExitCode::SUCCESS.report() }
}
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl<E: fmt::Debug> Termination for Result<(), E> {
fn report(self) -> i32 {
match self {
- Ok(val) => val.report(),
- Err(err) => {
- eprintln!("Error: {:?}", err);
- exit::FAILURE
- }
+ Ok(()) => ().report(),
+ Err(err) => Err::<!, _>(err).report(),
}
}
}
fn report(self) -> i32 {
let Err(err) = self;
eprintln!("Error: {:?}", err);
- exit::FAILURE
+ ExitCode::FAILURE.report()
}
}
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl Termination for ExitCode {
fn report(self) -> i32 {
- let ExitCode(code) = self;
- code
+ self.0.as_i32()
}
}
}
}
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+pub struct ExitCode(bool);
+
+impl ExitCode {
+ pub const SUCCESS: ExitCode = ExitCode(false);
+ pub const FAILURE: ExitCode = ExitCode(true);
+
+ pub fn as_i32(&self) -> i32 {
+ self.0 as i32
+ }
+}
+
pub struct Process(Void);
impl Process {
use os::unix::ffi::OsStrExt;
use fmt;
use io::{self, Error, ErrorKind};
+use libc::{EXIT_SUCCESS, EXIT_FAILURE};
use path::{Path, PathBuf};
use sys::fd::FileDesc;
use sys::fs::{File, OpenOptions};
}
}
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+pub struct ExitCode(u8);
+
+impl ExitCode {
+ pub const SUCCESS: ExitCode = ExitCode(EXIT_SUCCESS as _);
+ pub const FAILURE: ExitCode = ExitCode(EXIT_FAILURE as _);
+
+ pub fn as_i32(&self) -> i32 {
+ self.0 as i32
+ }
+}
+
/// The unique id of the process (this should never be negative).
pub struct Process {
pid: usize,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-pub use self::process_common::{Command, ExitStatus, Stdio, StdioPipes};
+pub use self::process_common::{Command, ExitStatus, ExitCode, Stdio, StdioPipes};
pub use self::process_inner::Process;
mod process_common;
use ffi::{OsString, OsStr, CString, CStr};
use fmt;
use io;
-use libc::{self, c_int, gid_t, uid_t, c_char};
+use libc::{self, c_int, gid_t, uid_t, c_char, EXIT_SUCCESS, EXIT_FAILURE};
use ptr;
use sys::fd::FileDesc;
use sys::fs::{File, OpenOptions};
}
}
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+pub struct ExitCode(u8);
+
+impl ExitCode {
+ pub const SUCCESS: ExitCode = ExitCode(EXIT_SUCCESS as _);
+ pub const FAILURE: ExitCode = ExitCode(EXIT_FAILURE as _);
+
+ pub fn as_i32(&self) -> i32 {
+ self.0 as i32
+ }
+}
+
#[cfg(all(test, not(target_os = "emscripten")))]
mod tests {
use super::*;
}
}
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+pub struct ExitCode(bool);
+
+impl ExitCode {
+ pub const SUCCESS: ExitCode = ExitCode(false);
+ pub const FAILURE: ExitCode = ExitCode(true);
+
+ pub fn as_i32(&self) -> i32 {
+ self.0 as i32
+ }
+}
+
pub struct Process(Void);
impl Process {
use fmt;
use fs;
use io::{self, Error, ErrorKind};
-use libc::c_void;
+use libc::{c_void, EXIT_SUCCESS, EXIT_FAILURE};
use mem;
use os::windows::ffi::OsStrExt;
use path::Path;
}
}
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+pub struct ExitCode(c::DWORD);
+
+impl ExitCode {
+ pub const SUCCESS: ExitCode = ExitCode(EXIT_SUCCESS as _);
+ pub const FAILURE: ExitCode = ExitCode(EXIT_FAILURE as _);
+
+ pub fn as_i32(&self) -> i32 {
+ self.0 as i32
+ }
+}
+
fn zeroed_startupinfo() -> c::STARTUPINFO {
c::STARTUPINFO {
cb: 0,
formatter.write_str("\"")?;
let mut pos = 0;
- loop {
- match self.next_surrogate(pos) {
- None => break,
- Some((surrogate_pos, surrogate)) => {
- write_str_escaped(
- formatter,
- unsafe { str::from_utf8_unchecked(
- &self.bytes[pos .. surrogate_pos]
- )},
- )?;
- write!(formatter, "\\u{{{:x}}}", surrogate)?;
- pos = surrogate_pos + 3;
- }
- }
+ while let Some((surrogate_pos, surrogate)) = self.next_surrogate(pos) {
+ write_str_escaped(
+ formatter,
+ unsafe { str::from_utf8_unchecked(
+ &self.bytes[pos .. surrogate_pos]
+ )},
+ )?;
+ write!(formatter, "\\u{{{:x}}}", surrogate)?;
+ pos = surrogate_pos + 3;
}
write_str_escaped(
formatter,
/// [`to_lowercase`]: ../../std/primitive.char.html#method.to_lowercase
/// [`char`]: ../../std/primitive.char.html
#[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct ToLowercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for ToLowercase {}
/// Returns an iterator that yields the uppercase equivalent of a `char`.
/// [`to_uppercase`]: ../../std/primitive.char.html#method.to_uppercase
/// [`char`]: ../../std/primitive.char.html
#[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Debug)]
+#[derive(Debug, Clone)]
pub struct ToUppercase(CaseMappingIter);
#[stable(feature = "rust1", since = "1.0.0")]
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for ToUppercase {}
-#[derive(Debug)]
+#[derive(Debug, Clone)]
enum CaseMappingIter {
Three(char, char, char),
Two(char, char),
#![feature(str_internals)]
#![feature(decode_utf8)]
#![feature(fn_traits)]
-#![feature(fused)]
#![feature(lang_items)]
#![feature(non_exhaustive)]
#![feature(staged_api)]
}
}
-#[unstable(feature = "fused", issue = "35602")]
impl<I> FusedIterator for Utf16Encoder<I>
where I: FusedIterator<Item = char> {}
}
}
-#[unstable(feature = "fused", issue = "35602")]
+#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for SplitWhitespace<'a> {}
use symbol::Symbol;
use tokenstream::{TokenStream, TokenTree};
-use std::cell::RefCell;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
// Holy self-referential!
/// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item) -> SyntaxExtension {
+pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item) -> SyntaxExtension {
let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
}
fn check_lhs_nt_follows(sess: &ParseSess,
- features: &RefCell<Features>,
+ features: &Features,
attrs: &[ast::Attribute],
                        lhs: &quoted::TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
}
fn check_matcher(sess: &ParseSess,
- features: &RefCell<Features>,
+ features: &Features,
attrs: &[ast::Attribute],
matcher: &[quoted::TokenTree]) -> bool {
let first_sets = FirstSets::new(matcher);
// Requires that `first_sets` is pre-computed for `matcher`;
// see `FirstSets::new`.
fn check_matcher_core(sess: &ParseSess,
- features: &RefCell<Features>,
+ features: &Features,
attrs: &[ast::Attribute],
first_sets: &FirstSets,
matcher: &[quoted::TokenTree],
}
fn has_legal_fragment_specifier(sess: &ParseSess,
- features: &RefCell<Features>,
+ features: &Features,
attrs: &[ast::Attribute],
                               tok: &quoted::TokenTree) -> Result<(), String> {
debug!("has_legal_fragment_specifier({:?})", tok);
}
fn is_legal_fragment_specifier(sess: &ParseSess,
- features: &RefCell<Features>,
+ features: &Features,
attrs: &[ast::Attribute],
frag_name: &str,
frag_span: Span) -> bool {
"item" | "block" | "stmt" | "expr" | "pat" |
"path" | "ty" | "ident" | "meta" | "tt" | "" => true,
"lifetime" => {
- if !features.borrow().macro_lifetime_matcher &&
+ if !features.macro_lifetime_matcher &&
!attr::contains_name(attrs, "allow_internal_unstable") {
let explain = feature_gate::EXPLAIN_LIFETIME_MATCHER;
emit_feature_err(sess,
true
},
"vis" => {
- if !features.borrow().macro_vis_matcher &&
+ if !features.macro_vis_matcher &&
!attr::contains_name(attrs, "allow_internal_unstable") {
let explain = feature_gate::EXPLAIN_VIS_MATCHER;
emit_feature_err(sess,
use syntax_pos::{BytePos, Span, DUMMY_SP};
use tokenstream;
-use std::cell::RefCell;
use std::iter::Peekable;
use rustc_data_structures::sync::Lrc;
input: tokenstream::TokenStream,
expect_matchers: bool,
sess: &ParseSess,
- features: &RefCell<Features>,
+ features: &Features,
attrs: &[ast::Attribute],
) -> Vec<TokenTree> {
// Will contain the final collection of `self::TokenTree`
trees: &mut Peekable<I>,
expect_matchers: bool,
sess: &ParseSess,
- features: &RefCell<Features>,
+ features: &Features,
attrs: &[ast::Attribute],
) -> TokenTree
where
input: &mut Peekable<I>,
span: Span,
sess: &ParseSess,
- features: &RefCell<Features>,
+ features: &Features,
attrs: &[ast::Attribute],
) -> (Option<token::Token>, KleeneOp)
where
match parse_kleene_op(input, span) {
// #2 is a KleeneOp (this is the only valid option) :)
Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
- if !features.borrow().macro_at_most_once_rep
+ if !features.macro_at_most_once_rep
&& !attr::contains_name(attrs, "allow_internal_unstable")
{
let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
Err(span) => span,
}
} else {
- if !features.borrow().macro_at_most_once_rep
+ if !features.macro_at_most_once_rep
&& !attr::contains_name(attrs, "allow_internal_unstable")
{
let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
// #2 is a KleeneOp :D
Ok(Ok(op)) if op == KleeneOp::ZeroOrOne => {
- if !features.borrow().macro_at_most_once_rep
+ if !features.macro_at_most_once_rep
&& !attr::contains_name(attrs, "allow_internal_unstable")
{
let explain = feature_gate::EXPLAIN_MACRO_AT_MOST_ONCE_REP;
Err(span) => span,
};
- if !features.borrow().macro_at_most_once_rep
+ if !features.macro_at_most_once_rep
&& !attr::contains_name(attrs, "allow_internal_unstable")
{
sess.span_diagnostic
&[$((stringify!($feature), $ver, $issue, set!($feature))),+];
/// A set of features to be used by later passes.
+ #[derive(Clone)]
pub struct Features {
/// `#![feature]` attrs for stable language features, for error reporting
pub declared_stable_lang_features: Vec<(Symbol, Span)>,
$($feature: false),+
}
}
+
+ pub fn walk_feature_fields<F>(&self, mut f: F)
+ where F: FnMut(&str, bool)
+ {
+ $(f(stringify!($feature), self.$feature);)+
+ }
}
};
// Allow trait methods with arbitrary self types
(active, arbitrary_self_types, "1.23.0", Some(44874)),
- // #![wasm_import_memory] attribute
- (active, wasm_import_memory, "1.22.0", None),
-
// `crate` in paths
(active, crate_in_paths, "1.23.0", Some(45477)),
never be stable",
cfg_fn!(rustc_attrs))),
- ("wasm_import_memory", Whitelisted, Gated(Stability::Unstable,
- "wasm_import_memory",
- "wasm_import_memory attribute is currently unstable",
- cfg_fn!(wasm_import_memory))),
-
("rustc_args_required_const", Whitelisted, Gated(Stability::Unstable,
"rustc_attrs",
"never will be stable",
self.advance_token()?;
Ok(ret_val)
}
+
+ fn fail_unterminated_raw_string(&self, pos: BytePos, hash_count: usize) {
+ let mut err = self.struct_span_fatal(pos, pos, "unterminated raw string");
+ err.span_label(self.mk_sp(pos, pos), "unterminated raw string");
+ if hash_count > 0 {
+ err.note(&format!("this raw string should be terminated with `\"{}`",
+ "#".repeat(hash_count)));
+ }
+ err.emit();
+ FatalError.raise();
+ }
+
fn fatal(&self, m: &str) -> FatalError {
self.fatal_span(self.peek_span, m)
}
Self::push_escaped_char_for_msg(&mut m, c);
self.fatal_span_(from_pos, to_pos, &m[..])
}
+
+ fn struct_span_fatal(&self,
+ from_pos: BytePos,
+ to_pos: BytePos,
+ m: &str)
+ -> DiagnosticBuilder<'a> {
+ self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), m)
+ }
+
fn struct_fatal_span_char(&self,
from_pos: BytePos,
to_pos: BytePos,
}
if self.is_eof() {
- let last_bpos = self.pos;
- self.fatal_span_(start_bpos, last_bpos, "unterminated raw string").raise();
+ self.fail_unterminated_raw_string(start_bpos, hash_count);
} else if !self.ch_is('"') {
let last_bpos = self.pos;
let curr_char = self.ch.unwrap();
let mut valid = true;
'outer: loop {
if self.is_eof() {
- let last_bpos = self.pos;
- self.fatal_span_(start_bpos, last_bpos, "unterminated raw string").raise();
+ self.fail_unterminated_raw_string(start_bpos, hash_count);
}
// if self.ch_is('"') {
// content_end_bpos = self.pos;
}
if self.is_eof() {
- let pos = self.pos;
- self.fatal_span_(start_bpos, pos, "unterminated raw string").raise();
+ self.fail_unterminated_raw_string(start_bpos, hash_count);
} else if !self.ch_is('"') {
let pos = self.pos;
let ch = self.ch.unwrap();
'outer: loop {
match self.ch {
None => {
- let pos = self.pos;
- self.fatal_span_(start_bpos, pos, "unterminated raw string").raise()
+ self.fail_unterminated_raw_string(start_bpos, hash_count);
}
Some('"') => {
content_end_bpos = self.pos;
let expr_str = self.sess.codemap().span_to_snippet(expr.span)
.unwrap_or(pprust::expr_to_string(&expr));
err.span_suggestion(expr.span,
- &format!("try {} the casted value", op_verb),
+ &format!("try {} the cast value", op_verb),
format!("({})", expr_str));
err.emit();
let mut parser = parse::Parser::new(fmt_str);
let mut pieces = vec![];
- loop {
- match parser.next() {
- Some(mut piece) => {
- if !parser.errors.is_empty() {
- break;
- }
- cx.verify_piece(&piece);
- cx.resolve_name_inplace(&mut piece);
- pieces.push(piece);
- }
- None => break,
+ while let Some(mut piece) = parser.next() {
+ if !parser.errors.is_empty() {
+ break;
}
+ cx.verify_piece(&piece);
+ cx.resolve_name_inplace(&mut piece);
+ pieces.push(piece);
}
let numbered_position_args = pieces.iter().any(|arg: &parse::Piece| {
pub fn macro_backtrace(mut self) -> Vec<MacroBacktrace> {
let mut prev_span = DUMMY_SP;
let mut result = vec![];
- loop {
- let info = match self.ctxt().outer().expn_info() {
- Some(info) => info,
- None => break,
- };
-
+ while let Some(info) = self.ctxt().outer().expn_info() {
let (pre, post) = match info.callee.format {
ExpnFormat::MacroAttribute(..) => ("#[", "]"),
ExpnFormat::MacroBang(..) => ("", "!"),
--- /dev/null
+Subproject commit 678cbd325c84070c9dbe4303969fbd2734c0b4ee
// ignore-hexagon
// ignore-mips
// ignore-powerpc
-// ignore-powerpc64
// ignore-s390x
// ignore-sparc
// ignore-wasm32
// ignore-mips64
// ignore-mips64el
// ignore-msp430
-// ignore-powerpc64
-// ignore-powerpc64le
// ignore-powerpc
// ignore-r600
// ignore-amdgcn
// ignore-mips64
// ignore-mips64el
// ignore-msp430
-// ignore-powerpc64
-// ignore-powerpc64le
// ignore-powerpc
// ignore-r600
// ignore-amdgcn
// ignore-mips64
// ignore-mips64el
// ignore-msp430
-// ignore-powerpc64
-// ignore-powerpc64le
// ignore-powerpc
// ignore-r600
// ignore-amdgcn
// ignore-mips64
// ignore-mips64el
// ignore-msp430
-// ignore-powerpc64
-// ignore-powerpc64le
// ignore-powerpc
// ignore-r600
// ignore-amdgcn
// ignore-mips
// ignore-mips64
// ignore-powerpc
-// ignore-powerpc64
// See repr-transparent.rs
#![crate_type="lib"]
let s: String = "abcdef".to_string();
v[3_usize];
v[3];
- v[3u8]; //~ERROR : std::ops::Index<u8>` is not satisfied
- v[3i8]; //~ERROR : std::ops::Index<i8>` is not satisfied
- v[3u32]; //~ERROR : std::ops::Index<u32>` is not satisfied
- v[3i32]; //~ERROR : std::ops::Index<i32>` is not satisfied
+ v[3u8]; //~ERROR : std::slice::SliceIndex<[isize]>` is not satisfied
+ v[3i8]; //~ERROR : std::slice::SliceIndex<[isize]>` is not satisfied
+ v[3u32]; //~ERROR : std::slice::SliceIndex<[isize]>` is not satisfied
+ v[3i32]; //~ERROR : std::slice::SliceIndex<[isize]>` is not satisfied
s.as_bytes()[3_usize];
s.as_bytes()[3];
s.as_bytes()[3u8]; //~ERROR : std::slice::SliceIndex<[u8]>` is not satisfied
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test makes sure that we detect changed feature gates.
+
+// revisions:rpass1 cfail2
+// compile-flags: -Z query-dep-graph
+
+#![feature(rustc_attrs)]
+#![cfg_attr(rpass1, feature(nll))]
+
+fn main() {
+ let mut v = vec![1];
+ v.push(v[0]);
+ //[cfail2]~^ ERROR cannot borrow
+}
fn new(f: u32) -> Rls699 {
Rls699 { fs }
}
+
+fn invalid_tuple_struct_access() {
+ bar.0;
+
+ struct S;
+ S.0;
+}
let mbe_matcher = quoted::parse(mbe_matcher.into_iter().collect(),
true,
cx.parse_sess,
- &RefCell::new(Features::new()),
+ &Features::new(),
&[]);
let map = match TokenTree::parse(cx, &mbe_matcher, args.iter().cloned().collect()) {
Success(map) => map,
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// Test that pointers to extern types can be casted from/to usize,
+// Test that pointers to extern types can be cast from/to usize,
// despite being !Sized.
#![feature(extern_types)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(intrinsics)]
+#![feature(intrinsics, i128_type)]
mod rusti {
extern "rust-intrinsic" {
pub fn cttz<T>(x: T) -> T;
pub fn cttz_nonzero<T>(x: T) -> T;
pub fn bswap<T>(x: T) -> T;
+ pub fn bitreverse<T>(x: T) -> T;
}
}
assert_eq!(ctpop(0u16), 0); assert_eq!(ctpop(0i16), 0);
assert_eq!(ctpop(0u32), 0); assert_eq!(ctpop(0i32), 0);
assert_eq!(ctpop(0u64), 0); assert_eq!(ctpop(0i64), 0);
+ assert_eq!(ctpop(0u128), 0); assert_eq!(ctpop(0i128), 0);
assert_eq!(ctpop(1u8), 1); assert_eq!(ctpop(1i8), 1);
assert_eq!(ctpop(1u16), 1); assert_eq!(ctpop(1i16), 1);
assert_eq!(ctpop(1u32), 1); assert_eq!(ctpop(1i32), 1);
assert_eq!(ctpop(1u64), 1); assert_eq!(ctpop(1i64), 1);
+ assert_eq!(ctpop(1u128), 1); assert_eq!(ctpop(1i128), 1);
assert_eq!(ctpop(10u8), 2); assert_eq!(ctpop(10i8), 2);
assert_eq!(ctpop(10u16), 2); assert_eq!(ctpop(10i16), 2);
assert_eq!(ctpop(10u32), 2); assert_eq!(ctpop(10i32), 2);
assert_eq!(ctpop(10u64), 2); assert_eq!(ctpop(10i64), 2);
+ assert_eq!(ctpop(10u128), 2); assert_eq!(ctpop(10i128), 2);
assert_eq!(ctpop(100u8), 3); assert_eq!(ctpop(100i8), 3);
assert_eq!(ctpop(100u16), 3); assert_eq!(ctpop(100i16), 3);
assert_eq!(ctpop(100u32), 3); assert_eq!(ctpop(100i32), 3);
assert_eq!(ctpop(100u64), 3); assert_eq!(ctpop(100i64), 3);
+ assert_eq!(ctpop(100u128), 3); assert_eq!(ctpop(100i128), 3);
assert_eq!(ctpop(-1i8 as u8), 8); assert_eq!(ctpop(-1i8), 8);
assert_eq!(ctpop(-1i16 as u16), 16); assert_eq!(ctpop(-1i16), 16);
assert_eq!(ctpop(-1i32 as u32), 32); assert_eq!(ctpop(-1i32), 32);
assert_eq!(ctpop(-1i64 as u64), 64); assert_eq!(ctpop(-1i64), 64);
+ assert_eq!(ctpop(-1i128 as u128), 128); assert_eq!(ctpop(-1i128), 128);
assert_eq!(ctlz(0u8), 8); assert_eq!(ctlz(0i8), 8);
assert_eq!(ctlz(0u16), 16); assert_eq!(ctlz(0i16), 16);
assert_eq!(ctlz(0u32), 32); assert_eq!(ctlz(0i32), 32);
assert_eq!(ctlz(0u64), 64); assert_eq!(ctlz(0i64), 64);
+ assert_eq!(ctlz(0u128), 128); assert_eq!(ctlz(0i128), 128);
assert_eq!(ctlz(1u8), 7); assert_eq!(ctlz(1i8), 7);
assert_eq!(ctlz(1u16), 15); assert_eq!(ctlz(1i16), 15);
assert_eq!(ctlz(1u32), 31); assert_eq!(ctlz(1i32), 31);
assert_eq!(ctlz(1u64), 63); assert_eq!(ctlz(1i64), 63);
+ assert_eq!(ctlz(1u128), 127); assert_eq!(ctlz(1i128), 127);
assert_eq!(ctlz(10u8), 4); assert_eq!(ctlz(10i8), 4);
assert_eq!(ctlz(10u16), 12); assert_eq!(ctlz(10i16), 12);
assert_eq!(ctlz(10u32), 28); assert_eq!(ctlz(10i32), 28);
assert_eq!(ctlz(10u64), 60); assert_eq!(ctlz(10i64), 60);
+ assert_eq!(ctlz(10u128), 124); assert_eq!(ctlz(10i128), 124);
assert_eq!(ctlz(100u8), 1); assert_eq!(ctlz(100i8), 1);
assert_eq!(ctlz(100u16), 9); assert_eq!(ctlz(100i16), 9);
assert_eq!(ctlz(100u32), 25); assert_eq!(ctlz(100i32), 25);
assert_eq!(ctlz(100u64), 57); assert_eq!(ctlz(100i64), 57);
+ assert_eq!(ctlz(100u128), 121); assert_eq!(ctlz(100i128), 121);
assert_eq!(ctlz_nonzero(1u8), 7); assert_eq!(ctlz_nonzero(1i8), 7);
assert_eq!(ctlz_nonzero(1u16), 15); assert_eq!(ctlz_nonzero(1i16), 15);
assert_eq!(ctlz_nonzero(1u32), 31); assert_eq!(ctlz_nonzero(1i32), 31);
assert_eq!(ctlz_nonzero(1u64), 63); assert_eq!(ctlz_nonzero(1i64), 63);
+ assert_eq!(ctlz_nonzero(1u128), 127); assert_eq!(ctlz_nonzero(1i128), 127);
assert_eq!(ctlz_nonzero(10u8), 4); assert_eq!(ctlz_nonzero(10i8), 4);
assert_eq!(ctlz_nonzero(10u16), 12); assert_eq!(ctlz_nonzero(10i16), 12);
assert_eq!(ctlz_nonzero(10u32), 28); assert_eq!(ctlz_nonzero(10i32), 28);
assert_eq!(ctlz_nonzero(10u64), 60); assert_eq!(ctlz_nonzero(10i64), 60);
+ assert_eq!(ctlz_nonzero(10u128), 124); assert_eq!(ctlz_nonzero(10i128), 124);
assert_eq!(ctlz_nonzero(100u8), 1); assert_eq!(ctlz_nonzero(100i8), 1);
assert_eq!(ctlz_nonzero(100u16), 9); assert_eq!(ctlz_nonzero(100i16), 9);
assert_eq!(ctlz_nonzero(100u32), 25); assert_eq!(ctlz_nonzero(100i32), 25);
assert_eq!(ctlz_nonzero(100u64), 57); assert_eq!(ctlz_nonzero(100i64), 57);
+ assert_eq!(ctlz_nonzero(100u128), 121); assert_eq!(ctlz_nonzero(100i128), 121);
assert_eq!(cttz(-1i8 as u8), 0); assert_eq!(cttz(-1i8), 0);
assert_eq!(cttz(-1i16 as u16), 0); assert_eq!(cttz(-1i16), 0);
assert_eq!(cttz(-1i32 as u32), 0); assert_eq!(cttz(-1i32), 0);
assert_eq!(cttz(-1i64 as u64), 0); assert_eq!(cttz(-1i64), 0);
+ assert_eq!(cttz(-1i128 as u128), 0); assert_eq!(cttz(-1i128), 0);
assert_eq!(cttz(0u8), 8); assert_eq!(cttz(0i8), 8);
assert_eq!(cttz(0u16), 16); assert_eq!(cttz(0i16), 16);
assert_eq!(cttz(0u32), 32); assert_eq!(cttz(0i32), 32);
assert_eq!(cttz(0u64), 64); assert_eq!(cttz(0i64), 64);
+ assert_eq!(cttz(0u128), 128); assert_eq!(cttz(0i128), 128);
assert_eq!(cttz(1u8), 0); assert_eq!(cttz(1i8), 0);
assert_eq!(cttz(1u16), 0); assert_eq!(cttz(1i16), 0);
assert_eq!(cttz(1u32), 0); assert_eq!(cttz(1i32), 0);
assert_eq!(cttz(1u64), 0); assert_eq!(cttz(1i64), 0);
+ assert_eq!(cttz(1u128), 0); assert_eq!(cttz(1i128), 0);
assert_eq!(cttz(10u8), 1); assert_eq!(cttz(10i8), 1);
assert_eq!(cttz(10u16), 1); assert_eq!(cttz(10i16), 1);
assert_eq!(cttz(10u32), 1); assert_eq!(cttz(10i32), 1);
assert_eq!(cttz(10u64), 1); assert_eq!(cttz(10i64), 1);
+ assert_eq!(cttz(10u128), 1); assert_eq!(cttz(10i128), 1);
assert_eq!(cttz(100u8), 2); assert_eq!(cttz(100i8), 2);
assert_eq!(cttz(100u16), 2); assert_eq!(cttz(100i16), 2);
assert_eq!(cttz(100u32), 2); assert_eq!(cttz(100i32), 2);
assert_eq!(cttz(100u64), 2); assert_eq!(cttz(100i64), 2);
+ assert_eq!(cttz(100u128), 2); assert_eq!(cttz(100i128), 2);
assert_eq!(cttz_nonzero(-1i8 as u8), 0); assert_eq!(cttz_nonzero(-1i8), 0);
assert_eq!(cttz_nonzero(-1i16 as u16), 0); assert_eq!(cttz_nonzero(-1i16), 0);
assert_eq!(cttz_nonzero(-1i32 as u32), 0); assert_eq!(cttz_nonzero(-1i32), 0);
assert_eq!(cttz_nonzero(-1i64 as u64), 0); assert_eq!(cttz_nonzero(-1i64), 0);
+ assert_eq!(cttz_nonzero(-1i128 as u128), 0); assert_eq!(cttz_nonzero(-1i128), 0);
assert_eq!(cttz_nonzero(1u8), 0); assert_eq!(cttz_nonzero(1i8), 0);
assert_eq!(cttz_nonzero(1u16), 0); assert_eq!(cttz_nonzero(1i16), 0);
assert_eq!(cttz_nonzero(1u32), 0); assert_eq!(cttz_nonzero(1i32), 0);
assert_eq!(cttz_nonzero(1u64), 0); assert_eq!(cttz_nonzero(1i64), 0);
+ assert_eq!(cttz_nonzero(1u128), 0); assert_eq!(cttz_nonzero(1i128), 0);
assert_eq!(cttz_nonzero(10u8), 1); assert_eq!(cttz_nonzero(10i8), 1);
assert_eq!(cttz_nonzero(10u16), 1); assert_eq!(cttz_nonzero(10i16), 1);
assert_eq!(cttz_nonzero(10u32), 1); assert_eq!(cttz_nonzero(10i32), 1);
assert_eq!(cttz_nonzero(10u64), 1); assert_eq!(cttz_nonzero(10i64), 1);
+ assert_eq!(cttz_nonzero(10u128), 1); assert_eq!(cttz_nonzero(10i128), 1);
assert_eq!(cttz_nonzero(100u8), 2); assert_eq!(cttz_nonzero(100i8), 2);
assert_eq!(cttz_nonzero(100u16), 2); assert_eq!(cttz_nonzero(100i16), 2);
assert_eq!(cttz_nonzero(100u32), 2); assert_eq!(cttz_nonzero(100i32), 2);
assert_eq!(cttz_nonzero(100u64), 2); assert_eq!(cttz_nonzero(100i64), 2);
+ assert_eq!(cttz_nonzero(100u128), 2); assert_eq!(cttz_nonzero(100i128), 2);
assert_eq!(bswap(0x0Au8), 0x0A); // no-op
assert_eq!(bswap(0x0Ai8), 0x0A); // no-op
assert_eq!(bswap(0x0ABBCC0Di32), 0x0DCCBB0A);
assert_eq!(bswap(0x0122334455667708u64), 0x0877665544332201);
assert_eq!(bswap(0x0122334455667708i64), 0x0877665544332201);
+ assert_eq!(bswap(0x0122334455667708u128), 0x08776655443322010000000000000000);
+ assert_eq!(bswap(0x0122334455667708i128), 0x08776655443322010000000000000000);
+
+ assert_eq!(bitreverse(0x0Au8), 0x50);
+ assert_eq!(bitreverse(0x0Ai8), 0x50);
+ assert_eq!(bitreverse(0x0A0Cu16), 0x3050);
+ assert_eq!(bitreverse(0x0A0Ci16), 0x3050);
+ assert_eq!(bitreverse(0x0ABBCC0Eu32), 0x7033DD50);
+ assert_eq!(bitreverse(0x0ABBCC0Ei32), 0x7033DD50);
+ assert_eq!(bitreverse(0x0122334455667708u64), 0x10EE66AA22CC4480);
+ assert_eq!(bitreverse(0x0122334455667708i64), 0x10EE66AA22CC4480);
+ assert_eq!(bitreverse(0x0122334455667708u128), 0x10EE66AA22CC44800000000000000000);
+ assert_eq!(bitreverse(0x0122334455667708i128), 0x10EE66AA22CC44800000000000000000);
}
}
// except according to those terms.
// ignore-windows
+// ignore-wasm32-bare no libs to link
#![feature(link_args)]
// `FusedIterator` in std but I was not able to isolate that into an
// external crate.
-#![feature(fused)]
use std::iter::FusedIterator;
struct Thing<'a>(&'a str);
use std::process::ExitCode;
fn main() -> ExitCode {
- ExitCode(0)
+ ExitCode::SUCCESS
}
fn main() {
const XYZ: char = 0x1F888 as char;
- //~^ ERROR only u8 can be casted into char
+ //~^ ERROR only u8 can be cast into char
const XY: char = 129160 as char;
- //~^ ERROR only u8 can be casted into char
+ //~^ ERROR only u8 can be cast into char
const ZYX: char = '\u{01F888}';
println!("{}", XYZ);
}
-error: only u8 can be casted into char
+error: only u8 can be cast into char
--> $DIR/cast_char.rs:14:23
|
LL | const XYZ: char = 0x1F888 as char;
LL | #![deny(overflowing_literals)]
| ^^^^^^^^^^^^^^^^^^^^
-error: only u8 can be casted into char
+error: only u8 can be cast into char
--> $DIR/cast_char.rs:16:22
|
LL | const XY: char = 129160 as char;
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![wasm_import_memory] //~ ERROR: currently unstable
-
-fn main() {}
-
+++ /dev/null
-error[E0658]: wasm_import_memory attribute is currently unstable
- --> $DIR/feature-gate-wasm_import_memory.rs:11:1
- |
-LL | #![wasm_import_memory] //~ ERROR: currently unstable
- | ^^^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(wasm_import_memory)] to the crate attributes to enable
-
-error: aborting due to previous error
-
-If you want more information on this error, try using "rustc --explain E0658"
-error[E0277]: the trait bound `std::vec::Vec<{integer}>: std::ops::Index<i32>` is not satisfied
+error[E0277]: the trait bound `i32: std::slice::SliceIndex<[{integer}]>` is not satisfied
--> $DIR/index-help.rs:13:5
|
LL | x[0i32]; //~ ERROR E0277
- | ^^^^^^^ vector indices are of type `usize` or ranges of `usize`
+ | ^^^^^^^ slice indices are of type `usize` or ranges of `usize`
|
- = help: the trait `std::ops::Index<i32>` is not implemented for `std::vec::Vec<{integer}>`
+ = help: the trait `std::slice::SliceIndex<[{integer}]>` is not implemented for `i32`
+ = note: required because of the requirements on the impl of `std::ops::Index<i32>` for `std::vec::Vec<{integer}>`
error: aborting due to previous error
| ---------- ^ --------- interpreted as generic arguments
| | |
| | not interpreted as comparison
- | help: try comparing the casted value: `(a as usize)`
+ | help: try comparing the cast value: `(a as usize)`
error: `<` is interpreted as a start of generic arguments for `usize`, not a comparison
--> $DIR/issue-22644.rs:17:33
| ---------- ^ -------------------- interpreted as generic arguments
| | |
| | not interpreted as comparison
- | help: try comparing the casted value: `(a as usize)`
+ | help: try comparing the cast value: `(a as usize)`
error: `<` is interpreted as a start of generic arguments for `usize`, not a comparison
--> $DIR/issue-22644.rs:19:31
| ---------- ^ - interpreted as generic arguments
| | |
| | not interpreted as comparison
- | help: try comparing the casted value: `(a as usize)`
+ | help: try comparing the cast value: `(a as usize)`
error: `<` is interpreted as a start of generic arguments for `usize`, not a comparison
--> $DIR/issue-22644.rs:21:31
| -------- ^ -------------------- interpreted as generic arguments
| | |
| | not interpreted as comparison
- | help: try comparing the casted value: `(a: usize)`
+ | help: try comparing the cast value: `(a: usize)`
error: `<` is interpreted as a start of generic arguments for `usize`, not a comparison
--> $DIR/issue-22644.rs:23:29
| -------- ^ - interpreted as generic arguments
| | |
| | not interpreted as comparison
- | help: try comparing the casted value: `(a: usize)`
+ | help: try comparing the cast value: `(a: usize)`
error: `<` is interpreted as a start of generic arguments for `usize`, not a comparison
--> $DIR/issue-22644.rs:28:20
| ^ not interpreted as comparison
LL | 4);
| - interpreted as generic arguments
-help: try comparing the casted value
+help: try comparing the cast value
|
LL | println!("{}", (a
LL | as
| ^ not interpreted as comparison
LL | 5);
| - interpreted as generic arguments
-help: try comparing the casted value
+help: try comparing the cast value
|
LL | println!("{}", (a
LL |
| ---------- ^^ --------- interpreted as generic arguments
| | |
| | not interpreted as shift
- | help: try shifting the casted value: `(a as usize)`
+ | help: try shifting the cast value: `(a as usize)`
error: expected type, found `4`
--> $DIR/issue-22644.rs:42:28
| --------- ^ - interpreted as generic arguments
| | |
| | not interpreted as comparison
- | help: try comparing the casted value: `($i as u32)`
+ | help: try comparing the cast value: `($i as u32)`
...
LL | is_plainly_printable!(c);
| ------------------------- in this macro invocation
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// must-compile-successfully
+
+#![feature(i128_type)]
+
+fn main() {
+ let error = 255i8; //~WARNING literal out of range for i8
+
+ let ok = 0b1000_0001; // should be ok -> i32
+ let ok = 0b0111_1111i8; // should be ok -> 127i8
+
+ let fail = 0b1000_0001i8; //~WARNING literal out of range for i8
+
+ let fail = 0x8000_0000_0000_0000i64; //~WARNING literal out of range for i64
+
+ let fail = 0x1_FFFF_FFFFu32; //~WARNING literal out of range for u32
+
+ let fail: i128 = 0x8000_0000_0000_0000_0000_0000_0000_0000;
+ //~^ WARNING literal out of range for i128
+
+ let fail = 0x8FFF_FFFF_FFFF_FFFE; //~WARNING literal out of range for i32
+
+ let fail = -0b1111_1111i8; //~WARNING literal out of range for i8
+}
--- /dev/null
+warning: literal out of range for i8
+ --> $DIR/type-overflow.rs:16:17
+ |
+LL | let error = 255i8; //~WARNING literal out of range for i8
+ | ^^^^^
+ |
+ = note: #[warn(overflowing_literals)] on by default
+
+warning: literal out of range for i8
+ --> $DIR/type-overflow.rs:21:16
+ |
+LL | let fail = 0b1000_0001i8; //~WARNING literal out of range for i8
+ | ^^^^^^^^^^^^^ help: consider using `u8` instead: `0b1000_0001u8`
+ |
+ = note: the literal `0b1000_0001i8` (decimal `129`) does not fit into an `i8` and will become `-127i8`
+
+warning: literal out of range for i64
+ --> $DIR/type-overflow.rs:23:16
+ |
+LL | let fail = 0x8000_0000_0000_0000i64; //~WARNING literal out of range for i64
+ | ^^^^^^^^^^^^^^^^^^^^^^^^ help: consider using `u64` instead: `0x8000_0000_0000_0000u64`
+ |
+ = note: the literal `0x8000_0000_0000_0000i64` (decimal `9223372036854775808`) does not fit into an `i64` and will become `-9223372036854775808i64`
+
+warning: literal out of range for u32
+ --> $DIR/type-overflow.rs:25:16
+ |
+LL | let fail = 0x1_FFFF_FFFFu32; //~WARNING literal out of range for u32
+ | ^^^^^^^^^^^^^^^^ help: consider using `u64` instead: `0x1_FFFF_FFFFu64`
+ |
+ = note: the literal `0x1_FFFF_FFFFu32` (decimal `8589934591`) does not fit into an `u32` and will become `4294967295u32`
+
+warning: literal out of range for i128
+ --> $DIR/type-overflow.rs:27:22
+ |
+LL | let fail: i128 = 0x8000_0000_0000_0000_0000_0000_0000_0000;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the literal `0x8000_0000_0000_0000_0000_0000_0000_0000` (decimal `170141183460469231731687303715884105728`) does not fit into an `i128` and will become `-170141183460469231731687303715884105728i128`
+ = help: consider using `u128` instead
+
+warning: literal out of range for i32
+ --> $DIR/type-overflow.rs:30:16
+ |
+LL | let fail = 0x8FFF_FFFF_FFFF_FFFE; //~WARNING literal out of range for i32
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: the literal `0x8FFF_FFFF_FFFF_FFFE` (decimal `10376293541461622782`) does not fit into an `i32` and will become `-2i32`
+ = help: consider using `i128` instead
+
+warning: literal out of range for i8
+ --> $DIR/type-overflow.rs:32:17
+ |
+LL | let fail = -0b1111_1111i8; //~WARNING literal out of range for i8
+ | ^^^^^^^^^^^^^ help: consider using `i16` instead: `0b1111_1111i16`
+ |
+ = note: the literal `0b1111_1111i8` (decimal `255`) does not fit into an `i8` and will become `-1i8`
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Znll-dump-cause
+
+#![feature(nll)]
+
+fn gimme(x: &(u32,)) -> &u32 {
+ &x.0
+}
+
+fn main() {
+ let x = gimme({
+ let v = (22,);
+ &v
+ //~^ ERROR `v` does not live long enough [E0597]
+ });
+ println!("{:?}", x);
+}
--- /dev/null
+error[E0597]: `v` does not live long enough
+ --> $DIR/borrowed-local-error.rs:22:9
+ |
+LL | let x = gimme({
+ | _____________-
+LL | | let v = (22,);
+LL | | &v
+ | | ^^ borrowed value does not live long enough
+LL | | //~^ ERROR `v` does not live long enough [E0597]
+LL | | });
+ | |_____-- borrow later used here
+ | |
+ | borrowed value only lives until here
+
+error: aborting due to previous error
+
+If you want more information on this error, try using "rustc --explain E0597"
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Znll-dump-cause
+
+#![feature(nll)]
+
+fn gimme(x: &(u32,)) -> &u32 {
+ &x.0
+}
+
+fn main() {
+ let x = gimme({
+ let v = 22;
+ &(v,)
+ //~^ ERROR borrowed value does not live long enough [E0597]
+ });
+ println!("{:?}", x);
+}
--- /dev/null
+error[E0597]: borrowed value does not live long enough
+ --> $DIR/borrowed-temporary-error.rs:22:10
+ |
+LL | &(v,)
+ | ^^^^ temporary value does not live long enough
+LL | //~^ ERROR borrowed value does not live long enough [E0597]
+LL | });
+ | - temporary value only lives until here
+LL | println!("{:?}", x);
+ | - borrow later used here
+
+error: aborting due to previous error
+
+If you want more information on this error, try using "rustc --explain E0597"
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Znll-dump-cause
+
+#![feature(nll)]
+#![allow(warnings)]
+
+fn foo<'a>(x: &'a (u32,)) -> &'a u32 {
+ let v = 22;
+ &v
+ //~^ ERROR `v` does not live long enough [E0597]
+}
+
+fn main() {}
--- /dev/null
+error[E0597]: `v` does not live long enough
+ --> $DIR/borrowed-universal-error-2.rs:18:5
+ |
+LL | &v
+ | ^^ borrowed value does not live long enough
+LL | //~^ ERROR `v` does not live long enough [E0597]
+LL | }
+ | - borrowed value only lives until here
+ |
+note: borrowed value must be valid for the lifetime 'a as defined on the function body at 16:1...
+ --> $DIR/borrowed-universal-error-2.rs:16:1
+ |
+LL | fn foo<'a>(x: &'a (u32,)) -> &'a u32 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+If you want more information on this error, try using "rustc --explain E0597"
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -Znll-dump-cause
+
+#![feature(nll)]
+#![allow(warnings)]
+
+fn gimme(x: &(u32,)) -> &u32 {
+ &x.0
+}
+
+fn foo<'a>(x: &'a (u32,)) -> &'a u32 {
+ let v = 22;
+ gimme(&(v,))
+ //~^ ERROR borrowed value does not live long enough [E0597]
+}
+
+fn main() {}
--- /dev/null
+error[E0597]: borrowed value does not live long enough
+ --> $DIR/borrowed-universal-error.rs:22:12
+ |
+LL | gimme(&(v,))
+ | ^^^^ temporary value does not live long enough
+LL | //~^ ERROR borrowed value does not live long enough [E0597]
+LL | }
+ | - temporary value only lives until here
+ |
+note: borrowed value must be valid for the lifetime 'a as defined on the function body at 20:1...
+ --> $DIR/borrowed-universal-error.rs:20:1
+ |
+LL | fn foo<'a>(x: &'a (u32,)) -> &'a u32 {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+If you want more information on this error, try using "rustc --explain E0597"
LL |
LL | deref(p);
| - borrow later used here
- |
- = note: borrowed value must be valid for lifetime '_#5r...
error: aborting due to previous error
LL |
LL | deref(p);
| - borrow later used here
- |
- = note: borrowed value must be valid for lifetime '_#6r...
error: aborting due to previous error
LL |
LL | deref(p);
| - borrow later used here
- |
- = note: borrowed value must be valid for lifetime '_#4r...
error: aborting due to previous error
LL |
LL | deref(p);
| - borrow later used here
- |
- = note: borrowed value must be valid for lifetime '_#4r...
error: aborting due to previous error
...
LL | }
| - borrowed value only lives until here
- |
- = note: borrowed value must be valid for lifetime '_#2r...
error: aborting due to 2 previous errors
LL | x
LL | }
| - temporary value only lives until here
- |
- = note: borrowed value must be valid for lifetime '_#2r...
error: aborting due to previous error
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let x = r##"lol"#;
+ //~^ ERROR unterminated raw string
+}
--- /dev/null
+error: unterminated raw string
+ --> $DIR/raw_string.rs:12:13
+ |
+LL | let x = r##"lol"#;
+ | ^ unterminated raw string
+ |
+ = note: this raw string should be terminated with `"##`
+
[dependencies]
diff = "0.1.10"
-env_logger = { version = "0.4", default-features = false }
+env_logger = { version = "0.5", default-features = false }
filetime = "0.1"
getopts = "0.2"
log = "0.4"
mod read2;
fn main() {
- env_logger::init().unwrap();
+ env_logger::init();
let config = parse_config(env::args().collect());
("mips", "mips"),
("msp430", "msp430"),
("powerpc", "powerpc"),
- ("powerpc64", "powerpc64"),
("s390x", "s390x"),
("sparc", "sparc"),
("x86_64", "x86_64"),
file.ends_with("util/struct.ThinVec.html") ||
file.ends_with("util/struct.RcSlice.html") ||
file.ends_with("layout/struct.TyLayout.html") ||
+ file.ends_with("humantime/struct.Timestamp.html") ||
+ file.ends_with("log/index.html") ||
file.ends_with("ty/struct.Slice.html") ||
file.ends_with("ty/enum.Attributes.html") ||
file.ends_with("ty/struct.SymbolName.html") {
--- /dev/null
+Subproject commit b87873eaceb75cf9342d5273f01ba2c020f61ca8
-Subproject commit b55e0fc77590cf5d23a01dedeb2104d8cbb48efc
+Subproject commit 118e078c5badd520d18b92813fd88789c8d341ab
name = "tidy"
version = "0.1.0"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
+
+[dependencies]
+serde = "1.0.8"
+serde_derive = "1.0.8"
+serde_json = "1.0.2"
//! Check license of third-party deps by inspecting src/vendor
+use std::collections::{BTreeSet, HashSet};
use std::fs::File;
use std::io::Read;
use std::path::Path;
+use std::process::Command;
+
+use serde_json;
static LICENSES: &'static [&'static str] = &[
"MIT/Apache-2.0",
"Unlicense/MIT",
];
-// These are exceptions to Rust's permissive licensing policy, and
-// should be considered bugs. Exceptions are only allowed in Rust
-// tooling. It is _crucial_ that no exception crates be dependencies
-// of the Rust runtime (std / test).
+/// These are exceptions to Rust's permissive licensing policy, and
+/// should be considered bugs. Exceptions are only allowed in Rust
+/// tooling. It is _crucial_ that no exception crates be dependencies
+/// of the Rust runtime (std / test).
static EXCEPTIONS: &'static [&'static str] = &[
- "mdbook", // MPL2, mdbook
- "openssl", // BSD+advertising clause, cargo, mdbook
- "pest", // MPL2, mdbook via handlebars
- "thread-id", // Apache-2.0, mdbook
- "toml-query", // MPL-2.0, mdbook
- "is-match", // MPL-2.0, mdbook
- "cssparser", // MPL-2.0, rustdoc
- "smallvec", // MPL-2.0, rustdoc
+ "mdbook", // MPL2, mdbook
+ "openssl", // BSD+advertising clause, cargo, mdbook
+ "pest", // MPL2, mdbook via handlebars
+ "thread-id", // Apache-2.0, mdbook
+ "toml-query", // MPL-2.0, mdbook
+ "is-match", // MPL-2.0, mdbook
+ "cssparser", // MPL-2.0, rustdoc
+ "smallvec", // MPL-2.0, rustdoc
"fuchsia-zircon-sys", // BSD-3-Clause, rustdoc, rustc, cargo
- "fuchsia-zircon", // BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)
- "cssparser-macros", // MPL-2.0, rustdoc
- "selectors", // MPL-2.0, rustdoc
- "clippy_lints", // MPL-2.0 rls
+ "fuchsia-zircon", // BSD-3-Clause, rustdoc, rustc, cargo (jobserver & tempdir)
+ "cssparser-macros", // MPL-2.0, rustdoc
+ "selectors", // MPL-2.0, rustdoc
+ "clippy_lints", // MPL-2.0 rls
+];
+
+/// Which crates to check against the whitelist?
+static WHITELIST_CRATES: &'static [CrateVersion] = &[
+ CrateVersion("rustc", "0.0.0"),
+ CrateVersion("rustc_trans", "0.0.0"),
];
+/// Whitelist of crates rustc is allowed to depend on. Avoid adding to the list if possible.
+static WHITELIST: &'static [Crate] = &[
+ Crate("aho-corasick"),
+ Crate("ar"),
+ Crate("atty"),
+ Crate("backtrace"),
+ Crate("backtrace-sys"),
+ Crate("bitflags"),
+ Crate("byteorder"),
+ Crate("cc"),
+ Crate("cfg-if"),
+ Crate("cmake"),
+ Crate("ena"),
+ Crate("env_logger"),
+ Crate("filetime"),
+ Crate("flate2"),
+ Crate("fuchsia-zircon"),
+ Crate("fuchsia-zircon-sys"),
+ Crate("humantime"),
+ Crate("jobserver"),
+ Crate("kernel32-sys"),
+ Crate("lazy_static"),
+ Crate("libc"),
+ Crate("log"),
+ Crate("log_settings"),
+ Crate("memchr"),
+ Crate("miniz-sys"),
+ Crate("num_cpus"),
+ Crate("owning_ref"),
+ Crate("parking_lot"),
+ Crate("parking_lot_core"),
+ Crate("quick-error"),
+ Crate("rand"),
+ Crate("redox_syscall"),
+ Crate("redox_termios"),
+ Crate("regex"),
+ Crate("regex-syntax"),
+ Crate("rustc-demangle"),
+ Crate("smallvec"),
+ Crate("stable_deref_trait"),
+ Crate("tempdir"),
+ Crate("termcolor"),
+    Crate("termion"),
+ Crate("thread_local"),
+ Crate("unicode-width"),
+ Crate("unreachable"),
+ Crate("utf8-ranges"),
+ Crate("void"),
+ Crate("winapi"),
+ Crate("winapi-build"),
+ Crate("winapi-i686-pc-windows-gnu"),
+ Crate("winapi-x86_64-pc-windows-gnu"),
+ Crate("wincolor"),
+];
+
+// Some types for Serde to deserialize the output of `cargo metadata` to...
+
+#[derive(Deserialize)]
+struct Output {
+ resolve: Resolve,
+}
+
+#[derive(Deserialize)]
+struct Resolve {
+ nodes: Vec<ResolveNode>,
+}
+
+#[derive(Deserialize)]
+struct ResolveNode {
+ id: String,
+ dependencies: Vec<String>,
+}
+
+/// A unique identifier for a crate
+#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]
+struct Crate<'a>(&'a str); // (name,)
+
+#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Debug, Hash)]
+struct CrateVersion<'a>(&'a str, &'a str); // (name, version)
+
+impl<'a> Crate<'a> {
+ pub fn id_str(&self) -> String {
+ format!("{} ", self.0)
+ }
+}
+
+impl<'a> CrateVersion<'a> {
+ /// Returns the struct and whether or not the dep is in-tree
+ pub fn from_str(s: &'a str) -> (Self, bool) {
+ let mut parts = s.split(" ");
+ let name = parts.next().unwrap();
+ let version = parts.next().unwrap();
+ let path = parts.next().unwrap();
+
+ let is_path_dep = path.starts_with("(path+");
+
+ (CrateVersion(name, version), is_path_dep)
+ }
+
+ pub fn id_str(&self) -> String {
+ format!("{} {}", self.0, self.1)
+ }
+}
+
+impl<'a> From<CrateVersion<'a>> for Crate<'a> {
+ fn from(cv: CrateVersion<'a>) -> Crate<'a> {
+ Crate(cv.0)
+ }
+}
+
+/// Checks the dependency at the given path. Changes `bad` to `true` if a check failed.
+///
+/// Specifically, this checks that the license is correct.
pub fn check(path: &Path, bad: &mut bool) {
+ // Check licences
let path = path.join("vendor");
assert!(path.exists(), "vendor directory missing");
let mut saw_dir = false;
- 'next_path: for dir in t!(path.read_dir()) {
+ for dir in t!(path.read_dir()) {
saw_dir = true;
let dir = t!(dir);
// skip our exceptions
- for exception in EXCEPTIONS {
- if dir.path()
+ if EXCEPTIONS.iter().any(|exception| {
+ dir.path()
.to_str()
.unwrap()
- .contains(&format!("src/vendor/{}", exception)) {
- continue 'next_path;
- }
+ .contains(&format!("src/vendor/{}", exception))
+ }) {
+ continue;
}
        let toml = dir.path().join("Cargo.toml");
-        if !check_license(&toml) {
-            *bad = true;
-        }
+        // `|=` (non-short-circuiting) so every vendored crate is still
+        // license-checked even after the first failure is recorded.
+        *bad |= !check_license(&toml);
}
assert!(saw_dir, "no vendored source");
}
+/// Checks the dependency of WHITELIST_CRATES at the given path. Changes `bad` to `true` if a check
+/// failed.
+///
+/// Specifically, this checks that the dependencies are on the WHITELIST.
+pub fn check_whitelist(path: &Path, cargo: &Path, bad: &mut bool) {
+ // Get dependencies from cargo metadata
+ let resolve = get_deps(path, cargo);
+
+ // Get the whitelist into a convenient form
+ let whitelist: HashSet<_> = WHITELIST.iter().cloned().collect();
+
+ // Check dependencies
+ let mut visited = BTreeSet::new();
+ let mut unapproved = BTreeSet::new();
+ for &krate in WHITELIST_CRATES.iter() {
+ let mut bad = check_crate_whitelist(&whitelist, &resolve, &mut visited, krate, false);
+ unapproved.append(&mut bad);
+ }
+
+ if unapproved.len() > 0 {
+ println!("Dependencies not on the whitelist:");
+ for dep in unapproved {
+ println!("* {}", dep.id_str());
+ }
+ *bad = true;
+ }
+}
+
fn check_license(path: &Path) -> bool {
if !path.exists() {
panic!("{} does not exist", path.display());
let first_quote = line.find('"');
let last_quote = line.rfind('"');
if let (Some(f), Some(l)) = (first_quote, last_quote) {
- let license = &line[f + 1 .. l];
+ let license = &line[f + 1..l];
license.into()
} else {
"bad-license-parse".into()
}
}
+
+/// Get the dependencies of the crate at the given path using `cargo metadata`.
+fn get_deps(path: &Path, cargo: &Path) -> Resolve {
+ // Run `cargo metadata` to get the set of dependencies
+ let output = Command::new(cargo)
+ .arg("metadata")
+ .arg("--format-version")
+ .arg("1")
+ .arg("--manifest-path")
+ .arg(path.join("Cargo.toml"))
+ .output()
+ .expect("Unable to run `cargo metadata`")
+ .stdout;
+ let output = String::from_utf8_lossy(&output);
+ let output: Output = serde_json::from_str(&output).unwrap();
+
+ output.resolve
+}
+
+/// Checks the dependencies of the given crate from the given cargo metadata to see if they are on
+/// the whitelist. Returns a list of illegal dependencies.
+fn check_crate_whitelist<'a, 'b>(
+ whitelist: &'a HashSet<Crate>,
+ resolve: &'a Resolve,
+ visited: &'b mut BTreeSet<CrateVersion<'a>>,
+ krate: CrateVersion<'a>,
+ must_be_on_whitelist: bool,
+) -> BTreeSet<Crate<'a>> {
+ // Will contain bad deps
+ let mut unapproved = BTreeSet::new();
+
+ // Check if we have already visited this crate
+ if visited.contains(&krate) {
+ return unapproved;
+ }
+
+ visited.insert(krate);
+
+    // Only crates reached through a non-path (registry) dependency edge must be on the
+    // whitelist; in-tree path dependencies are exempt.
+ if must_be_on_whitelist {
+ // If this dependency is not on the WHITELIST, add to bad set
+ if !whitelist.contains(&krate.into()) {
+ unapproved.insert(krate.into());
+ }
+ }
+
+ // Do a DFS in the crate graph (it's a DAG, so we know we have no cycles!)
+ let to_check = resolve
+ .nodes
+ .iter()
+ .find(|n| n.id.starts_with(&krate.id_str()))
+ .expect("crate does not exist");
+
+ for dep in to_check.dependencies.iter() {
+ let (krate, is_path_dep) = CrateVersion::from_str(dep);
+
+ let mut bad = check_crate_whitelist(whitelist, resolve, visited, krate, !is_path_dep);
+ unapproved.append(&mut bad);
+ }
+
+ unapproved
+}
#![deny(warnings)]
+extern crate serde;
+extern crate serde_json;
+#[macro_use]
+extern crate serde_derive;
+
use std::fs;
use std::path::Path;
fn filter_dirs(path: &Path) -> bool {
let skip = [
- "src/binaryen",
"src/dlmalloc",
"src/jemalloc",
"src/llvm",
"src/tools/rust-installer",
"src/tools/rustfmt",
"src/tools/miri",
+ "src/tools/lld",
"src/librustc/mir/interpret",
"src/librustc_mir/interpret",
"src/target",
+ "src/stdsimd",
];
skip.iter().any(|p| path.ends_with(p))
}
use std::env;
fn main() {
- let path = env::args_os().skip(1).next().expect("need an argument");
+ let path = env::args_os().skip(1).next().expect("need path to src");
let path = PathBuf::from(path);
+ let cargo = env::args_os().skip(2).next().expect("need path to cargo");
+ let cargo = PathBuf::from(cargo);
+
let args: Vec<String> = env::args().skip(1).collect();
let mut bad = false;
if !args.iter().any(|s| *s == "--no-vendor") {
deps::check(&path, &mut bad);
}
+ deps::check_whitelist(&path, &cargo, &mut bad);
if bad {
eprintln!("some tidy checks failed");