version.texi
.cargo
!src/vendor/**
+/src/target/
-language: minimal
+language: generic
sudo: required
dist: trusty
services:
- env: IMAGE=dist-powerpc64le-linux DEPLOY=1
- env: IMAGE=dist-s390x-linux DEPLOY=1
- env: IMAGE=dist-x86_64-freebsd DEPLOY=1
- - env: IMAGE=dist-x86_64-linux DEPLOY=1
+ - env: IMAGE=dist-x86_64-linux DEPLOY=1 ALLOW_TRY=1
- env: IMAGE=dist-x86_64-musl DEPLOY=1
- env: IMAGE=dist-x86_64-netbsd DEPLOY=1
- env: IMAGE=emscripten
echo "#### Disk usage before running script:";
df -h;
du . | sort -nr | head -n100
-
-script:
+ # If we are building a pull request, do the build if $ALLOW_PR == 1
+ # Otherwise, do the build if we are on the auto branch, or the try branch and $ALLOW_TRY == 1
- >
- if [ "$ALLOW_PR" = "" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then
- echo skipping, not a full build
+ if [[ "$TRAVIS_PULL_REQUEST" != "false" ]]; then
+ if [[ "$ALLOW_PR" == "1" ]]; then
+ SKIP_BUILD=false;
+ else
+ SKIP_BUILD=true;
+ fi
+ elif [[ "$TRAVIS_BRANCH" == "auto" || ( "$ALLOW_TRY" == "1" && "$TRAVIS_BRANCH" == "try" ) ]]; then
+ SKIP_BUILD=false;
+ else
+ SKIP_BUILD=true;
+ fi
+
+ if [[ "$SKIP_BUILD" == true ]]; then
+ export RUN_SCRIPT="echo 'skipping, not a full build'";
else
- stamp src/ci/init_repo.sh . "$HOME/rustsrc" &&
+ RUN_SCRIPT="stamp src/ci/init_repo.sh . $HOME/rustsrc";
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
- stamp src/ci/run.sh;
+ export RUN_SCRIPT="$RUN_SCRIPT && stamp src/ci/run.sh";
else
- stamp src/ci/docker/run.sh $IMAGE;
+ export RUN_SCRIPT="$RUN_SCRIPT && stamp src/ci/docker/run.sh $IMAGE";
fi
fi
+script:
+ - sh -x -c "$RUN_SCRIPT"
+
after_success:
- >
echo "#### Build successful; Disk usage after running script:";
branch: auto
condition: $DEPLOY = 1
+ - provider: s3
+ bucket: rust-lang-ci
+ skip_cleanup: true
+ local_dir: deploy
+ upload_dir: rustc-builds-try
+ acl: public_read
+ region: us-east-1
+ access_key_id: AKIAIPQVNYF2T3DTYIWQ
+ secret_access_key:
+ secure: "FBqDqOTeIPMu6v/WYPf4CFSlh9rLRZGKVtpLa5KkyuOhXRTrnEzBduEtS8/FMIxdQImvurhSvxWvqRybMOi4qoVfjMqqpHAI7uBbidbrvAcJoHNsx6BgUNVCIoH6a0UsAjTUtm6/YPIpzbHoLZXPL0GrHPMk6Mu04qVSmcYNWn4="
+ on:
+ branch: try
+ condition: $DEPLOY = 1 && $ALLOW_TRY = 1
+
# this is the same as the above deployment provider except that it uploads to
# a slightly different directory and has a different trigger
- provider: s3
+Version 1.18.0 (2017-06-08)
+===========================
+
+Language
+--------
+
+- [Stabilize pub(restricted)][40556] `pub` can now accept a module path to
+ make the item visible to just that module tree. Also accepts the keyword
+ `crate` to make something public to the whole crate but not users of the
+ library. Example: `pub(crate) mod utils;`. [RFC 1422].
+- [Stabilize `#![windows_subsystem]` attribute][40870] a conservative exposure
+  of the `/SUBSYSTEM` linker flag on Windows platforms. [RFC 1665].
+- [Refactor of trait object type parsing][40043] Now `ty` in macros can accept
+ types like `Write + Send`, trailing `+` are now supported in trait objects,
+ and better error reporting for trait objects starting with `?Sized`.
+- [0e+10 is now a valid floating point literal][40589]
+- [Now warns if you bind a lifetime parameter to 'static][40734]
+- [Tuples, Enum variant fields, and structs with no `repr` attribute or with
+ `#[repr(Rust)]` are reordered to minimize padding and produce a smaller
+ representation in some cases.][40377]
+
+Compiler
+--------
+
+- [rustc can now emit mir with `--emit mir`][39891]
+- [Improved LLVM IR for trivial functions][40367]
+- [Added explanation for E0090(Wrong number of lifetimes are supplied)][40723]
+- [rustc compilation is now 15%-20% faster][41469] Thanks to optimisation
+ opportunities found through profiling
+- [Improved backtrace formatting when panicking][38165]
+
+Libraries
+---------
+
+- [Specialized `Vec::from_iter` being passed `vec::IntoIter`][40731] if the
+ iterator hasn't been advanced the original `Vec` is reassembled with no actual
+ iteration or reallocation.
+- [Simplified HashMap Bucket interface][40561] provides performance
+ improvements for iterating and cloning.
+- [Specialize Vec::from_elem to use calloc][40409]
+- [Fixed Race condition in fs::create_dir_all][39799]
+- [No longer caching stdio on Windows][40516]
+- [Optimized insertion sort in slice][40807] insertion sort is now ~2.50%
+  faster in some cases and up to 12.50% faster in one case.
+- [Optimized `AtomicBool::fetch_nand`][41143]
+
+Stabilized APIs
+---------------
+
+- [`Child::try_wait`]
+- [`HashMap::retain`]
+- [`HashSet::retain`]
+- [`PeekMut::pop`]
+- [`TcpStream::peek`]
+- [`UdpSocket::peek`]
+- [`UdpSocket::peek_from`]
+
+Cargo
+-----
+
+- [Added partial Pijul support][cargo/3842] Pijul is a version control system in Rust.
+ You can now create new cargo projects with Pijul using `cargo new --vcs pijul`
+- [Now always emits build script warnings for crates that fail to build][cargo/3847]
+- [Added Android build support][cargo/3885]
+- [Added `--bins` and `--tests` flags][cargo/3901] now you can build all programs
+ of a certain type, for example `cargo build --bins` will build all
+ binaries.
+- [Added support for haiku][cargo/3952]
+
+Misc
+----
+
+- [rustdoc can now use pulldown-cmark with the `--enable-commonmark` flag][40338]
+- [Added rust-windbg script for better debugging on Windows][39983]
+- [Rust now uses the official cross compiler for NetBSD][40612]
+- [rustdoc now accepts `#` at the start of files][40828]
+- [Fixed jemalloc support for musl][41168]
+
+Compatibility Notes
+-------------------
+
+- [Changes to how the `0` flag works in format!][40241] Padding zeroes are now
+ always placed after the sign if it exists and before the digits. With the `#`
+ flag the zeroes are placed after the prefix and before the digits.
+- [Due to the struct field optimisation][40377], using `transmute` on structs
+ that have no `repr` attribute or `#[repr(Rust)]` will no longer work. This has
+ always been undefined behavior, but is now more likely to break in practice.
+- [The refactor of trait object type parsing][40043] fixed a bug where `+` was
+ receiving the wrong priority parsing things like `&for<'a> Tr<'a> + Send` as
+ `&(for<'a> Tr<'a> + Send)` instead of `(&for<'a> Tr<'a>) + Send`
+- [Overlapping inherent `impl`s are now a hard error][40728]
+- [`PartialOrd` and `Ord` must agree on the ordering.][41270]
+- [`rustc main.rs -o out --emit=asm,llvm-ir`][41085] Now will output
+ `out.asm` and `out.ll` instead of only one of the filetypes.
+- [Calling a function that returns `Self` will no longer work][41805] when
+ the size of `Self` cannot be statically determined.
+- [rustc now builds with a "pthreads" flavour of MinGW for Windows GNU][40805]
+ this has caused a few regressions namely:
+
+ - Changed the link order of local static/dynamic libraries (respecting the
+    order given rather than having the compiler reorder).
+ - Changed how MinGW is linked, native code linked to dynamic libraries
+ may require manually linking to the gcc support library (for the native
+ code itself)
+
+[38165]: https://github.com/rust-lang/rust/pull/38165
+[39799]: https://github.com/rust-lang/rust/pull/39799
+[39891]: https://github.com/rust-lang/rust/pull/39891
+[39983]: https://github.com/rust-lang/rust/pull/39983
+[40043]: https://github.com/rust-lang/rust/pull/40043
+[40241]: https://github.com/rust-lang/rust/pull/40241
+[40338]: https://github.com/rust-lang/rust/pull/40338
+[40367]: https://github.com/rust-lang/rust/pull/40367
+[40377]: https://github.com/rust-lang/rust/pull/40377
+[40409]: https://github.com/rust-lang/rust/pull/40409
+[40516]: https://github.com/rust-lang/rust/pull/40516
+[40556]: https://github.com/rust-lang/rust/pull/40556
+[40561]: https://github.com/rust-lang/rust/pull/40561
+[40589]: https://github.com/rust-lang/rust/pull/40589
+[40612]: https://github.com/rust-lang/rust/pull/40612
+[40723]: https://github.com/rust-lang/rust/pull/40723
+[40728]: https://github.com/rust-lang/rust/pull/40728
+[40731]: https://github.com/rust-lang/rust/pull/40731
+[40734]: https://github.com/rust-lang/rust/pull/40734
+[40805]: https://github.com/rust-lang/rust/pull/40805
+[40807]: https://github.com/rust-lang/rust/pull/40807
+[40828]: https://github.com/rust-lang/rust/pull/40828
+[40870]: https://github.com/rust-lang/rust/pull/40870
+[41085]: https://github.com/rust-lang/rust/pull/41085
+[41143]: https://github.com/rust-lang/rust/pull/41143
+[41168]: https://github.com/rust-lang/rust/pull/41168
+[41270]: https://github.com/rust-lang/rust/issues/41270
+[41469]: https://github.com/rust-lang/rust/pull/41469
+[41805]: https://github.com/rust-lang/rust/issues/41805
+[RFC 1422]: https://github.com/rust-lang/rfcs/blob/master/text/1422-pub-restricted.md
+[RFC 1665]: https://github.com/rust-lang/rfcs/blob/master/text/1665-windows-subsystem.md
+[`Child::try_wait`]: https://doc.rust-lang.org/std/process/struct.Child.html#method.try_wait
+[`HashMap::retain`]: https://doc.rust-lang.org/std/collections/struct.HashMap.html#method.retain
+[`HashSet::retain`]: https://doc.rust-lang.org/std/collections/struct.HashSet.html#method.retain
+[`PeekMut::pop`]: https://doc.rust-lang.org/std/collections/binary_heap/struct.PeekMut.html#method.pop
+[`TcpStream::peek`]: https://doc.rust-lang.org/std/net/struct.TcpStream.html#method.peek
+[`UdpSocket::peek_from`]: https://doc.rust-lang.org/std/net/struct.UdpSocket.html#method.peek_from
+[`UdpSocket::peek`]: https://doc.rust-lang.org/std/net/struct.UdpSocket.html#method.peek
+[cargo/3842]: https://github.com/rust-lang/cargo/pull/3842
+[cargo/3847]: https://github.com/rust-lang/cargo/pull/3847
+[cargo/3885]: https://github.com/rust-lang/cargo/pull/3885
+[cargo/3901]: https://github.com/rust-lang/cargo/pull/3901
+[cargo/3952]: https://github.com/rust-lang/cargo/pull/3952
+
+
Version 1.17.0 (2017-04-27)
===========================
dependencies = [
"build_helper 0.1.0",
"core 0.0.0",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.0.0",
]
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
"build_helper 0.1.0",
"cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"shell-escape 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
dependencies = [
"build_helper 0.1.0",
"core 0.0.0",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
"openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.0.0"
dependencies = [
"build_helper 0.1.0",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "gcc"
-version = "0.3.46"
+version = "0.3.50"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
"flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"walkdir 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"xz2 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
dependencies = [
"cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
"curl-sys 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
version = "0.9.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.0.0"
dependencies = [
"build_helper 0.1.0",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_bitflags 0.0.0",
]
version = "0.0.0"
dependencies = [
"flate 0.0.0",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"arena 0.0.0",
"build_helper 0.1.0",
"env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"collections 0.0.0",
"compiler_builtins 0.0.0",
"core 0.0.0",
- "gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.0.0",
"panic_abort 0.0.0",
"panic_unwind 0.0.0",
[[package]]
name = "tar"
-version = "0.4.12"
+version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"checksum flate2 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)" = "36df0166e856739905cd3d7e0b210fe818592211a008862599845e012d8d304c"
"checksum foreign-types 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3e4056b9bd47f8ac5ba12be771f77a0dae796d1bbaaf5fd0b9c2d38b69b8a29d"
"checksum fs2 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "34edaee07555859dc13ca387e6ae05686bb4d0364c95d649b6dab959511f4baf"
-"checksum gcc 0.3.46 (registry+https://github.com/rust-lang/crates.io-index)" = "181e3cebba1d663bd92eb90e2da787e10597e027eb00de8d742b260a7850948f"
+"checksum gcc 0.3.50 (registry+https://github.com/rust-lang/crates.io-index)" = "5f837c392f2ea61cb1576eac188653df828c861b7137d74ea4a5caa89621f9e6"
"checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
"checksum git2 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "9de9df4358c17e448a778d90cd0272e1dab5eae30244502333fa2001c4e24357"
"checksum syntex_pos 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13ad4762fe52abc9f4008e85c4fb1b1fe3aa91ccb99ff4826a439c7c598e1047"
"checksum syntex_syntax 0.52.0 (registry+https://github.com/rust-lang/crates.io-index)" = "76a302e717e348aa372ff577791c3832395650073b8d8432f8b3cb170b34afde"
"checksum syntex_syntax 0.58.1 (registry+https://github.com/rust-lang/crates.io-index)" = "6e0e4dbae163dd98989464c23dd503161b338790640e11537686f2ef0f25c791"
-"checksum tar 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "ab0ef9ead2fe0aa9e18475a96a207bfd5143f4124779ef7429503a8665416ce8"
+"checksum tar 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "281285b717926caa919ad905ef89c63d75805c7d89437fb873100925a53f2b1b"
"checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
"checksum term 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d168af3930b369cfe245132550579d47dfd873d69470755a19c2c6568dbbd989"
"checksum term_size 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "07b6c1ac5b3fffd75073276bca1ceed01f67a28537097a2a9539e116e50fb21a"
toml = "0.1"
getopts = "0.2"
rustc-serialize = "0.3"
-gcc = "0.3.46"
+gcc = "0.3.50"
libc = "0.2"
if let Some(rpath) = rpath {
cmd.arg("-C").arg(format!("link-args={}", rpath));
}
-
- if let Ok(s) = env::var("RUSTFLAGS") {
- for flag in s.split_whitespace() {
- cmd.arg(flag);
- }
- }
}
if target.contains("pc-windows-msvc") {
}
}
+fn try_run(build: &Build, cmd: &mut Command) {
+ if build.flags.cmd.no_fail_fast() {
+ if !build.try_run(cmd) {
+ let failures = build.delayed_failures.get();
+ build.delayed_failures.set(failures + 1);
+ }
+ } else {
+ build.run(cmd);
+ }
+}
+
+fn try_run_quiet(build: &Build, cmd: &mut Command) {
+ if build.flags.cmd.no_fail_fast() {
+ if !build.try_run_quiet(cmd) {
+ let failures = build.delayed_failures.get();
+ build.delayed_failures.set(failures + 1);
+ }
+ } else {
+ build.run_quiet(cmd);
+ }
+}
+
/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler.
///
/// This tool in `src/tools` will verify the validity of all our links in the
let compiler = Compiler::new(0, host);
let _time = util::timeit();
- build.run(build.tool_cmd(&compiler, "linkchecker")
- .arg(build.out.join(host).join("doc")));
+ try_run(build, build.tool_cmd(&compiler, "linkchecker")
+ .arg(build.out.join(host).join("doc")));
}
/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler.
let _time = util::timeit();
let mut cmd = Command::new(build.tool(&Compiler::new(0, host), "cargotest"));
build.prepare_tool_cmd(compiler, &mut cmd);
- build.run(cmd.arg(&build.cargo)
- .arg(&out_dir)
- .env("RUSTC", build.compiler_path(compiler))
- .env("RUSTDOC", build.rustdoc(compiler)))
+ try_run(build, cmd.arg(&build.cargo)
+ .arg(&out_dir)
+ .env("RUSTC", build.compiler_path(compiler))
+ .env("RUSTDOC", build.rustdoc(compiler)));
}
/// Runs `cargo test` for `cargo` packaged with Rust.
let mut cargo = build.cargo(compiler, Mode::Tool, host, "test");
cargo.arg("--manifest-path").arg(build.src.join("src/tools/cargo/Cargo.toml"));
+ if build.flags.cmd.no_fail_fast() {
+ cargo.arg("--no-fail-fast");
+ }
// Don't build tests dynamically, just a pain to work with
cargo.env("RUSTC_NO_PREFER_DYNAMIC", "1");
// available.
cargo.env("CFG_DISABLE_CROSS_TESTS", "1");
- build.run(cargo.env("PATH", newpath));
+ try_run(build, cargo.env("PATH", newpath));
}
/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler.
/// otherwise just implements a few lint-like checks that are specific to the
/// compiler itself.
pub fn tidy(build: &Build, host: &str) {
+ let _folder = build.fold_output(|| "tidy");
println!("tidy check ({})", host);
let compiler = Compiler::new(0, host);
let mut cmd = build.tool_cmd(&compiler, "tidy");
if !build.config.vendor {
cmd.arg("--no-vendor");
}
- build.run(&mut cmd);
+ if build.config.quiet_tests {
+ cmd.arg("--quiet");
+ }
+ try_run(build, &mut cmd);
}
fn testdir(build: &Build, host: &str) -> PathBuf {
target: &str,
mode: &str,
suite: &str) {
+ let _folder = build.fold_output(|| format!("test_{}", suite));
println!("Check compiletest suite={} mode={} ({} -> {})",
suite, mode, compiler.host, target);
let mut cmd = Command::new(build.tool(&Compiler::new(0, compiler.host),
cmd.arg("--android-cross-path").arg("");
}
+ build.ci_env.force_coloring_in_ci(&mut cmd);
+
let _time = util::timeit();
- build.run(&mut cmd);
+ try_run(build, &mut cmd);
}
/// Run `rustdoc --test` for all documentation in `src/doc`.
// tests for all files that end in `*.md`
let mut stack = vec![build.src.join("src/doc")];
let _time = util::timeit();
+ let _folder = build.fold_output(|| "test_docs");
while let Some(p) = stack.pop() {
if p.is_dir() {
/// generate a markdown file from the error indexes of the code base which is
/// then passed to `rustdoc --test`.
pub fn error_index(build: &Build, compiler: &Compiler) {
+ let _folder = build.fold_output(|| "test_error_index");
println!("Testing error-index stage{}", compiler.stage);
let dir = testdir(build, compiler.host);
cmd.arg(markdown);
cmd.env("RUSTC_BOOTSTRAP", "1");
- let mut test_args = build.flags.cmd.test_args().join(" ");
- if build.config.quiet_tests {
- test_args.push_str(" --quiet");
- }
+ let test_args = build.flags.cmd.test_args().join(" ");
cmd.arg("--test-args").arg(test_args);
- build.run(&mut cmd);
+ if build.config.quiet_tests {
+ try_run_quiet(build, &mut cmd);
+ } else {
+ try_run(build, &mut cmd);
+ }
}
/// Run all unit tests plus documentation tests for an entire crate DAG defined
}
_ => panic!("can only test libraries"),
};
+ let _folder = build.fold_output(|| {
+ format!("{}_stage{}-{}", test_kind.subcommand(), compiler.stage, name)
+ });
println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage,
compiler.host, target);
cargo.arg("--manifest-path")
.arg(build.src.join(path).join("Cargo.toml"))
.arg("--features").arg(features);
+ if test_kind.subcommand() == "test" && build.flags.cmd.no_fail_fast() {
+ cargo.arg("--no-fail-fast");
+ }
match krate {
Some(krate) => {
krate_remote(build, &compiler, target, mode);
} else {
cargo.args(&build.flags.cmd.test_args());
- build.run(&mut cargo);
+ try_run(build, &mut cargo);
}
}
if build.config.quiet_tests {
cmd.arg("--quiet");
}
- build.run(&mut cmd);
+ try_run(build, &mut cmd);
}
}
cmd.arg("--quiet");
}
cmd.args(&build.flags.cmd.test_args());
- build.run(&mut cmd);
+ try_run(build, &mut cmd);
}
}
.current_dir(build.src.join("src/bootstrap"))
.env("CARGO_TARGET_DIR", build.out.join("bootstrap"))
.env("RUSTC", &build.rustc);
+ if build.flags.cmd.no_fail_fast() {
+ cmd.arg("--no-fail-fast");
+ }
cmd.arg("--").args(&build.flags.cmd.test_args());
- build.run(&mut cmd);
+ try_run(build, &mut cmd);
}
let libdir = build.sysroot_libdir(compiler, target);
t!(fs::create_dir_all(&libdir));
+ let _folder = build.fold_output(|| format!("stage{}-std", compiler.stage));
println!("Building stage{} std artifacts ({} -> {})", compiler.stage,
compiler.host, target);
/// the build using the `compiler` targeting the `target` architecture. The
/// artifacts created will also be linked into the sysroot directory.
pub fn test(build: &Build, target: &str, compiler: &Compiler) {
+ let _folder = build.fold_output(|| format!("stage{}-test", compiler.stage));
println!("Building stage{} test artifacts ({} -> {})", compiler.stage,
compiler.host, target);
let out_dir = build.cargo_out(compiler, Mode::Libtest, target);
/// the `compiler` targeting the `target` architecture. The artifacts
/// created will also be linked into the sysroot directory.
pub fn rustc(build: &Build, target: &str, compiler: &Compiler) {
+ let _folder = build.fold_output(|| format!("stage{}-rustc", compiler.stage));
println!("Building stage{} compiler artifacts ({} -> {})",
compiler.stage, compiler.host, target);
/// This will build the specified tool with the specified `host` compiler in
/// `stage` into the normal cargo output directory.
pub fn tool(build: &Build, stage: u32, target: &str, tool: &str) {
+ let _folder = build.fold_output(|| format!("stage{}-{}", stage, tool));
println!("Building stage{} tool {} ({})", stage, tool, target);
let compiler = Compiler::new(stage, &build.config.build);
Test {
paths: Vec<PathBuf>,
test_args: Vec<String>,
+ no_fail_fast: bool,
},
Bench {
paths: Vec<PathBuf>,
// Some subcommands get extra options
match subcommand.as_str() {
- "test" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); },
+ "test" => {
+ opts.optflag("", "no-fail-fast", "Run all tests regardless of failure");
+ opts.optmulti("", "test-args", "extra arguments", "ARGS");
+ },
"bench" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); },
_ => { },
};
Subcommand::Test {
paths: paths,
test_args: matches.opt_strs("test-args"),
+ no_fail_fast: matches.opt_present("no-fail-fast"),
}
}
"bench" => {
_ => Vec::new(),
}
}
+
+ pub fn no_fail_fast(&self) -> bool {
+ match *self {
+ Subcommand::Test { no_fail_fast, .. } => no_fail_fast,
+ _ => false,
+ }
+ }
}
fn split(s: Vec<String>) -> Vec<String> {
#[cfg(unix)]
extern crate libc;
+use std::cell::Cell;
use std::cmp;
use std::collections::HashMap;
use std::env;
use std::path::{PathBuf, Path};
use std::process::Command;
-use build_helper::{run_silent, run_suppressed, output, mtime};
+use build_helper::{run_silent, run_suppressed, try_run_silent, try_run_suppressed, output, mtime};
-use util::{exe, libdir, add_lib_path};
+use util::{exe, libdir, add_lib_path, OutputFolder, CiEnv};
mod cc;
mod channel;
crates: HashMap<String, Crate>,
is_sudo: bool,
src_is_git: bool,
+ ci_env: CiEnv,
+ delayed_failures: Cell<usize>,
}
#[derive(Debug)]
lldb_python_dir: None,
is_sudo: is_sudo,
src_is_git: src_is_git,
+ ci_env: CiEnv::current(),
+ delayed_failures: Cell::new(0),
}
}
if self.config.vendor || self.is_sudo {
cargo.arg("--frozen");
}
+
+ self.ci_env.force_coloring_in_ci(&mut cargo);
+
return cargo
}
run_suppressed(cmd)
}
+ /// Runs a command, printing out nice contextual information if it fails.
+ /// Exits if the command failed to execute at all, otherwise returns its
+ /// `status.success()`.
+ fn try_run(&self, cmd: &mut Command) -> bool {
+ self.verbose(&format!("running: {:?}", cmd));
+ try_run_silent(cmd)
+ }
+
+ /// Runs a command, printing out nice contextual information if it fails.
+ /// Exits if the command failed to execute at all, otherwise returns its
+ /// `status.success()`.
+ fn try_run_quiet(&self, cmd: &mut Command) -> bool {
+ self.verbose(&format!("running: {:?}", cmd));
+ try_run_suppressed(cmd)
+ }
+
/// Prints a message if this build is configured in verbose mode.
fn verbose(&self, msg: &str) {
if self.flags.verbose() || self.config.verbose() {
"nightly" | _ => true,
}
}
+
+ /// Fold the output of the commands after this method into a group. The fold
+ /// ends when the returned object is dropped. Folding can only be used in
+ /// the Travis CI environment.
+ pub fn fold_output<D, F>(&self, name: F) -> Option<OutputFolder>
+ where D: Into<String>, F: FnOnce() -> D
+ {
+ if self.ci_env == CiEnv::Travis {
+ Some(OutputFolder::new(name().into()))
+ } else {
+ None
+ }
+ }
}
impl<'a> Compiler<'a> {
drop(fs::remove_dir_all(&out_dir));
}
+ let _folder = build.fold_output(|| "llvm");
println!("Building LLVM for {}", target);
let _time = util::timeit();
t!(fs::create_dir_all(&out_dir));
configure_compilers(&mut cfg);
if env::var_os("SCCACHE_ERROR_LOG").is_some() {
- cfg.env("RUST_LOG", "sccache=info");
+ cfg.env("RUST_LOG", "sccache=warn");
}
// FIXME: we don't actually need to build all LLVM tools and all LLVM
return
}
+ let _folder = build.fold_output(|| "build_test_helpers");
println!("Building test helpers");
t!(fs::create_dir_all(&dst));
let mut cfg = gcc::Config::new();
use std::collections::{BTreeMap, HashSet, HashMap};
use std::mem;
+use std::process;
use check::{self, TestKind};
use compile;
let (kind, paths) = match self.build.flags.cmd {
Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
- Subcommand::Test { ref paths, test_args: _ } => (Kind::Test, &paths[..]),
- Subcommand::Bench { ref paths, test_args: _ } => (Kind::Bench, &paths[..]),
+ Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]),
+ Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
Subcommand::Clean => panic!(),
self.build.verbose(&format!("executing step {:?}", step));
(self.rules[step.name].run)(step);
}
+
+ // Check for postponed failures from `test --no-fail-fast`.
+ let failures = self.build.delayed_failures.get();
+ if failures > 0 {
+ println!("\n{} command(s) did not execute successfully.\n", failures);
+ process::exit(1);
+ }
}
/// From the top level targets `steps` generate a topological ordering of
use std::env;
use std::ffi::OsString;
use std::fs;
-use std::io;
+use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::process::Command;
-use std::time::Instant;
+use std::time::{SystemTime, Instant};
use filetime::{self, FileTime};
}
}
}
+
+/// An RAII structure that indicates all output until this instance is dropped
+/// is part of the same group.
+///
+/// On Travis CI, this output will be folded by default, together with the
+/// elapsed time in this block. This reduces noise from unnecessary logs,
+/// allowing developers to quickly identify the error.
+///
+/// Travis CI supports folding by printing `travis_fold:start:<name>` and
+/// `travis_fold:end:<name>` around the block. Time elapsed is recognized
+/// similarly with `travis_time:[start|end]:<name>`. These are undocumented, but
+/// can easily be deduced from source code of the [Travis build commands].
+///
+/// [Travis build commands]:
+/// https://github.com/travis-ci/travis-build/blob/f603c0089/lib/travis/build/templates/header.sh
+pub struct OutputFolder {
+    /// Group name echoed in both the start and end fold markers; the two
+    /// must match for Travis to pair them.
+    name: String,
+    start_time: SystemTime, // we need SystemTime to get the UNIX timestamp.
+}
+
+impl OutputFolder {
+    /// Creates a new output folder with the given group name.
+    ///
+    /// Prints the fold/timer *start* markers immediately; the matching *end*
+    /// markers are emitted when the returned value is dropped.
+    /// NOTE(review): `name` is interpolated into the markers verbatim —
+    /// callers appear to pass only simple identifiers; confirm Travis accepts
+    /// any other characters before widening usage.
+    pub fn new(name: String) -> OutputFolder {
+        // "\r" moves the cursor to the beginning of the line, and "\x1b[0K" is
+        // the ANSI escape code to clear from the cursor to end of line.
+        // Travis seems to have trouble when _not_ using "\r\x1b[0K", that will
+        // randomly put lines to the top of the webpage.
+        print!("travis_fold:start:{0}\r\x1b[0Ktravis_time:start:{0}\r\x1b[0K", name);
+        OutputFolder {
+            name,
+            start_time: SystemTime::now(),
+        }
+    }
+}
+
+impl Drop for OutputFolder {
+    fn drop(&mut self) {
+        use std::time::*;
+        use std::u64;
+
+        // Converts a SystemTime difference into nanoseconds for the Travis
+        // timer marker. If the clock went backwards the subtraction yields
+        // `Err`; mapping that to u64::MAX makes the anomaly obvious in the
+        // log instead of panicking mid-build.
+        fn to_nanos(duration: Result<Duration, SystemTimeError>) -> u64 {
+            match duration {
+                Ok(d) => d.as_secs() * 1_000_000_000 + d.subsec_nanos() as u64,
+                Err(_) => u64::MAX,
+            }
+        }
+
+        let end_time = SystemTime::now();
+        let duration = end_time.duration_since(self.start_time);
+        let start = self.start_time.duration_since(UNIX_EPOCH);
+        let finish = end_time.duration_since(UNIX_EPOCH);
+        println!(
+            "travis_fold:end:{0}\r\x1b[0K\n\
+             travis_time:end:{0}:start={1},finish={2},duration={3}\r\x1b[0K",
+            self.name,
+            to_nanos(start),
+            to_nanos(finish),
+            to_nanos(duration)
+        );
+        // Flush so the end marker is not buffered past later output, which
+        // would mis-scope the fold.
+        io::stdout().flush().unwrap();
+    }
+}
+
+/// The CI environment rustbuild is running in. This mainly affects how the logs
+/// are printed.
+#[derive(Copy, Clone, PartialEq, Eq, Debug)]
+pub enum CiEnv {
+    /// Not a CI environment.
+    None,
+    /// The Travis CI environment, for Linux (including Docker) and macOS builds.
+    Travis,
+    /// The AppVeyor environment, for Windows builds.
+    AppVeyor,
+}
+
+impl CiEnv {
+    /// Obtains the current CI environment.
+    pub fn current() -> CiEnv {
+        // Note the differing capitalization the two services use:
+        // Travis sets TRAVIS=true while AppVeyor sets APPVEYOR=True.
+        if env::var("TRAVIS").ok().map_or(false, |e| &*e == "true") {
+            CiEnv::Travis
+        } else if env::var("APPVEYOR").ok().map_or(false, |e| &*e == "True") {
+            CiEnv::AppVeyor
+        } else {
+            CiEnv::None
+        }
+    }
+
+    /// If in a CI environment, forces the command to run with colors.
+    pub fn force_coloring_in_ci(self, cmd: &mut Command) {
+        if self != CiEnv::None {
+            // Due to use of stamp/docker, the output stream of rustbuild is not
+            // a TTY in CI, so coloring is by-default turned off.
+            // The explicit `TERM=xterm` environment is needed for
+            // `--color always` to actually work. This env var was lost when
+            // compiling through the Makefile. Very strange.
+            cmd.env("TERM", "xterm").args(&["--color", "always"]);
+        }
+    }
+}
\ No newline at end of file
}
+/// Runs `cmd`, terminating the whole process with exit code 1 on failure.
pub fn run_silent(cmd: &mut Command) {
+    if !try_run_silent(cmd) {
+        std::process::exit(1);
+    }
+}
+
+/// Runs `cmd` and reports success as a `bool` instead of aborting, so
+/// callers can postpone failures (e.g. `test --no-fail-fast`) and report
+/// them all at the end.
+pub fn try_run_silent(cmd: &mut Command) -> bool {
    let status = match cmd.status() {
        Ok(status) => status,
+        // Failing to spawn at all is still fatal: nothing meaningful ran.
        Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}",
                                cmd, e)),
    };
    if !status.success() {
-        fail(&format!("command did not execute successfully: {:?}\n\
-                       expected success, got: {}",
-                      cmd,
-                      status));
+        println!("\n\ncommand did not execute successfully: {:?}\n\
+                  expected success, got: {}\n\n",
+                 cmd,
+                 status);
    }
+    status.success()
}
+/// Runs `cmd` with captured output, terminating the whole process with exit
+/// code 1 on failure.
pub fn run_suppressed(cmd: &mut Command) {
+    if !try_run_suppressed(cmd) {
+        std::process::exit(1);
+    }
+}
+
+/// Runs `cmd` capturing stdout/stderr; on failure dumps both streams and
+/// returns `false` instead of aborting, so failures can be postponed.
+pub fn try_run_suppressed(cmd: &mut Command) -> bool {
    let output = match cmd.output() {
        Ok(status) => status,
+        // Failing to spawn at all is still fatal: nothing meaningful ran.
        Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}",
                                cmd, e)),
    };
    if !output.status.success() {
-        fail(&format!("command did not execute successfully: {:?}\n\
-                       expected success, got: {}\n\n\
-                       stdout ----\n{}\n\
-                       stderr ----\n{}\n",
-                      cmd,
-                      output.status,
-                      String::from_utf8_lossy(&output.stdout),
-                      String::from_utf8_lossy(&output.stderr)));
+        println!("\n\ncommand did not execute successfully: {:?}\n\
+                  expected success, got: {}\n\n\
+                  stdout ----\n{}\n\
+                  stderr ----\n{}\n\n",
+                 cmd,
+                 output.status,
+                 String::from_utf8_lossy(&output.stdout),
+                 String::from_utf8_lossy(&output.stderr));
    }
+    output.status.success()
}
pub fn gnu_target(target: &str) -> String {
source "$ci_dir/shared.sh"
+travis_fold start build_docker
+travis_time_start
+
if [ -f "$docker_dir/$image/Dockerfile" ]; then
retry docker \
build \
exit 1
fi
+travis_fold end build_docker
+travis_time_finish
+
objdir=$root_dir/obj
mkdir -p $HOME/.cargo
--env DEPLOY=$DEPLOY \
--env DEPLOY_ALT=$DEPLOY_ALT \
--env LOCAL_USER_ID=`id -u` \
+ --env TRAVIS=${TRAVIS-false} \
+ --env TRAVIS_BRANCH \
--volume "$HOME/.cargo:/cargo" \
--volume "$HOME/rustsrc:$HOME/rustsrc" \
--privileged \
set -o pipefail
set -o nounset
-set -o xtrace
-
ci_dir=$(cd $(dirname $0) && pwd)
. "$ci_dir/shared.sh"
+travis_fold start init_repo
+
REPO_DIR="$1"
CACHE_DIR="$2"
# Wipe the cache if it's not valid, or mark it as invalid while we update it
if [ ! -f "$cache_valid_file" ]; then
+ echo "Invalid cache, wiping ($cache_valid_file missing)"
rm -rf "$CACHE_DIR"
mkdir "$CACHE_DIR"
else
rm -rf "$CACHE_DIR"
mkdir "$CACHE_DIR"
else
+ echo "Valid cache ($cache_valid_file exists)"
rm "$cache_valid_file"
fi
fi
+travis_fold start update_cache
+travis_time_start
+
# Update the cache (a pristine copy of the rust source master)
if [ ! -d "$cache_src_dir/.git" ]; then
retry sh -c "rm -rf $cache_src_dir && mkdir -p $cache_src_dir && \
git submodule deinit -f . && git submodule sync && git submodule update --init"
# Cache was updated without errors, mark it as valid
+echo "Refreshed cache (touch $cache_valid_file)"
touch "$cache_valid_file"
+travis_fold end update_cache
+travis_time_finish
+
+travis_fold start update_submodules
+travis_time_start
+
# Update the submodules of the repo we're in, using the pristine repo as
# a cache for any object files
# No, `git submodule foreach` won't work:
retry sh -c "git submodule deinit -f $module && \
git submodule update --init --reference $cache_src_dir/$module $module"
done
+
+travis_fold end update_submodules
+travis_time_finish
+
+travis_fold end init_repo
ci_dir=`cd $(dirname $0) && pwd`
source "$ci_dir/shared.sh"
+if [ "$TRAVIS" == "true" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then
+ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-quiet-tests"
+fi
+
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-sccache"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --disable-manage-submodules"
RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --enable-locked-deps"
fi
fi
+travis_fold start configure
+travis_time_start
$SRC/configure $RUST_CONFIGURE_ARGS
+travis_fold end configure
+travis_time_finish
+
+travis_fold start make-prepare
+travis_time_start
retry make prepare
+travis_fold end make-prepare
+travis_time_finish
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
ncpus=$(sysctl -n hw.ncpu)
ncpus=$(grep processor /proc/cpuinfo | wc -l)
fi
-set -x
-
if [ ! -z "$SCRIPT" ]; then
sh -x -c "$SCRIPT"
else
- make -j $ncpus tidy
- make -j $ncpus
- make $RUST_CHECK_TARGET -j $ncpus
+  # Runs `make -j $ncpus <target>` wrapped in a Travis fold/timer named
+  # "make-<target>", preserving make's exit status for the caller.
+  do_make() {
+    travis_fold start "make-$1"
+    travis_time_start
+    echo "make -j $ncpus $1"
+    make -j $ncpus "$1"
+    # Capture make's status before the fold/timer helpers overwrite $?.
+    local retval=$?
+    travis_fold end "make-$1"
+    travis_time_finish
+    return $retval
+  }
+
+ do_make tidy
+ do_make all
+ do_make "$RUST_CHECK_TARGET"
fi
}
done
}
+
+# Define travis_fold / travis_time_* only when the environment (the real
+# Travis header on Travis workers) has not already provided them.
+if ! declare -F travis_fold; then
+  if [ "${TRAVIS-false}" = 'true' ]; then
+    # This is a trimmed down copy of
+    # https://github.com/travis-ci/travis-build/blob/master/lib/travis/build/templates/header.sh
+    travis_fold() {
+      echo -en "travis_fold:$1:$2\r\033[0K"
+    }
+    travis_time_start() {
+      # Pseudo-unique id pairing this timer's start and end markers.
+      travis_timer_id=$(printf %08x $(( RANDOM * RANDOM )))
+      travis_start_time=$(travis_nanoseconds)
+      echo -en "travis_time:start:$travis_timer_id\r\033[0K"
+    }
+    travis_time_finish() {
+      travis_end_time=$(travis_nanoseconds)
+      local duration=$(($travis_end_time-$travis_start_time))
+      local msg="travis_time:end:$travis_timer_id"
+      echo -en "\n$msg:start=$travis_start_time,finish=$travis_end_time,duration=$duration\r\033[0K"
+    }
+    if [ $(uname) = 'Darwin' ]; then
+      # macOS `date` has no %N; fake nanosecond precision with zeros.
+      travis_nanoseconds() {
+        date -u '+%s000000000'
+      }
+    else
+      travis_nanoseconds() {
+        date -u '+%s%N'
+      }
+    fi
+  else
+    # Outside Travis every helper is a no-op.
+    travis_fold() { return 0; }
+    travis_time_start() { return 0; }
+    travis_time_finish() { return 0; }
+  fi
+fi
-Subproject commit 97422981c53a00f7c3d6584d363443117f179fff
+Subproject commit f746084b099060f55ac5e7d8050797593fcedd6e
-Subproject commit 6fa139b1630a9bb95dcd60cfc90aff9c19e54580
+Subproject commit c0e8c56d76bdf6bd16c64338f81c04d48c60f117
-Subproject commit f7a108dfa9e90b07821700c55d01f08a9adf005c
+Subproject commit 876582e9d0fbdc9cecb03133c28db96e9ff8c844
- [sip_hash_13](library-features/sip-hash-13.md)
- [slice_concat_ext](library-features/slice-concat-ext.md)
- [slice_get_slice](library-features/slice-get-slice.md)
+ - [slice_rotate](library-features/slice-rotate.md)
- [slice_rsplit](library-features/slice-rsplit.md)
- [sort_internals](library-features/sort-internals.md)
- [sort_unstable](library-features/sort-unstable.md)
--- /dev/null
+# `slice_rotate`
+
+The tracking issue for this feature is: [#41891]
+
+[#41891]: https://github.com/rust-lang/rust/issues/41891
+
+------------------------
[build-dependencies]
build_helper = { path = "../build_helper" }
-gcc = "0.3.27"
+gcc = "0.3.50"
[features]
debug = []
#[cfg(windows)]
#[allow(bad_style)]
mod imp {
+ use core::cmp::min;
+ use core::ptr::copy_nonoverlapping;
use MIN_ALIGN;
type LPVOID = *mut u8;
allocate_with_flags(size, align, HEAP_ZERO_MEMORY)
}
- pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {
+ pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, size as SIZE_T) as *mut u8
} else {
- let header = get_header(ptr);
- let new = HeapReAlloc(GetProcessHeap(),
- 0,
- header.0 as LPVOID,
- (size + align) as SIZE_T) as *mut u8;
- if new.is_null() {
- return new;
+ let new = allocate(size, align);
+ if !new.is_null() {
+ copy_nonoverlapping(ptr, new, min(size, old_size));
+ deallocate(ptr, old_size, align);
}
- align_ptr(new, align)
+ new
}
}
size: usize,
align: usize)
-> usize {
- if align <= MIN_ALIGN {
- let new = HeapReAlloc(GetProcessHeap(),
- HEAP_REALLOC_IN_PLACE_ONLY,
- ptr as LPVOID,
- size as SIZE_T) as *mut u8;
- if new.is_null() { old_size } else { size }
+    // Try an in-place reallocation; returns the size that is actually
+    // committed afterwards (`size` on success, `old_size` if the heap could
+    // not grow/shrink in place).
+    let new = if align <= MIN_ALIGN {
+        HeapReAlloc(GetProcessHeap(),
+                    HEAP_REALLOC_IN_PLACE_ONLY,
+                    ptr as LPVOID,
+                    size as SIZE_T) as *mut u8
     } else {
-        old_size
-    }
+        // Over-aligned blocks carry a header; reallocate the real
+        // (header) allocation, which was sized `size + align`.
+        let header = get_header(ptr);
+        HeapReAlloc(GetProcessHeap(),
+                    HEAP_REALLOC_IN_PLACE_ONLY,
+                    header.0 as LPVOID,
+                    // Parenthesized deliberately: `as` binds tighter than
+                    // `+`, so the unparenthesized `size + align as SIZE_T`
+                    // cast only `align` — correct today solely because
+                    // SIZE_T is usize. Match `allocate`'s `(size + align)`.
+                    (size + align) as SIZE_T) as *mut u8
+    };
+    if new.is_null() { old_size } else { size }
}
pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, align: usize) {
#![feature(i128_type)]
#![feature(rand)]
#![feature(repr_simd)]
+#![feature(slice_rotate)]
#![feature(sort_unstable)]
#![feature(test)]
rng.gen_iter::<u64>().take(len).collect()
}
+// Generates `len` random bytes, used by the byte-element rotate benchmarks.
+fn gen_random_bytes(len: usize) -> Vec<u8> {
+    let mut rng = thread_rng();
+    rng.gen_iter::<u8>().take(len).collect()
+}
+
fn gen_mostly_ascending(len: usize) -> Vec<u64> {
let mut rng = thread_rng();
let mut v = gen_ascending(len);
reverse!(reverse_u128, u128, |x| x as u128);
#[repr(simd)] struct F64x4(f64, f64, f64, f64);
reverse!(reverse_simd_f64x4, F64x4, |x| { let x = x as f64; F64x4(x,x,x,x) });
+
+// Benchmarks `[T]::rotate` on a slice of roughly `$len * 8` bytes, rotated by
+// roughly `$mid * 8` bytes, so benches over different element types move a
+// comparable amount of memory.
+macro_rules! rotate {
+    ($name:ident, $gen:expr, $len:expr, $mid:expr) => {
+        #[bench]
+        fn $name(b: &mut Bencher) {
+            // Probe the element size from one generated value, then convert
+            // the requested byte counts to element counts (mid rounded up
+            // via ceiling division so it is never zero).
+            let size = mem::size_of_val(&$gen(1)[0]);
+            let mut v = $gen($len * 8 / size);
+            b.iter(|| black_box(&mut v).rotate(($mid*8+size-1)/size));
+            b.bytes = (v.len() * size) as u64;
+        }
+    }
+}
+
+rotate!(rotate_tiny_by1, gen_random, 16, 1);
+rotate!(rotate_tiny_half, gen_random, 16, 16/2);
+rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16/2+1);
+
+rotate!(rotate_medium_by1, gen_random, 9158, 1);
+rotate!(rotate_medium_by727_u64, gen_random, 9158, 727);
+rotate!(rotate_medium_by727_bytes, gen_random_bytes, 9158, 727);
+rotate!(rotate_medium_by727_strings, gen_strings, 9158, 727);
+rotate!(rotate_medium_half, gen_random, 9158, 9158/2);
+rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158/2+1);
+
+// Intended to use more RAM than the machine has cache
+rotate!(rotate_huge_by1, gen_random, 5*1024*1024, 1);
+rotate!(rotate_huge_by9199_u64, gen_random, 5*1024*1024, 9199);
+rotate!(rotate_huge_by9199_bytes, gen_random_bytes, 5*1024*1024, 9199);
+rotate!(rotate_huge_by9199_strings, gen_strings, 5*1024*1024, 9199);
+rotate!(rotate_huge_by9199_big, gen_big_random, 5*1024*1024, 9199);
+rotate!(rotate_huge_by1234577_u64, gen_random, 5*1024*1024, 1234577);
+rotate!(rotate_huge_by1234577_bytes, gen_random_bytes, 5*1024*1024, 1234577);
+rotate!(rotate_huge_by1234577_strings, gen_strings, 5*1024*1024, 1234577);
+rotate!(rotate_huge_by1234577_big, gen_big_random, 5*1024*1024, 1234577);
+rotate!(rotate_huge_half, gen_random, 5*1024*1024, 5*1024*1024/2);
+rotate!(rotate_huge_half_plus_one, gen_random, 5*1024*1024, 5*1024*1024/2+1);
/// # Examples
///
/// ```
+ /// use std::ascii::AsciiExt;
/// use std::borrow::Cow;
///
- /// let mut cow: Cow<[_]> = Cow::Owned(vec![1, 2, 3]);
+ /// let mut cow = Cow::Borrowed("foo");
+ /// cow.to_mut().make_ascii_uppercase();
///
- /// let hello = cow.to_mut();
- ///
- /// assert_eq!(hello, &[1, 2, 3]);
+ /// assert_eq!(
+ /// cow,
+ /// Cow::Owned(String::from("FOO")) as Cow<str>
+ /// );
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_mut(&mut self) -> &mut <B as ToOwned>::Owned {
///
/// # Examples
///
+ /// Calling `into_owned` on a `Cow::Borrowed` clones the underlying data
+ /// and becomes a `Cow::Owned`:
+ ///
/// ```
/// use std::borrow::Cow;
///
- /// let cow: Cow<[_]> = Cow::Owned(vec![1, 2, 3]);
+ /// let s = "Hello world!";
+ /// let cow = Cow::Borrowed(s);
+ ///
+ /// assert_eq!(
+ /// cow.into_owned(),
+ /// Cow::Owned(String::from(s))
+ /// );
+ /// ```
+ ///
+ /// Calling `into_owned` on a `Cow::Owned` is a no-op:
+ ///
+ /// ```
+ /// use std::borrow::Cow;
///
- /// let hello = cow.into_owned();
+ /// let s = "Hello world!";
+ /// let cow: Cow<str> = Cow::Owned(String::from(s));
///
- /// assert_eq!(vec![1, 2, 3], hello);
+ /// assert_eq!(
+ /// cow.into_owned(),
+ /// Cow::Owned(String::from(s))
+ /// );
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_owned(self) -> <B as ToOwned>::Owned {
#![feature(shared)]
#![feature(slice_get_slice)]
#![feature(slice_patterns)]
+#![cfg_attr(not(test), feature(slice_rotate))]
#![feature(slice_rsplit)]
#![cfg_attr(not(test), feature(sort_unstable))]
#![feature(specialization)]
core_slice::SliceExt::sort_unstable_by_key(self, f);
}
+ /// Permutes the slice in-place such that `self[mid..]` moves to the
+ /// beginning of the slice while `self[..mid]` moves to the end of the
+ /// slice. Equivalently, rotates the slice `mid` places to the left
+ /// or `k = self.len() - mid` places to the right.
+ ///
+ /// This is a "k-rotation", a permutation in which item `i` moves to
+ /// position `i + k`, modulo the length of the slice. See _Elements
+ /// of Programming_ [§10.4][eop].
+ ///
+ /// Rotation by `mid` and rotation by `k` are inverse operations.
+ ///
+ /// [eop]: https://books.google.com/books?id=CO9ULZGINlsC&pg=PA178&q=k-rotation
+ ///
+ /// # Panics
+ ///
+ /// This function will panic if `mid` is greater than the length of the
+ /// slice. (Note that `mid == self.len()` does _not_ panic; it's a nop
+ /// rotation with `k == 0`, the inverse of a rotation with `mid == 0`.)
+ ///
+ /// # Complexity
+ ///
+ /// Takes linear (in `self.len()`) time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(slice_rotate)]
+ ///
+ /// let mut a = [1, 2, 3, 4, 5, 6, 7];
+ /// let mid = 2;
+ /// a.rotate(mid);
+ /// assert_eq!(&a, &[3, 4, 5, 6, 7, 1, 2]);
+ /// let k = a.len() - mid;
+ /// a.rotate(k);
+ /// assert_eq!(&a, &[1, 2, 3, 4, 5, 6, 7]);
+ ///
+ /// use std::ops::Range;
+ /// fn slide<T>(slice: &mut [T], range: Range<usize>, to: usize) {
+ /// if to < range.start {
+ /// slice[to..range.end].rotate(range.start-to);
+ /// } else if to > range.end {
+ /// slice[range.start..to].rotate(range.end-range.start);
+ /// }
+ /// }
+ /// let mut v: Vec<_> = (0..10).collect();
+ /// slide(&mut v, 1..4, 7);
+ /// assert_eq!(&v, &[0, 4, 5, 6, 1, 2, 3, 7, 8, 9]);
+ /// slide(&mut v, 6..8, 1);
+ /// assert_eq!(&v, &[0, 3, 7, 4, 5, 6, 1, 2, 8, 9]);
+ /// ```
+ #[unstable(feature = "slice_rotate", issue = "41891")]
+ pub fn rotate(&mut self, mid: usize) {
+ core_slice::SliceExt::rotate(self, mid);
+ }
+
/// Copies the elements from `src` into `self`.
///
/// The length of `src` must be the same as `self`.
///
/// ```
/// assert_eq!(["hello", "world"].concat(), "helloworld");
+ /// assert_eq!([[1, 2], [3, 4]].concat(), [1, 2, 3, 4]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn concat(&self) -> Self::Output;
///
/// ```
/// assert_eq!(["hello", "world"].join(" "), "hello world");
+ /// assert_eq!([[1, 2], [3, 4]].join(&0), [1, 2, 0, 3, 4]);
/// ```
#[stable(feature = "rename_connect_to_join", since = "1.3.0")]
fn join(&self, sep: &T) -> Self::Output;
fn test_push_unique() {
let mut heap = BinaryHeap::<Box<_>>::from(vec![box 2, box 4, box 9]);
assert_eq!(heap.len(), 3);
- assert!(*heap.peek().unwrap() == box 9);
+ assert!(**heap.peek().unwrap() == 9);
heap.push(box 11);
assert_eq!(heap.len(), 4);
- assert!(*heap.peek().unwrap() == box 11);
+ assert!(**heap.peek().unwrap() == 11);
heap.push(box 5);
assert_eq!(heap.len(), 5);
- assert!(*heap.peek().unwrap() == box 11);
+ assert!(**heap.peek().unwrap() == 11);
heap.push(box 27);
assert_eq!(heap.len(), 6);
- assert!(*heap.peek().unwrap() == box 27);
+ assert!(**heap.peek().unwrap() == 27);
heap.push(box 3);
assert_eq!(heap.len(), 7);
- assert!(*heap.peek().unwrap() == box 27);
+ assert!(**heap.peek().unwrap() == 27);
heap.push(box 103);
assert_eq!(heap.len(), 8);
- assert!(*heap.peek().unwrap() == box 103);
+ assert!(**heap.peek().unwrap() == 103);
}
fn check_to_vec(mut data: Vec<i32>) {
#![deny(warnings)]
+#![feature(attr_literals)]
#![feature(box_syntax)]
#![feature(inclusive_range_syntax)]
#![feature(collection_placement)]
#![feature(collections)]
#![feature(const_fn)]
#![feature(exact_size_is_empty)]
+#![feature(iterator_step_by)]
#![feature(pattern)]
#![feature(placement_in_syntax)]
#![feature(rand)]
+#![feature(repr_align)]
+#![feature(slice_rotate)]
#![feature(splice)]
-#![feature(step_by)]
#![feature(str_escape)]
#![feature(test)]
#![feature(unboxed_closures)]
}
}
+#[test]
+fn test_rotate() {
+    let expected: Vec<_> = (0..13).collect();
+    let mut v = Vec::new();
+
+    // no-ops
+    v.clone_from(&expected);
+    v.rotate(0);
+    assert_eq!(v, expected);
+    v.rotate(expected.len());
+    assert_eq!(v, expected);
+    // Zero-sized element types must also be accepted without panicking.
+    let mut zst_array = [(), (), ()];
+    zst_array.rotate(2);
+
+    // happy path
+    v = (5..13).chain(0..5).collect();
+    v.rotate(8);
+    assert_eq!(v, expected);
+
+    let expected: Vec<_> = (0..1000).collect();
+
+    // small rotations in large slice, uses ptr::copy
+    v = (2..1000).chain(0..2).collect();
+    v.rotate(998);
+    assert_eq!(v, expected);
+    v = (998..1000).chain(0..998).collect();
+    v.rotate(2);
+    assert_eq!(v, expected);
+
+    // non-small prime rotation, has a few rounds of swapping
+    v = (389..1000).chain(0..389).collect();
+    v.rotate(1000-389);
+    assert_eq!(v, expected);
+}
+
#[test]
fn test_concat() {
let v: [Vec<i32>; 0] = [];
assert_eq!(vec, [2, 3]);
assert!(ptr != vec.as_ptr());
}
+
+#[test]
+fn overaligned_allocations() {
+    // `Foo` demands 256-byte alignment, well above the allocator's default.
+    #[repr(align(256))]
+    struct Foo(usize);
+    let mut v = vec![Foo(273)];
+    for i in 0..0x1000 {
+        // Both growing and shrinking must preserve the stored value and keep
+        // the buffer 256-byte aligned (low 8 address bits all zero).
+        v.reserve_exact(i);
+        assert!(v[0].0 == 273);
+        assert!(v.as_ptr() as usize & 0xff == 0);
+        v.shrink_to_fit();
+        assert!(v[0].0 == 273);
+        assert!(v.as_ptr() as usize & 0xff == 0);
+    }
+}
let u: Vec<_> = deq.iter().cloned().collect();
assert_eq!(u, v);
- let seq = (0..).step_by(2).take(256);
+ // FIXME #27741: Remove `.skip(0)` when Range::step_by is fully removed
+ let seq = (0..).skip(0).step_by(2).take(256);
let deq: VecDeque<_> = seq.collect();
for (i, &x) in deq.iter().enumerate() {
assert_eq!(2 * i, x);
[build-dependencies]
build_helper = { path = "../build_helper" }
-gcc = "0.3.27"
+gcc = "0.3.50"
pub fn atomic_umax_rel<T>(dst: *mut T, src: T) -> T;
pub fn atomic_umax_acqrel<T>(dst: *mut T, src: T) -> T;
pub fn atomic_umax_relaxed<T>(dst: *mut T, src: T) -> T;
+
+ /// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
+ /// if supported; otherwise, it is a noop.
+ /// Prefetches have no effect on the behavior of the program but can change its performance
+ /// characteristics.
+ ///
+    /// The `locality` argument must be a constant integer and is a temporal locality specifier
+    /// ranging from 0 (no temporal locality) to 3 (extremely local, keep in cache).
+ #[cfg(not(stage0))]
+ pub fn prefetch_read_data<T>(data: *const T, locality: i32);
+ /// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
+ /// if supported; otherwise, it is a noop.
+ /// Prefetches have no effect on the behavior of the program but can change its performance
+ /// characteristics.
+ ///
+    /// The `locality` argument must be a constant integer and is a temporal locality specifier
+    /// ranging from 0 (no temporal locality) to 3 (extremely local, keep in cache).
+ #[cfg(not(stage0))]
+ pub fn prefetch_write_data<T>(data: *const T, locality: i32);
+ /// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
+ /// if supported; otherwise, it is a noop.
+ /// Prefetches have no effect on the behavior of the program but can change its performance
+ /// characteristics.
+ ///
+    /// The `locality` argument must be a constant integer and is a temporal locality specifier
+    /// ranging from 0 (no temporal locality) to 3 (extremely local, keep in cache).
+ #[cfg(not(stage0))]
+ pub fn prefetch_read_instruction<T>(data: *const T, locality: i32);
+ /// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
+ /// if supported; otherwise, it is a noop.
+ /// Prefetches have no effect on the behavior of the program but can change its performance
+ /// characteristics.
+ ///
+    /// The `locality` argument must be a constant integer and is a temporal locality specifier
+    /// ranging from 0 (no temporal locality) to 3 (extremely local, keep in cache).
+ #[cfg(not(stage0))]
+ pub fn prefetch_write_instruction<T>(data: *const T, locality: i32);
}
+// Empty bootstrap implementations for stage0 compilation
+#[cfg(stage0)]
+pub fn prefetch_read_data<T>(_data: *const T, _locality: i32) { /* EMPTY */ }
+#[cfg(stage0)]
+pub fn prefetch_write_data<T>(_data: *const T, _locality: i32) { /* EMPTY */ }
+#[cfg(stage0)]
+pub fn prefetch_read_instruction<T>(_data: *const T, _locality: i32) { /* EMPTY */ }
+#[cfg(stage0)]
+pub fn prefetch_write_instruction<T>(_data: *const T, _locality: i32) { /* EMPTY */ }
+
extern "rust-intrinsic" {
pub fn atomic_fence();
//! {
//! let result = match IntoIterator::into_iter(values) {
//! mut iter => loop {
-//! match iter.next() {
-//! Some(x) => { println!("{}", x); },
+//! let x = match iter.next() {
+//! Some(val) => val,
//! None => break,
-//! }
+//! };
+//! let () = { println!("{}", x); };
//! },
//! };
//! result
pub use self::range::Step;
#[unstable(feature = "step_by", reason = "recent addition",
issue = "27741")]
+#[rustc_deprecated(since = "1.19.0",
+ reason = "replaced by `iter::StepBy`")]
+#[allow(deprecated)]
pub use self::range::StepBy as DeprecatedStepBy;
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone, Debug)]
#[unstable(feature = "step_by", reason = "recent addition",
issue = "27741")]
+#[rustc_deprecated(since = "1.19.0",
+ reason = "replaced by `iter::StepBy`")]
+#[allow(deprecated)]
pub struct StepBy<A, R> {
step_by: A,
range: R,
/// ```
#[unstable(feature = "step_by", reason = "recent addition",
issue = "27741")]
+ #[rustc_deprecated(since = "1.19.0",
+ reason = "replaced by `Iterator::step_by`")]
+ #[allow(deprecated)]
pub fn step_by(self, by: A) -> StepBy<A, Self> {
StepBy {
step_by: by,
/// ```
#[unstable(feature = "step_by", reason = "recent addition",
issue = "27741")]
+ #[rustc_deprecated(since = "1.19.0",
+ reason = "replaced by `Iterator::step_by`")]
+ #[allow(deprecated)]
pub fn step_by(self, by: A) -> StepBy<A, Self> {
StepBy {
step_by: by,
/// ```
#[unstable(feature = "step_by", reason = "recent addition",
issue = "27741")]
+ #[rustc_deprecated(since = "1.19.0",
+ reason = "replaced by `Iterator::step_by`")]
+ #[allow(deprecated)]
pub fn step_by(self, by: A) -> StepBy<A, Self> {
StepBy {
step_by: by,
#[unstable(feature = "step_by", reason = "recent addition",
issue = "27741")]
+#[allow(deprecated)]
impl<A> Iterator for StepBy<A, ops::RangeFrom<A>> where
A: Clone,
for<'a> &'a A: Add<&'a A, Output = A>
}
#[unstable(feature = "fused", issue = "35602")]
+#[allow(deprecated)]
impl<A> FusedIterator for StepBy<A, ops::RangeFrom<A>>
where A: Clone, for<'a> &'a A: Add<&'a A, Output = A> {}
#[unstable(feature = "step_by", reason = "recent addition",
issue = "27741")]
+#[allow(deprecated)]
impl<A: Step + Clone> Iterator for StepBy<A, ops::Range<A>> {
type Item = A;
}
#[unstable(feature = "fused", issue = "35602")]
+#[allow(deprecated)]
impl<A: Step + Clone> FusedIterator for StepBy<A, ops::Range<A>> {}
#[unstable(feature = "inclusive_range",
reason = "recently added, follows RFC",
issue = "28237")]
+#[allow(deprecated)]
impl<A: Step + Clone> Iterator for StepBy<A, ops::RangeInclusive<A>> {
type Item = A;
}
#[unstable(feature = "fused", issue = "35602")]
+#[allow(deprecated)]
impl<A: Step + Clone> FusedIterator for StepBy<A, ops::RangeInclusive<A>> {}
macro_rules! range_exact_iter_impl {
use marker::{Copy, Send, Sync, Sized, self};
use iter_private::TrustedRandomAccess;
+mod rotate;
mod sort;
#[repr(C)]
#[stable(feature = "core", since = "1.6.0")]
fn ends_with(&self, needle: &[Self::Item]) -> bool where Self::Item: PartialEq;
+ #[unstable(feature = "slice_rotate", issue = "41891")]
+ fn rotate(&mut self, mid: usize);
+
#[stable(feature = "clone_from_slice", since = "1.7.0")]
fn clone_from_slice(&mut self, src: &[Self::Item]) where Self::Item: Clone;
self.binary_search_by(|p| p.borrow().cmp(x))
}
+    fn rotate(&mut self, mid: usize) {
+        // `mid <= len` makes both halves below in-bounds; `mid == len`
+        // gives `k == 0`, a no-op rotation.
+        assert!(mid <= self.len());
+        let k = self.len() - mid;
+
+        unsafe {
+            // The left half is `[p, p+mid)` and the right half is
+            // `[p+mid, p+mid+k)`, both valid per the assert above.
+            let p = self.as_mut_ptr();
+            rotate::ptr_rotate(mid, p.offset(mid as isize), k);
+        }
+    }
+
#[inline]
fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
assert!(self.len() == src.len(),
--- /dev/null
+// Copyright 2012-2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use cmp;
+use mem;
+use ptr;
+
+/// Rotation is much faster if it has access to a little bit of memory. This
+/// union provides a RawVec-like interface, but to a fixed-size stack buffer.
+// The buffer is used purely as scratch space via raw copies in `ptr_rotate`
+// — elements moved in are always moved back out, so no field is ever
+// dropped in place (hence the allow).
+#[allow(unions_with_drop_fields)]
+union RawArray<T> {
+    /// Ensure this is appropriately aligned for T, and is big
+    /// enough for two elements even if T is enormous.
+    typed: [T; 2],
+    /// For normally-sized types, especially things like u8, having more
+    /// than 2 in the buffer is necessary for usefulness, so pad it out
+    /// enough to be helpful, but not so big as to risk overflow.
+    _extra: [usize; 32],
+}
+
+impl<T> RawArray<T> {
+    /// Creates the scratch buffer; contents are uninitialized and must be
+    /// fully written before being read.
+    fn new() -> Self {
+        unsafe { mem::uninitialized() }
+    }
+    /// Pointer to the first slot of the buffer.
+    fn ptr(&self) -> *mut T {
+        unsafe { &self.typed as *const T as *mut T }
+    }
+    /// Number of `T`s the buffer can hold — effectively unlimited for
+    /// zero-sized types (size 0 divides out).
+    fn cap() -> usize {
+        if mem::size_of::<T>() == 0 {
+            usize::max_value()
+        } else {
+            mem::size_of::<Self>() / mem::size_of::<T>()
+        }
+    }
+}
+
+/// Rotates the range `[mid-left, mid+right)` such that the element at `mid`
+/// becomes the first element. Equivalently, rotates the range `left`
+/// elements to the left or `right` elements to the right.
+///
+/// # Safety
+///
+/// The specified range must be valid for reading and writing.
+/// The type `T` must have non-zero size.
+///
+/// NOTE(review): despite the clause above, zero-sized `T` appears to be
+/// handled (`RawArray::cap()` special-cases it and all copies are then of
+/// size zero), and the public `rotate` is exercised with ZSTs — confirm and
+/// relax this clause if that is intended.
+///
+/// # Algorithm
+///
+/// For longer rotations, swap the left-most `delta = min(left, right)`
+/// elements with the right-most `delta` elements. LLVM vectorizes this,
+/// which is profitable as we only reach this step for a "large enough"
+/// rotation. Doing this puts `delta` elements on the larger side into the
+/// correct position, leaving a smaller rotate problem. Demonstration:
+///
+/// ```text
+/// [ 6 7 8 9 10 11 12 13 . 1 2 3 4 5 ]
+/// 1 2 3 4 5 [ 11 12 13 . 6 7 8 9 10 ]
+/// 1 2 3 4 5 [ 8 9 10 . 6 7 ] 11 12 13
+/// 1 2 3 4 5 6 7 [ 10 . 8 9 ] 11 12 13
+/// 1 2 3 4 5 6 7 [ 9 . 8 ] 10 11 12 13
+/// 1 2 3 4 5 6 7 8 [ . ] 9 10 11 12 13
+/// ```
+///
+/// Once the rotation is small enough, copy some elements into a stack
+/// buffer, `memmove` the others, and move the ones back from the buffer.
+pub unsafe fn ptr_rotate<T>(mut left: usize, mid: *mut T, mut right: usize) {
+    loop {
+        // Keep swapping edges until the smaller side fits the stack buffer.
+        let delta = cmp::min(left, right);
+        if delta <= RawArray::<T>::cap() {
+            break;
+        }
+
+        // Swap the outermost `delta` of the left side with the outermost
+        // `delta` of the right side.
+        ptr_swap_n(
+            mid.offset(-(left as isize)),
+            mid.offset((right-delta) as isize),
+            delta);
+
+        // Only the larger side shrinks; the smaller side stays intact.
+        if left <= right {
+            right -= delta;
+        } else {
+            left -= delta;
+        }
+    }
+
+    // Finish with the buffered three-step move: stash the smaller side,
+    // memmove the larger side into place, restore the stash at the far end.
+    let rawarray = RawArray::new();
+    let buf = rawarray.ptr();
+
+    // `dim` is where the left chunk must land: range start plus `right`.
+    let dim = mid.offset(-(left as isize)).offset(right as isize);
+    if left <= right {
+        ptr::copy_nonoverlapping(mid.offset(-(left as isize)), buf, left);
+        ptr::copy(mid, mid.offset(-(left as isize)), right);
+        ptr::copy_nonoverlapping(buf, dim, left);
+    }
+    else {
+        ptr::copy_nonoverlapping(mid, buf, right);
+        ptr::copy(mid.offset(-(left as isize)), dim, left);
+        ptr::copy_nonoverlapping(buf, mid.offset(-(left as isize)), right);
+    }
+}
+
+/// Swaps the `n` elements starting at `a` with the `n` starting at `b`.
+///
+/// # Safety
+///
+/// Both ranges must be valid for reads and writes, and must not overlap.
+unsafe fn ptr_swap_n<T>(a: *mut T, b: *mut T, n: usize) {
+    for i in 0..n {
+        // These are nonoverlapping, so use mem::swap instead of ptr::swap
+        mem::swap(&mut *a.offset(i as isize), &mut *b.offset(i as isize));
+    }
+}
use core::{i8, i16, isize};
use core::usize;
+// FIXME #27741: This is here to simplify calling Iterator::step_by. Remove
+// once Range::step_by is completely gone (not just deprecated).
+trait IterEx: Sized {
+    /// Identical to `Iterator::step_by`; the distinct name sidesteps the
+    /// ambiguity with the deprecated inherent `Range::step_by`.
+    fn iter_step_by(self, n: usize) -> StepBy<Self>;
+}
+impl<I:Iterator> IterEx for I {
+    fn iter_step_by(self, n: usize) -> StepBy<Self> { self.step_by(n) }
+}
+
#[test]
fn test_lt() {
let empty: [isize; 0] = [];
#[test]
fn test_counter_from_iter() {
- let it = (0..).step_by(5).take(10);
+ let it = (0..).iter_step_by(5).take(10);
let xs: Vec<isize> = FromIterator::from_iter(it);
assert_eq!(xs, [0, 5, 10, 15, 20, 25, 30, 35, 40, 45]);
}
}
assert_eq!(i, expected.len());
- let ys = (30..).step_by(10).take(4);
+ let ys = (30..).iter_step_by(10).take(4);
let it = xs.iter().cloned().chain(ys);
let mut i = 0;
for x in it {
#[test]
fn test_iterator_step_by() {
// Identity
- // Replace with (0..).step_by(1) after Range::step_by gets removed
- let mut it = Iterator::step_by((0..), 1).take(3);
+ let mut it = (0..).iter_step_by(1).take(3);
assert_eq!(it.next(), Some(0));
assert_eq!(it.next(), Some(1));
assert_eq!(it.next(), Some(2));
assert_eq!(it.next(), None);
- // Replace with (0..).step_by(3) after Range::step_by gets removed
- let mut it = Iterator::step_by((0..), 3).take(4);
+ let mut it = (0..).iter_step_by(3).take(4);
assert_eq!(it.next(), Some(0));
assert_eq!(it.next(), Some(3));
assert_eq!(it.next(), Some(6));
#[test]
#[should_panic]
fn test_iterator_step_by_zero() {
- // Replace with (0..).step_by(0) after Range::step_by gets removed
- let mut it = Iterator::step_by((0..), 0);
+ let mut it = (0..).iter_step_by(0);
it.next();
}
#[test]
fn test_filter_map() {
- let it = (0..).step_by(1).take(10)
+ let it = (0..).iter_step_by(1).take(10)
.filter_map(|x| if x % 2 == 0 { Some(x*x) } else { None });
assert_eq!(it.collect::<Vec<usize>>(), [0*0, 2*2, 4*4, 6*6, 8*8]);
}
fn test_iterator_flat_map() {
let xs = [0, 3, 6];
let ys = [0, 1, 2, 3, 4, 5, 6, 7, 8];
- let it = xs.iter().flat_map(|&x| (x..).step_by(1).take(3));
+ let it = xs.iter().flat_map(|&x| (x..).iter_step_by(1).take(3));
let mut i = 0;
for x in it {
assert_eq!(x, ys[i]);
#[test]
fn test_cycle() {
let cycle_len = 3;
- let it = (0..).step_by(1).take(cycle_len).cycle();
+ let it = (0..).iter_step_by(1).take(cycle_len).cycle();
assert_eq!(it.size_hint(), (usize::MAX, None));
for (i, x) in it.take(100).enumerate() {
assert_eq!(i % cycle_len, x);
}
- let mut it = (0..).step_by(1).take(0).cycle();
+ let mut it = (0..).iter_step_by(1).take(0).cycle();
assert_eq!(it.size_hint(), (0, Some(0)));
assert_eq!(it.next(), None);
}
#[test]
fn test_iterator_size_hint() {
- let c = (0..).step_by(1);
+ let c = (0..).iter_step_by(1);
let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let v2 = &[10, 11, 12];
let vi = v.iter();
#[test]
fn test_range_step() {
+ #![allow(deprecated)]
+
assert_eq!((0..20).step_by(5).collect::<Vec<isize>>(), [0, 5, 10, 15]);
assert_eq!((20..0).step_by(-5).collect::<Vec<isize>>(), [20, 15, 10, 5]);
assert_eq!((20..0).step_by(-6).collect::<Vec<isize>>(), [20, 14, 8, 2]);
#![feature(raw)]
#![feature(sip_hash_13)]
#![feature(slice_patterns)]
+#![feature(slice_rotate)]
#![feature(sort_internals)]
#![feature(sort_unstable)]
#![feature(specialization)]
assert_eq!(v.iter().rfind(|&&x| x <= 3), Some(&3));
}
+#[test]
+fn test_rotate() {
+ const N: usize = 600;
+ let a: &mut [_] = &mut [0; N];
+ for i in 0..N {
+ a[i] = i;
+ }
+
+ a.rotate(42);
+ let k = N - 42;
+
+ for i in 0..N {
+ assert_eq!(a[(i+k)%N], i);
+ }
+}
+
#[test]
fn sort_unstable() {
let mut v = [0; 600];
[build-dependencies]
build_helper = { path = "../build_helper" }
-gcc = "0.3.27"
+gcc = "0.3.50"
}
hir::ExprIndex(ref l, ref r) |
- hir::ExprBinary(_, ref l, ref r) if self.tables.is_method_call(expr.id) => {
+ hir::ExprBinary(_, ref l, ref r) if self.tables.is_method_call(expr) => {
self.call(expr, pred, &l, Some(&**r).into_iter())
}
- hir::ExprUnary(_, ref e) if self.tables.is_method_call(expr.id) => {
+ hir::ExprUnary(_, ref e) if self.tables.is_method_call(expr) => {
self.call(expr, pred, &e, None::<hir::Expr>.iter())
}
pred: CFGIndex,
func_or_rcvr: &hir::Expr,
args: I) -> CFGIndex {
- let method_call = ty::MethodCall::expr(call_expr.id);
- let fn_ty = match self.tables.method_map.get(&method_call) {
- Some(method) => method.ty,
- None => self.tables.expr_ty_adjusted(func_or_rcvr),
- };
-
let func_or_rcvr_exit = self.expr(func_or_rcvr, pred);
let ret = self.straightline(call_expr, func_or_rcvr_exit, args);
// FIXME(canndrew): This is_never should probably be an is_uninhabited.
- if fn_ty.fn_ret().0.is_never() {
+ if self.tables.expr_ty(call_expr).is_never() {
self.add_unreachable_node()
} else {
ret
In particular, using the memoize helper is much better than writing
the obvious code yourself:
-```
+```rust
if let Some(result) = map.get(key) {
return result;
}
// During compilation, it is always `DefId`, but when serializing
// it is mapped to `DefPath`.
- // Represents the `Krate` as a whole (the `hir::Krate` value) (as
- // distinct from the krate module). This is basically a hash of
- // the entire krate, so if you read from `Krate` (e.g., by calling
- // `tcx.hir.krate()`), we will have to assume that any change
- // means that you need to be recompiled. This is because the
- // `Krate` value gives you access to all other items. To avoid
- // this fate, do not call `tcx.hir.krate()`; instead, prefer
- // wrappers like `tcx.visit_all_items_in_krate()`. If there is no
- // suitable wrapper, you can use `tcx.dep_graph.ignore()` to gain
- // access to the krate, but you must remember to add suitable
- // edges yourself for the individual items that you read.
+ /// Represents the `Krate` as a whole (the `hir::Krate` value) (as
+ /// distinct from the krate module). This is basically a hash of
+ /// the entire krate, so if you read from `Krate` (e.g., by calling
+ /// `tcx.hir.krate()`), we will have to assume that any change
+ /// means that you need to be recompiled. This is because the
+ /// `Krate` value gives you access to all other items. To avoid
+ /// this fate, do not call `tcx.hir.krate()`; instead, prefer
+ /// wrappers like `tcx.visit_all_items_in_krate()`. If there is no
+ /// suitable wrapper, you can use `tcx.dep_graph.ignore()` to gain
+ /// access to the krate, but you must remember to add suitable
+ /// edges yourself for the individual items that you read.
Krate,
- // Represents the HIR node with the given node-id
+ /// Represents the HIR node with the given node-id
Hir(D),
- // Represents the body of a function or method. The def-id is that of the
- // function/method.
+ /// Represents the body of a function or method. The def-id is that of the
+ /// function/method.
HirBody(D),
- // Represents the metadata for a given HIR node, typically found
- // in an extern crate.
+ /// Represents the metadata for a given HIR node, typically found
+ /// in an extern crate.
MetaData(D),
- // Represents some piece of metadata global to its crate.
+ /// Represents some piece of metadata global to its crate.
GlobalMetaData(D, GlobalMetaDataKind),
- // Represents some artifact that we save to disk. Note that these
- // do not have a def-id as part of their identifier.
+ /// Represents some artifact that we save to disk. Note that these
+ /// do not have a def-id as part of their identifier.
WorkProduct(Arc<WorkProductId>),
// Represents different phases in the compiler.
IsSized(D),
IsFreeze(D),
NeedsDrop(D),
+ Layout(D),
- // The set of impls for a given trait. Ultimately, it would be
- // nice to get more fine-grained here (e.g., to include a
- // simplified type), but we can't do that until we restructure the
- // HIR to distinguish the *header* of an impl from its body. This
- // is because changes to the header may change the self-type of
- // the impl and hence would require us to be more conservative
- // than changes in the impl body.
+ /// The set of impls for a given trait. Ultimately, it would be
+ /// nice to get more fine-grained here (e.g., to include a
+ /// simplified type), but we can't do that until we restructure the
+ /// HIR to distinguish the *header* of an impl from its body. This
+ /// is because changes to the header may change the self-type of
+ /// the impl and hence would require us to be more conservative
+ /// than changes in the impl body.
TraitImpls(D),
AllLocalTraitImpls,
TraitItems(D),
ReprHints(D),
- // Trait selection cache is a little funny. Given a trait
- // reference like `Foo: SomeTrait<Bar>`, there could be
- // arbitrarily many def-ids to map on in there (e.g., `Foo`,
- // `SomeTrait`, `Bar`). We could have a vector of them, but it
- // requires heap-allocation, and trait sel in general can be a
- // surprisingly hot path. So instead we pick two def-ids: the
- // trait def-id, and the first def-id in the input types. If there
- // is no def-id in the input types, then we use the trait def-id
- // again. So for example:
- //
- // - `i32: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: Clone }`
- // - `u32: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: Clone }`
- // - `Clone: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: Clone }`
- // - `Vec<i32>: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: Vec }`
- // - `String: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: String }`
- // - `Foo: Trait<Bar>` -> `TraitSelect { trait_def_id: Trait, self_def_id: Foo }`
- // - `Foo: Trait<i32>` -> `TraitSelect { trait_def_id: Trait, self_def_id: Foo }`
- // - `(Foo, Bar): Trait` -> `TraitSelect { trait_def_id: Trait, self_def_id: Foo }`
- // - `i32: Trait<Foo>` -> `TraitSelect { trait_def_id: Trait, self_def_id: Foo }`
- //
- // You can see that we map many trait refs to the same
- // trait-select node. This is not a problem, it just means
- // imprecision in our dep-graph tracking. The important thing is
- // that for any given trait-ref, we always map to the **same**
- // trait-select node.
+ /// Trait selection cache is a little funny. Given a trait
+ /// reference like `Foo: SomeTrait<Bar>`, there could be
+ /// arbitrarily many def-ids to map on in there (e.g., `Foo`,
+ /// `SomeTrait`, `Bar`). We could have a vector of them, but it
+ /// requires heap-allocation, and trait sel in general can be a
+ /// surprisingly hot path. So instead we pick two def-ids: the
+ /// trait def-id, and the first def-id in the input types. If there
+ /// is no def-id in the input types, then we use the trait def-id
+ /// again. So for example:
+ ///
+ /// - `i32: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: Clone }`
+ /// - `u32: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: Clone }`
+ /// - `Clone: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: Clone }`
+ /// - `Vec<i32>: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: Vec }`
+ /// - `String: Clone` -> `TraitSelect { trait_def_id: Clone, self_def_id: String }`
+ /// - `Foo: Trait<Bar>` -> `TraitSelect { trait_def_id: Trait, self_def_id: Foo }`
+ /// - `Foo: Trait<i32>` -> `TraitSelect { trait_def_id: Trait, self_def_id: Foo }`
+ /// - `(Foo, Bar): Trait` -> `TraitSelect { trait_def_id: Trait, self_def_id: Foo }`
+ /// - `i32: Trait<Foo>` -> `TraitSelect { trait_def_id: Trait, self_def_id: Foo }`
+ ///
+ /// You can see that we map many trait refs to the same
+ /// trait-select node. This is not a problem, it just means
+ /// imprecision in our dep-graph tracking. The important thing is
+ /// that for any given trait-ref, we always map to the **same**
+ /// trait-select node.
TraitSelect { trait_def_id: D, input_def_id: D },
- // For proj. cache, we just keep a list of all def-ids, since it is
- // not a hotspot.
+ /// For proj. cache, we just keep a list of all def-ids, since it is
+ /// not a hotspot.
ProjectionCache { def_ids: Vec<D> },
ParamEnv(D),
IsSized(ref d) => op(d).map(IsSized),
IsFreeze(ref d) => op(d).map(IsFreeze),
NeedsDrop(ref d) => op(d).map(NeedsDrop),
+ Layout(ref d) => op(d).map(Layout),
Hir(ref d) => op(d).map(Hir),
HirBody(ref d) => op(d).map(HirBody),
MetaData(ref d) => op(d).map(MetaData),
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+//! The `DepGraphSafe` trait
+
use hir::BodyId;
use hir::def_id::DefId;
use syntax::ast::NodeId;
init: l.init.as_ref().map(|e| P(self.lower_expr(e))),
span: l.span,
attrs: l.attrs.clone(),
+ source: hir::LocalSource::Normal,
})
}
// let result = match ::std::iter::IntoIterator::into_iter(<head>) {
// mut iter => {
// [opt_ident]: loop {
- // match ::std::iter::Iterator::next(&mut iter) {
- // ::std::option::Option::Some(<pat>) => <body>,
+ // let <pat> = match ::std::iter::Iterator::next(&mut iter) {
+ // ::std::option::Option::Some(val) => val,
// ::std::option::Option::None => break
- // }
+ // };
+ // SemiExpr(<body>);
// }
// }
// };
let iter = self.str_to_ident("iter");
- // `::std::option::Option::Some(<pat>) => <body>`
+ // `::std::option::Option::Some(val) => val`
let pat_arm = {
- let body_block = self.with_loop_scope(e.id,
- |this| this.lower_block(body, false));
- let body_expr = P(self.expr_block(body_block, ThinVec::new()));
- let pat = self.lower_pat(pat);
- let some_pat = self.pat_some(e.span, pat);
-
- self.arm(hir_vec![some_pat], body_expr)
+ let val_ident = self.str_to_ident("val");
+ let val_pat = self.pat_ident(e.span, val_ident);
+ let val_expr = P(self.expr_ident(e.span, val_ident, val_pat.id));
+ let some_pat = self.pat_some(e.span, val_pat);
+ self.arm(hir_vec![some_pat], val_expr)
};
// `::std::option::Option::None => break`
ThinVec::new()))
};
+ let pat = self.lower_pat(pat);
+ let pat_let = self.stmt_let_pat(e.span,
+ match_expr,
+ pat,
+ hir::LocalSource::ForLoopDesugar);
+
+ let body_block = self.with_loop_scope(e.id,
+ |this| this.lower_block(body, false));
+ let body_expr = P(self.expr_block(body_block, ThinVec::new()));
+ let body_stmt = respan(e.span, hir::StmtExpr(body_expr, self.next_id()));
+
+ let loop_block = P(self.block_all(e.span, hir_vec![pat_let, body_stmt], None));
+
// `[opt_ident]: loop { ... }`
- let loop_block = P(self.block_expr(match_expr));
let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
hir::LoopSource::ForLoop);
let loop_expr = P(hir::Expr {
}
}
- fn stmt_let(&mut self, sp: Span, mutbl: bool, ident: Name, ex: P<hir::Expr>)
- -> (hir::Stmt, NodeId) {
- let pat = if mutbl {
- self.pat_ident_binding_mode(sp, ident, hir::BindByValue(hir::MutMutable))
- } else {
- self.pat_ident(sp, ident)
- };
- let pat_id = pat.id;
+ fn stmt_let_pat(&mut self,
+ sp: Span,
+ ex: P<hir::Expr>,
+ pat: P<hir::Pat>,
+ source: hir::LocalSource)
+ -> hir::Stmt {
let local = P(hir::Local {
pat: pat,
ty: None,
id: self.next_id(),
span: sp,
attrs: ThinVec::new(),
+ source,
});
let decl = respan(sp, hir::DeclLocal(local));
- (respan(sp, hir::StmtDecl(P(decl), self.next_id())), pat_id)
+ respan(sp, hir::StmtDecl(P(decl), self.next_id()))
+ }
+
+ fn stmt_let(&mut self, sp: Span, mutbl: bool, ident: Name, ex: P<hir::Expr>)
+ -> (hir::Stmt, NodeId) {
+ let pat = if mutbl {
+ self.pat_ident_binding_mode(sp, ident, hir::BindByValue(hir::MutMutable))
+ } else {
+ self.pat_ident(sp, ident)
+ };
+ let pat_id = pat.id;
+ (self.stmt_let_pat(sp, ex, pat, hir::LocalSource::Normal), pat_id)
}
fn block_expr(&mut self, expr: P<hir::Expr>) -> hir::Block {
pub struct DefPathTable {
index_to_key: [Vec<DefKey>; 2],
key_to_index: FxHashMap<DefKey, DefIndex>,
- def_path_hashes: [Vec<Fingerprint>; 2],
+ def_path_hashes: [Vec<DefPathHash>; 2],
}
// Unfortunately we have to provide a manual impl of Clone because of the
fn allocate(&mut self,
key: DefKey,
- def_path_hash: Fingerprint,
+ def_path_hash: DefPathHash,
address_space: DefIndexAddressSpace)
-> DefIndex {
let index = {
}
#[inline(always)]
- pub fn def_path_hash(&self, index: DefIndex) -> Fingerprint {
+ pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
self.def_path_hashes[index.address_space().index()]
[index.as_array_index()]
}
Some(index)
}
+
+ pub fn add_def_path_hashes_to(&self,
+ cnum: CrateNum,
+ out: &mut FxHashMap<DefPathHash, DefId>) {
+ for address_space in &[DefIndexAddressSpace::Low, DefIndexAddressSpace::High] {
+ let start_index = address_space.start();
+ out.extend(
+ (&self.def_path_hashes[address_space.index()])
+ .iter()
+ .enumerate()
+ .map(|(index, &hash)| {
+ let def_id = DefId {
+ krate: cnum,
+ index: DefIndex::new(index + start_index),
+ };
+ (hash, def_id)
+ })
+ );
+ }
+ }
+
+ pub fn size(&self) -> usize {
+ self.key_to_index.len()
+ }
}
let index_to_key_lo: Vec<DefKey> = Decodable::decode(d)?;
let index_to_key_hi: Vec<DefKey> = Decodable::decode(d)?;
- let def_path_hashes_lo: Vec<Fingerprint> = Decodable::decode(d)?;
- let def_path_hashes_hi: Vec<Fingerprint> = Decodable::decode(d)?;
+ let def_path_hashes_lo: Vec<DefPathHash> = Decodable::decode(d)?;
+ let def_path_hashes_hi: Vec<DefPathHash> = Decodable::decode(d)?;
let index_to_key = [index_to_key_lo, index_to_key_hi];
let def_path_hashes = [def_path_hashes_lo, def_path_hashes_hi];
}
impl DefKey {
- fn compute_stable_hash(&self, parent_hash: Fingerprint) -> Fingerprint {
+ fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash {
let mut hasher = StableHasher::new();
// We hash a 0u8 here to disambiguate between regular DefPath hashes,
0u8.hash(&mut hasher);
parent_hash.hash(&mut hasher);
self.disambiguated_data.hash(&mut hasher);
- hasher.finish()
+ DefPathHash(hasher.finish())
}
- fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> Fingerprint {
+ fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash {
let mut hasher = StableHasher::new();
// Disambiguate this from a regular DefPath hash,
// see compute_stable_hash() above.
1u8.hash(&mut hasher);
crate_name.hash(&mut hasher);
crate_disambiguator.hash(&mut hasher);
- hasher.finish()
+ DefPathHash(hasher.finish())
}
}
s.push_str(&tcx.original_crate_name(self.krate).as_str());
s.push_str("/");
- s.push_str(&tcx.crate_disambiguator(self.krate).as_str());
+ // Don't print the whole crate disambiguator. That's just annoying in
+ // debug output.
+ s.push_str(&tcx.crate_disambiguator(self.krate).as_str()[..7]);
for component in &self.data {
write!(s,
Typeof,
}
+#[derive(Copy, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Debug,
+ RustcEncodable, RustcDecodable)]
+pub struct DefPathHash(pub Fingerprint);
+
+impl_stable_hash_for!(tuple_struct DefPathHash { fingerprint });
+
impl Definitions {
/// Create new empty definition map.
pub fn new() -> Definitions {
}
#[inline(always)]
- pub fn def_path_hash(&self, index: DefIndex) -> Fingerprint {
+ pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
self.table.def_path_hash(index)
}
use self::collector::NodeCollector;
pub use self::def_collector::{DefCollector, MacroInvocationData};
pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData,
- DisambiguatedDefPathData};
+ DisambiguatedDefPathData, DefPathHash};
use dep_graph::{DepGraph, DepNode};
pub id: NodeId,
pub span: Span,
pub attrs: ThinVec<Attribute>,
+ pub source: LocalSource,
}
pub type Decl = Spanned<Decl_>;
TypeRelative(P<Ty>, P<PathSegment>)
}
+/// Hints at the original code for a let statement
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+pub enum LocalSource {
+ /// A `match _ { .. }`
+ Normal,
+ /// A desugared `for _ in _ { .. }` loop
+ ForLoopDesugar,
+}
+
/// Hints at the original code for a `match _ { .. }`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum MatchSource {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc_serialize::{Encodable, Decodable, Encoder, Decoder};
use rustc_data_structures::stable_hasher;
use std::mem;
use std::slice;
-#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Clone, Copy)]
+#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Clone, Copy, RustcEncodable, RustcDecodable)]
pub struct Fingerprint(u64, u64);
impl Fingerprint {
}
}
-impl Encodable for Fingerprint {
- #[inline]
- fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- s.emit_u64(self.0.to_le())?;
- s.emit_u64(self.1.to_le())
- }
-}
-
-impl Decodable for Fingerprint {
- #[inline]
- fn decode<D: Decoder>(d: &mut D) -> Result<Fingerprint, D::Error> {
- let _0 = u64::from_le(d.read_u64()?);
- let _1 = u64::from_le(d.read_u64()?);
- Ok(Fingerprint(_0, _1))
- }
-}
-
impl ::std::fmt::Display for Fingerprint {
fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
write!(formatter, "{:x}-{:x}", self.0, self.1)
use hir;
use hir::def_id::DefId;
+use hir::map::DefPathHash;
use ich::{self, CachingCodemapView};
use session::config::DebugInfoLevel::NoDebugInfo;
use ty;
}
#[inline]
- pub fn def_path_hash(&mut self, def_id: DefId) -> ich::Fingerprint {
+ pub fn def_path_hash(&mut self, def_id: DefId) -> DefPathHash {
self.tcx.def_path_hash(def_id)
}
init,
id,
span,
- attrs
+ attrs,
+ source
});
impl_stable_hash_for_spanned!(hir::Decl_);
ExprRepeat(val, times)
});
+impl_stable_hash_for!(enum hir::LocalSource {
+ Normal,
+ ForLoopDesugar
+});
+
impl_stable_hash_for!(enum hir::LoopSource {
Loop,
WhileLet,
use syntax_pos::symbol::InternedString;
use ty;
-impl_stable_hash_for!(struct ty::ItemSubsts<'tcx> { substs });
-
impl<'a, 'tcx, T> HashStable<StableHashingContext<'a, 'tcx>> for &'tcx ty::Slice<T>
where T: HashStable<StableHashingContext<'a, 'tcx>> {
fn hash_stable<W: StableHasherResult>(&self,
ty::adjustment::Adjust::ReifyFnPointer |
ty::adjustment::Adjust::UnsafeFnPointer |
ty::adjustment::Adjust::ClosureFnPointer |
- ty::adjustment::Adjust::MutToConstPointer => {}
- ty::adjustment::Adjust::DerefRef { autoderefs, ref autoref, unsize } => {
- autoderefs.hash_stable(hcx, hasher);
+ ty::adjustment::Adjust::MutToConstPointer |
+ ty::adjustment::Adjust::Unsize => {}
+ ty::adjustment::Adjust::Deref(ref overloaded) => {
+ overloaded.hash_stable(hcx, hasher);
+ }
+ ty::adjustment::Adjust::Borrow(ref autoref) => {
autoref.hash_stable(hcx, hasher);
- unsize.hash_stable(hcx, hasher);
}
}
}
}
impl_stable_hash_for!(struct ty::adjustment::Adjustment<'tcx> { kind, target });
-impl_stable_hash_for!(struct ty::MethodCall { expr_id, autoderef });
-impl_stable_hash_for!(struct ty::MethodCallee<'tcx> { def_id, ty, substs });
+impl_stable_hash_for!(struct ty::adjustment::OverloadedDeref<'tcx> { region, mutbl });
impl_stable_hash_for!(struct ty::UpvarId { var_id, closure_expr_id });
impl_stable_hash_for!(struct ty::UpvarBorrow<'tcx> { kind, region });
hcx: &mut StableHashingContext<'a, 'tcx>,
hasher: &mut StableHasher<W>) {
let ty::TypeckTables {
- ref type_relative_path_defs,
+ ref type_dependent_defs,
ref node_types,
- ref item_substs,
+ ref node_substs,
ref adjustments,
- ref method_map,
ref upvar_capture_map,
ref closure_tys,
ref closure_kinds,
} = *self;
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
- ich::hash_stable_nodemap(hcx, hasher, type_relative_path_defs);
+ ich::hash_stable_nodemap(hcx, hasher, type_dependent_defs);
ich::hash_stable_nodemap(hcx, hasher, node_types);
- ich::hash_stable_nodemap(hcx, hasher, item_substs);
+ ich::hash_stable_nodemap(hcx, hasher, node_substs);
ich::hash_stable_nodemap(hcx, hasher, adjustments);
-
- ich::hash_stable_hashmap(hcx, hasher, method_map, |hcx, method_call| {
- let ty::MethodCall {
- expr_id,
- autoderef
- } = *method_call;
-
- let def_id = hcx.tcx().hir.local_def_id(expr_id);
- (hcx.def_path_hash(def_id), autoderef)
- });
-
ich::hash_stable_hashmap(hcx, hasher, upvar_capture_map, |hcx, up_var_id| {
let ty::UpvarId {
var_id,
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! A nice interface for working with the infcx. The basic idea is to
+//! do `infcx.at(cause, param_env)`, which sets the "cause" of the
+//! operation as well as the surrounding parameter environment. Then
+//! you can do something like `.sub(a, b)` or `.eq(a, b)` to create a
+//! subtype or equality relationship respectively. The first argument
+//! is always the "expected" output from the POV of diagnostics.
+//!
+//! Examples:
+//!
+//! infcx.at(cause, param_env).sub(a, b)
+//! // requires that `a <: b`, with `a` considered the "expected" type
+//!
+//! infcx.at(cause, param_env).sup(a, b)
+//! // requires that `b <: a`, with `a` considered the "expected" type
+//!
+//! infcx.at(cause, param_env).eq(a, b)
+//! // requires that `a == b`, with `a` considered the "expected" type
+//!
+//! For finer-grained control, you can also do use `trace`:
+//!
+//! infcx.at(...).trace(a, b).sub(&c, &d)
+//!
+//! This will set `a` and `b` as the "root" values for
+//! error-reporting, but actually operate on `c` and `d`. This is
+//! sometimes useful when the types of `c` and `d` are not traceable
+//! things. (That system should probably be refactored.)
+
+use super::*;
+
+use ty::relate::{Relate, TypeRelation};
+
+pub struct At<'a, 'gcx: 'tcx, 'tcx: 'a> {
+ infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ cause: &'a ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+}
+
+pub struct Trace<'a, 'gcx: 'tcx, 'tcx: 'a> {
+ at: At<'a, 'gcx, 'tcx>,
+ a_is_expected: bool,
+ trace: TypeTrace<'tcx>,
+}
+
+impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+ pub fn at(&'a self,
+ cause: &'a ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>)
+ -> At<'a, 'gcx, 'tcx>
+ {
+ At { infcx: self, cause, param_env }
+ }
+}
+
+pub trait ToTrace<'tcx>: Relate<'tcx> + Copy {
+ fn to_trace(cause: &ObligationCause<'tcx>,
+ a_is_expected: bool,
+ a: Self,
+ b: Self)
+ -> TypeTrace<'tcx>;
+}
+
+impl<'a, 'gcx, 'tcx> At<'a, 'gcx, 'tcx> {
+ /// Hacky routine for equating two impl headers in coherence.
+ pub fn eq_impl_headers(self,
+ expected: &ty::ImplHeader<'tcx>,
+ actual: &ty::ImplHeader<'tcx>)
+ -> InferResult<'tcx, ()>
+ {
+ debug!("eq_impl_header({:?} = {:?})", expected, actual);
+ match (expected.trait_ref, actual.trait_ref) {
+ (Some(a_ref), Some(b_ref)) =>
+ self.eq(a_ref, b_ref),
+ (None, None) =>
+ self.eq(expected.self_ty, actual.self_ty),
+ _ =>
+ bug!("mk_eq_impl_headers given mismatched impl kinds"),
+ }
+ }
+
+ /// Make `a <: b` where `a` may or may not be expected
+ pub fn sub_exp<T>(self,
+ a_is_expected: bool,
+ a: T,
+ b: T)
+ -> InferResult<'tcx, ()>
+ where T: ToTrace<'tcx>
+ {
+ self.trace_exp(a_is_expected, a, b).sub(&a, &b)
+ }
+
+ /// Make `actual <: expected`. For example, if type-checking a
+ /// call like `foo(x)`, where `foo: fn(i32)`, you might have
+ /// `sup(i32, x)`, since the "expected" type is the type that
+ /// appears in the signature.
+ pub fn sup<T>(self,
+ expected: T,
+ actual: T)
+ -> InferResult<'tcx, ()>
+ where T: ToTrace<'tcx>
+ {
+ self.sub_exp(false, actual, expected)
+ }
+
+ /// Make `expected <: actual`
+ pub fn sub<T>(self,
+ expected: T,
+ actual: T)
+ -> InferResult<'tcx, ()>
+ where T: ToTrace<'tcx>
+ {
+ self.sub_exp(true, expected, actual)
+ }
+
+ /// Make `expected <: actual`
+ pub fn eq_exp<T>(self,
+ a_is_expected: bool,
+ a: T,
+ b: T)
+ -> InferResult<'tcx, ()>
+ where T: ToTrace<'tcx>
+ {
+ self.trace_exp(a_is_expected, a, b).eq(&a, &b)
+ }
+
+ /// Make `expected <: actual`
+ pub fn eq<T>(self,
+ expected: T,
+ actual: T)
+ -> InferResult<'tcx, ()>
+ where T: ToTrace<'tcx>
+ {
+ self.trace(expected, actual).eq(&expected, &actual)
+ }
+
+ /// Compute the least-upper-bound, or mutual supertype, of two
+ /// values. The order of the arguments doesn't matter, but since
+ /// this can result in an error (e.g., if asked to compute LUB of
+ /// u32 and i32), it is meaningful to call one of them the
+ /// "expected type".
+ pub fn lub<T>(self,
+ expected: T,
+ actual: T)
+ -> InferResult<'tcx, T>
+ where T: ToTrace<'tcx>
+ {
+ self.trace(expected, actual).lub(&expected, &actual)
+ }
+
+ /// Compute the greatest-lower-bound, or mutual subtype, of two
+ /// values. As with `lub` order doesn't matter, except for error
+ /// cases.
+ pub fn glb<T>(self,
+ expected: T,
+ actual: T)
+ -> InferResult<'tcx, T>
+ where T: ToTrace<'tcx>
+ {
+ self.trace(expected, actual).glb(&expected, &actual)
+ }
+
+ /// Sets the "trace" values that will be used for
+ /// error-repporting, but doesn't actually perform any operation
+ /// yet (this is useful when you want to set the trace using
+ /// distinct values from those you wish to operate upon).
+ pub fn trace<T>(self,
+ expected: T,
+ actual: T)
+ -> Trace<'a, 'gcx, 'tcx>
+ where T: ToTrace<'tcx>
+ {
+ self.trace_exp(true, expected, actual)
+ }
+
+ /// Like `trace`, but the expected value is determined by the
+ /// boolean argument (if true, then the first argument `a` is the
+ /// "expected" value).
+ pub fn trace_exp<T>(self,
+ a_is_expected: bool,
+ a: T,
+ b: T)
+ -> Trace<'a, 'gcx, 'tcx>
+ where T: ToTrace<'tcx>
+ {
+ let trace = ToTrace::to_trace(self.cause, a_is_expected, a, b);
+ Trace { at: self, trace: trace, a_is_expected }
+ }
+}
+
+impl<'a, 'gcx, 'tcx> Trace<'a, 'gcx, 'tcx> {
+ /// Make `a <: b` where `a` may or may not be expected (if
+ /// `a_is_expected` is true, then `a` is expected).
+ /// Make `expected <: actual`
+ pub fn sub<T>(self,
+ a: &T,
+ b: &T)
+ -> InferResult<'tcx, ()>
+ where T: Relate<'tcx>
+ {
+ debug!("sub({:?} <: {:?})", a, b);
+ let Trace { at, trace, a_is_expected } = self;
+ at.infcx.commit_if_ok(|_| {
+ let mut fields = at.infcx.combine_fields(trace, at.param_env);
+ fields.sub(a_is_expected)
+ .relate(a, b)
+ .map(move |_| InferOk { value: (), obligations: fields.obligations })
+ })
+ }
+
+ /// Make `a == b`; the expectation is set by the call to
+ /// `trace()`.
+ pub fn eq<T>(self,
+ a: &T,
+ b: &T)
+ -> InferResult<'tcx, ()>
+ where T: Relate<'tcx>
+ {
+ debug!("eq({:?} == {:?})", a, b);
+ let Trace { at, trace, a_is_expected } = self;
+ at.infcx.commit_if_ok(|_| {
+ let mut fields = at.infcx.combine_fields(trace, at.param_env);
+ fields.equate(a_is_expected)
+ .relate(a, b)
+ .map(move |_| InferOk { value: (), obligations: fields.obligations })
+ })
+ }
+
+ pub fn lub<T>(self,
+ a: &T,
+ b: &T)
+ -> InferResult<'tcx, T>
+ where T: Relate<'tcx>
+ {
+ debug!("lub({:?} \\/ {:?})", a, b);
+ let Trace { at, trace, a_is_expected } = self;
+ at.infcx.commit_if_ok(|_| {
+ let mut fields = at.infcx.combine_fields(trace, at.param_env);
+ fields.lub(a_is_expected)
+ .relate(a, b)
+ .map(move |t| InferOk { value: t, obligations: fields.obligations })
+ })
+ }
+
+ pub fn glb<T>(self,
+ a: &T,
+ b: &T)
+ -> InferResult<'tcx, T>
+ where T: Relate<'tcx>
+ {
+ debug!("glb({:?} /\\ {:?})", a, b);
+ let Trace { at, trace, a_is_expected } = self;
+ at.infcx.commit_if_ok(|_| {
+ let mut fields = at.infcx.combine_fields(trace, at.param_env);
+ fields.glb(a_is_expected)
+ .relate(a, b)
+ .map(move |t| InferOk { value: t, obligations: fields.obligations })
+ })
+ }
+}
+
+impl<'tcx> ToTrace<'tcx> for Ty<'tcx> {
+ fn to_trace(cause: &ObligationCause<'tcx>,
+ a_is_expected: bool,
+ a: Self,
+ b: Self)
+ -> TypeTrace<'tcx>
+ {
+ TypeTrace {
+ cause: cause.clone(),
+ values: Types(ExpectedFound::new(a_is_expected, a, b))
+ }
+ }
+}
+
+impl<'tcx> ToTrace<'tcx> for ty::TraitRef<'tcx> {
+ fn to_trace(cause: &ObligationCause<'tcx>,
+ a_is_expected: bool,
+ a: Self,
+ b: Self)
+ -> TypeTrace<'tcx>
+ {
+ TypeTrace {
+ cause: cause.clone(),
+ values: TraitRefs(ExpectedFound::new(a_is_expected, a, b))
+ }
+ }
+}
+
+impl<'tcx> ToTrace<'tcx> for ty::PolyTraitRef<'tcx> {
+ fn to_trace(cause: &ObligationCause<'tcx>,
+ a_is_expected: bool,
+ a: Self,
+ b: Self)
+ -> TypeTrace<'tcx>
+ {
+ TypeTrace {
+ cause: cause.clone(),
+ values: PolyTraitRefs(ExpectedFound::new(a_is_expected, a, b))
+ }
+ }
+}
pub infcx: &'infcx InferCtxt<'infcx, 'gcx, 'tcx>,
pub trace: TypeTrace<'tcx>,
pub cause: Option<ty::relate::Cause>,
+ pub param_env: ty::ParamEnv<'tcx>,
pub obligations: PredicateObligations<'tcx>,
}
if needs_wf {
self.obligations.push(Obligation::new(self.trace.cause.clone(),
+ self.param_env,
ty::Predicate::WellFormed(b_ty)));
}
use ty::{self, Ty, TyCtxt};
use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
-use ty::relate::{Relate, RelateResult, TypeRelation};
+use ty::relate::RelateResult;
use traits::{self, ObligationCause, PredicateObligations, Reveal};
use rustc_data_structures::unify::{self, UnificationTable};
use std::cell::{Cell, RefCell, Ref, RefMut};
use self::type_variable::TypeVariableOrigin;
use self::unify_key::ToType;
+pub mod at;
mod combine;
mod equate;
pub mod error_reporting;
// For region variables.
region_vars: RegionVarBindings<'a, 'gcx, 'tcx>,
- pub param_env: ty::ParamEnv<'gcx>,
-
/// Caches the results of trait selection. This cache is used
/// for things that have to do with the parameters in scope.
pub selection_cache: traits::SelectionCache<'tcx>,
// avoid reporting the same error twice.
pub reported_trait_errors: RefCell<FxHashSet<traits::TraitErrorKey<'tcx>>>,
- // Sadly, the behavior of projection varies a bit depending on the
- // stage of compilation. The specifics are given in the
- // documentation for `Reveal`.
- projection_mode: Reveal,
-
// When an error occurs, we want to avoid reporting "derived"
// errors that are due to this original failure. Normally, we
// handle this with the `err_count_on_creation` count, which
pub trait InferEnv<'a, 'tcx> {
fn to_parts(self, tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> (Option<&'a ty::TypeckTables<'tcx>>,
- Option<ty::TypeckTables<'tcx>>,
- Option<ty::ParamEnv<'tcx>>);
+ Option<ty::TypeckTables<'tcx>>);
}
impl<'a, 'tcx> InferEnv<'a, 'tcx> for () {
fn to_parts(self, _: TyCtxt<'a, 'tcx, 'tcx>)
-> (Option<&'a ty::TypeckTables<'tcx>>,
- Option<ty::TypeckTables<'tcx>>,
- Option<ty::ParamEnv<'tcx>>) {
- (None, None, None)
- }
-}
-
-impl<'a, 'tcx> InferEnv<'a, 'tcx> for ty::ParamEnv<'tcx> {
- fn to_parts(self, _: TyCtxt<'a, 'tcx, 'tcx>)
- -> (Option<&'a ty::TypeckTables<'tcx>>,
- Option<ty::TypeckTables<'tcx>>,
- Option<ty::ParamEnv<'tcx>>) {
- (None, None, Some(self))
+ Option<ty::TypeckTables<'tcx>>) {
+ (None, None)
}
}
-impl<'a, 'tcx> InferEnv<'a, 'tcx> for (&'a ty::TypeckTables<'tcx>, ty::ParamEnv<'tcx>) {
+impl<'a, 'tcx> InferEnv<'a, 'tcx> for &'a ty::TypeckTables<'tcx> {
fn to_parts(self, _: TyCtxt<'a, 'tcx, 'tcx>)
-> (Option<&'a ty::TypeckTables<'tcx>>,
- Option<ty::TypeckTables<'tcx>>,
- Option<ty::ParamEnv<'tcx>>) {
- (Some(self.0), None, Some(self.1))
+ Option<ty::TypeckTables<'tcx>>) {
+ (Some(self), None)
}
}
-impl<'a, 'tcx> InferEnv<'a, 'tcx> for (ty::TypeckTables<'tcx>, ty::ParamEnv<'tcx>) {
+impl<'a, 'tcx> InferEnv<'a, 'tcx> for ty::TypeckTables<'tcx> {
fn to_parts(self, _: TyCtxt<'a, 'tcx, 'tcx>)
-> (Option<&'a ty::TypeckTables<'tcx>>,
- Option<ty::TypeckTables<'tcx>>,
- Option<ty::ParamEnv<'tcx>>) {
- (None, Some(self.0), Some(self.1))
+ Option<ty::TypeckTables<'tcx>>) {
+ (None, Some(self))
}
}
impl<'a, 'tcx> InferEnv<'a, 'tcx> for hir::BodyId {
fn to_parts(self, tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> (Option<&'a ty::TypeckTables<'tcx>>,
- Option<ty::TypeckTables<'tcx>>,
- Option<ty::ParamEnv<'tcx>>) {
+ Option<ty::TypeckTables<'tcx>>) {
let def_id = tcx.hir.body_owner_def_id(self);
- (Some(tcx.typeck_tables_of(def_id)),
- None,
- Some(tcx.param_env(def_id)))
+ (Some(tcx.typeck_tables_of(def_id)), None)
}
}
arena: DroplessArena,
fresh_tables: Option<RefCell<ty::TypeckTables<'tcx>>>,
tables: Option<&'a ty::TypeckTables<'gcx>>,
- param_env: Option<ty::ParamEnv<'gcx>>,
- projection_mode: Reveal,
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'gcx> {
- pub fn infer_ctxt<E: InferEnv<'a, 'gcx>>(self,
- env: E,
- projection_mode: Reveal)
- -> InferCtxtBuilder<'a, 'gcx, 'tcx> {
- let (tables, fresh_tables, param_env) = env.to_parts(self);
+ pub fn infer_ctxt<E: InferEnv<'a, 'gcx>>(self, env: E) -> InferCtxtBuilder<'a, 'gcx, 'tcx> {
+ let (tables, fresh_tables) = env.to_parts(self);
InferCtxtBuilder {
global_tcx: self,
arena: DroplessArena::new(),
fresh_tables: fresh_tables.map(RefCell::new),
tables: tables,
- param_env: param_env,
- projection_mode: projection_mode,
}
}
/// If any inference functionality is used, ICEs will occur.
pub fn borrowck_fake_infer_ctxt(self, body: hir::BodyId)
-> InferCtxt<'a, 'gcx, 'gcx> {
- let (tables, _, param_env) = body.to_parts(self);
+ let (tables, _) = body.to_parts(self);
InferCtxt {
tcx: self,
tables: InferTables::Interned(tables.unwrap()),
int_unification_table: RefCell::new(UnificationTable::new()),
float_unification_table: RefCell::new(UnificationTable::new()),
region_vars: RegionVarBindings::new(self),
- param_env: param_env.unwrap(),
selection_cache: traits::SelectionCache::new(),
evaluation_cache: traits::EvaluationCache::new(),
projection_cache: RefCell::new(traits::ProjectionCache::new()),
reported_trait_errors: RefCell::new(FxHashSet()),
- projection_mode: Reveal::UserFacing,
tainted_by_errors_flag: Cell::new(false),
err_count_on_creation: self.sess.err_count(),
in_snapshot: Cell::new(false),
ref arena,
ref fresh_tables,
tables,
- ref mut param_env,
- projection_mode,
} = *self;
let tables = tables.map(InferTables::Interned).unwrap_or_else(|| {
fresh_tables.as_ref().map_or(InferTables::Missing, InferTables::InProgress)
});
- let param_env = param_env.take().unwrap_or_else(|| ty::ParamEnv::empty());
global_tcx.enter_local(arena, |tcx| f(InferCtxt {
tcx: tcx,
tables: tables,
int_unification_table: RefCell::new(UnificationTable::new()),
float_unification_table: RefCell::new(UnificationTable::new()),
region_vars: RegionVarBindings::new(tcx),
- param_env: param_env,
selection_cache: traits::SelectionCache::new(),
evaluation_cache: traits::EvaluationCache::new(),
reported_trait_errors: RefCell::new(FxHashSet()),
- projection_mode: projection_mode,
tainted_by_errors_flag: Cell::new(false),
err_count_on_creation: tcx.sess.err_count(),
in_snapshot: Cell::new(false),
}
#[must_use = "once you start a snapshot, you should always consume it"]
-pub struct CombinedSnapshot {
+pub struct CombinedSnapshot<'a, 'tcx:'a> {
projection_cache_snapshot: traits::ProjectionCacheSnapshot,
type_snapshot: type_variable::Snapshot,
int_snapshot: unify::Snapshot<ty::IntVid>,
float_snapshot: unify::Snapshot<ty::FloatVid>,
region_vars_snapshot: RegionSnapshot,
was_in_snapshot: bool,
+ _in_progress_tables: Option<Ref<'a, ty::TypeckTables<'tcx>>>,
}
/// Helper trait for shortening the lifetimes inside a
/// value for post-type-checking normalization.
pub trait TransNormalize<'gcx>: TypeFoldable<'gcx> {
- fn trans_normalize<'a, 'tcx>(&self, infcx: &InferCtxt<'a, 'gcx, 'tcx>) -> Self;
+ fn trans_normalize<'a, 'tcx>(&self,
+ infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>)
+ -> Self;
}
macro_rules! items { ($($item:item)+) => ($($item)+) }
($lt_gcx:tt, $($ty:ty),+) => {
items!($(impl<$lt_gcx> TransNormalize<$lt_gcx> for $ty {
fn trans_normalize<'a, 'tcx>(&self,
- infcx: &InferCtxt<'a, $lt_gcx, 'tcx>)
+ infcx: &InferCtxt<'a, $lt_gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>)
-> Self {
- infcx.normalize_projections_in(self)
+ infcx.normalize_projections_in(param_env, self)
}
})+);
}
);
impl<'gcx> TransNormalize<'gcx> for LvalueTy<'gcx> {
- fn trans_normalize<'a, 'tcx>(&self, infcx: &InferCtxt<'a, 'gcx, 'tcx>) -> Self {
+ fn trans_normalize<'a, 'tcx>(&self,
+ infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>)
+ -> Self {
match *self {
- LvalueTy::Ty { ty } => LvalueTy::Ty { ty: ty.trans_normalize(infcx) },
+ LvalueTy::Ty { ty } => LvalueTy::Ty { ty: ty.trans_normalize(infcx, param_env) },
LvalueTy::Downcast { adt_def, substs, variant_index } => {
LvalueTy::Downcast {
adt_def: adt_def,
- substs: substs.trans_normalize(infcx),
+ substs: substs.trans_normalize(infcx, param_env),
variant_index: variant_index
}
}
self.normalize_associated_type(&value)
}
+ /// Fully normalizes any associated types in `value`, using an
+ /// empty environment and `Reveal::All` mode (therefore, suitable
+ /// only for monomorphized code during trans, basically).
pub fn normalize_associated_type<T>(self, value: &T) -> T
where T: TransNormalize<'tcx>
{
debug!("normalize_associated_type(t={:?})", value);
+ let param_env = ty::ParamEnv::empty(Reveal::All);
let value = self.erase_regions(value);
if !value.has_projection_types() {
return value;
}
- self.infer_ctxt((), Reveal::All).enter(|infcx| {
- value.trans_normalize(&infcx)
+ self.infer_ctxt(()).enter(|infcx| {
+ value.trans_normalize(&infcx, param_env)
})
}
+ /// Does a best-effort to normalize any associated types in
+ /// `value`; this includes revealing specializable types, so this
+ /// should be not be used during type-checking, but only during
+ /// optimization and code generation.
pub fn normalize_associated_type_in_env<T>(
self, value: &T, env: ty::ParamEnv<'tcx>
) -> T
return value;
}
- self.infer_ctxt(env, Reveal::All).enter(|infcx| {
- value.trans_normalize(&infcx)
+ self.infer_ctxt(()).enter(|infcx| {
+ value.trans_normalize(&infcx, env.reveal_all())
})
}
}
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
- fn normalize_projections_in<T>(&self, value: &T) -> T::Lifted
+ fn normalize_projections_in<T>(&self, param_env: ty::ParamEnv<'tcx>, value: &T) -> T::Lifted
where T: TypeFoldable<'tcx> + ty::Lift<'gcx>
{
let mut selcx = traits::SelectionContext::new(self);
let cause = traits::ObligationCause::dummy();
let traits::Normalized { value: result, obligations } =
- traits::normalize(&mut selcx, cause, value);
+ traits::normalize(&mut selcx, param_env, cause, value);
debug!("normalize_projections_in: result={:?} obligations={:?}",
result, obligations);
}
}
- pub fn projection_mode(&self) -> Reveal {
- self.projection_mode
- }
-
pub fn is_in_snapshot(&self) -> bool {
self.in_snapshot.get()
}
return variables;
}
- fn combine_fields(&'a self, trace: TypeTrace<'tcx>)
+ fn combine_fields(&'a self, trace: TypeTrace<'tcx>, param_env: ty::ParamEnv<'tcx>)
-> CombineFields<'a, 'gcx, 'tcx> {
CombineFields {
infcx: self,
trace: trace,
cause: None,
+ param_env,
obligations: PredicateObligations::new(),
}
}
- pub fn equate<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T)
- -> InferResult<'tcx, T>
- where T: Relate<'tcx>
- {
- let mut fields = self.combine_fields(trace);
- let result = fields.equate(a_is_expected).relate(a, b);
- result.map(move |t| InferOk { value: t, obligations: fields.obligations })
- }
-
- pub fn sub<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T)
- -> InferResult<'tcx, T>
- where T: Relate<'tcx>
- {
- let mut fields = self.combine_fields(trace);
- let result = fields.sub(a_is_expected).relate(a, b);
- result.map(move |t| InferOk { value: t, obligations: fields.obligations })
- }
-
- pub fn lub<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T)
- -> InferResult<'tcx, T>
- where T: Relate<'tcx>
- {
- let mut fields = self.combine_fields(trace);
- let result = fields.lub(a_is_expected).relate(a, b);
- result.map(move |t| InferOk { value: t, obligations: fields.obligations })
- }
-
- pub fn glb<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T)
- -> InferResult<'tcx, T>
- where T: Relate<'tcx>
- {
- let mut fields = self.combine_fields(trace);
- let result = fields.glb(a_is_expected).relate(a, b);
- result.map(move |t| InferOk { value: t, obligations: fields.obligations })
- }
-
// Clear the "currently in a snapshot" flag, invoke the closure,
// then restore the flag to its original value. This flag is a
// debugging measure designed to detect cases where we start a
result
}
- fn start_snapshot(&self) -> CombinedSnapshot {
+ fn start_snapshot<'b>(&'b self) -> CombinedSnapshot<'b, 'tcx> {
debug!("start_snapshot()");
let in_snapshot = self.in_snapshot.get();
float_snapshot: self.float_unification_table.borrow_mut().snapshot(),
region_vars_snapshot: self.region_vars.start_snapshot(),
was_in_snapshot: in_snapshot,
+ // Borrow tables "in progress" (i.e. during typeck)
+ // to ban writes from within a snapshot to them.
+ _in_progress_tables: match self.tables {
+ InferTables::InProgress(ref tables) => tables.try_borrow().ok(),
+ _ => None
+ }
}
}
int_snapshot,
float_snapshot,
region_vars_snapshot,
- was_in_snapshot } = snapshot;
+ was_in_snapshot,
+ _in_progress_tables } = snapshot;
self.in_snapshot.set(was_in_snapshot);
int_snapshot,
float_snapshot,
region_vars_snapshot,
- was_in_snapshot } = snapshot;
+ was_in_snapshot,
+ _in_progress_tables } = snapshot;
self.in_snapshot.set(was_in_snapshot);
self.region_vars.add_given(sub, sup);
}
- pub fn sub_types(&self,
- a_is_expected: bool,
- cause: &ObligationCause<'tcx>,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> InferResult<'tcx, ()>
- {
- debug!("sub_types({:?} <: {:?})", a, b);
- self.commit_if_ok(|_| {
- let trace = TypeTrace::types(cause, a_is_expected, a, b);
- self.sub(a_is_expected, trace, &a, &b).map(|ok| ok.unit())
- })
- }
-
- pub fn can_sub_types(&self,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> UnitResult<'tcx>
+ pub fn can_sub<T>(&self,
+ param_env: ty::ParamEnv<'tcx>,
+ a: T,
+ b: T)
+ -> UnitResult<'tcx>
+ where T: at::ToTrace<'tcx>
{
+ let origin = &ObligationCause::dummy();
self.probe(|_| {
- let origin = &ObligationCause::dummy();
- let trace = TypeTrace::types(origin, true, a, b);
- self.sub(true, trace, &a, &b).map(|InferOk { obligations: _, .. }| {
+ self.at(origin, param_env).sub(a, b).map(|InferOk { obligations: _, .. }| {
// Ignore obligations, since we are unrolling
// everything anyway.
})
})
}
- pub fn eq_types(&self,
- a_is_expected: bool,
- cause: &ObligationCause<'tcx>,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> InferResult<'tcx, ()>
- {
- self.commit_if_ok(|_| {
- let trace = TypeTrace::types(cause, a_is_expected, a, b);
- self.equate(a_is_expected, trace, &a, &b).map(|ok| ok.unit())
- })
- }
-
- pub fn eq_trait_refs(&self,
- a_is_expected: bool,
- cause: &ObligationCause<'tcx>,
- a: ty::TraitRef<'tcx>,
- b: ty::TraitRef<'tcx>)
- -> InferResult<'tcx, ()>
- {
- debug!("eq_trait_refs({:?} = {:?})", a, b);
- self.commit_if_ok(|_| {
- let trace = TypeTrace {
- cause: cause.clone(),
- values: TraitRefs(ExpectedFound::new(a_is_expected, a, b))
- };
- self.equate(a_is_expected, trace, &a, &b).map(|ok| ok.unit())
- })
- }
-
- pub fn eq_impl_headers(&self,
- a_is_expected: bool,
- cause: &ObligationCause<'tcx>,
- a: &ty::ImplHeader<'tcx>,
- b: &ty::ImplHeader<'tcx>)
- -> InferResult<'tcx, ()>
- {
- debug!("eq_impl_header({:?} = {:?})", a, b);
- match (a.trait_ref, b.trait_ref) {
- (Some(a_ref), Some(b_ref)) => self.eq_trait_refs(a_is_expected, cause, a_ref, b_ref),
- (None, None) => self.eq_types(a_is_expected, cause, a.self_ty, b.self_ty),
- _ => bug!("mk_eq_impl_headers given mismatched impl kinds"),
- }
- }
-
- pub fn sub_poly_trait_refs(&self,
- a_is_expected: bool,
- cause: ObligationCause<'tcx>,
- a: ty::PolyTraitRef<'tcx>,
- b: ty::PolyTraitRef<'tcx>)
- -> InferResult<'tcx, ()>
+ pub fn can_eq<T>(&self,
+ param_env: ty::ParamEnv<'tcx>,
+ a: T,
+ b: T)
+ -> UnitResult<'tcx>
+ where T: at::ToTrace<'tcx>
{
- debug!("sub_poly_trait_refs({:?} <: {:?})", a, b);
- self.commit_if_ok(|_| {
- let trace = TypeTrace {
- cause: cause,
- values: PolyTraitRefs(ExpectedFound::new(a_is_expected, a, b))
- };
- self.sub(a_is_expected, trace, &a, &b).map(|ok| ok.unit())
+ let origin = &ObligationCause::dummy();
+ self.probe(|_| {
+ self.at(origin, param_env).eq(a, b).map(|InferOk { obligations: _, .. }| {
+ // Ignore obligations, since we are unrolling
+ // everything anyway.
+ })
})
}
pub fn equality_predicate(&self,
cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
predicate: &ty::PolyEquatePredicate<'tcx>)
-> InferResult<'tcx, ()>
{
let (ty::EquatePredicate(a, b), skol_map) =
self.skolemize_late_bound_regions(predicate, snapshot);
let cause_span = cause.span;
- let eqty_ok = self.eq_types(false, cause, a, b)?;
+ let eqty_ok = self.at(cause, param_env).eq(b, a)?;
self.leak_check(false, cause_span, &skol_map, snapshot)?;
self.pop_skolemized(skol_map, snapshot);
Ok(eqty_ok.unit())
pub fn subtype_predicate(&self,
cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
predicate: &ty::PolySubtypePredicate<'tcx>)
-> Option<InferResult<'tcx, ()>>
{
self.skolemize_late_bound_regions(predicate, snapshot);
let cause_span = cause.span;
- let ok = self.sub_types(a_is_expected, cause, a, b)?;
+ let ok = self.at(cause, param_env).sub_exp(a_is_expected, a, b)?;
self.leak_check(false, cause_span, &skol_map, snapshot)?;
self.pop_skolemized(skol_map, snapshot);
Ok(ok.unit())
/// details.
pub fn match_poly_projection_predicate(&self,
cause: ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
match_a: ty::PolyProjectionPredicate<'tcx>,
match_b: ty::TraitRef<'tcx>)
-> InferResult<'tcx, HrMatchResult<Ty<'tcx>>>
};
let match_pair = match_a.map_bound(|p| (p.projection_ty.trait_ref, p.ty));
- let mut combine = self.combine_fields(trace);
+ let mut combine = self.combine_fields(trace, param_env);
let result = combine.higher_ranked_match(span, &match_pair, &match_b, true)?;
Ok(InferOk { value: result, obligations: combine.obligations })
}
self.region_vars.verify_generic_bound(origin, kind, a, bound);
}
- pub fn can_equate<T>(&self, a: &T, b: &T) -> UnitResult<'tcx>
- where T: Relate<'tcx> + fmt::Debug
- {
- debug!("can_equate({:?}, {:?})", a, b);
- self.probe(|_| {
- // Gin up a dummy trace, since this won't be committed
- // anyhow. We should make this typetrace stuff more
- // generic so we don't have to do anything quite this
- // terrible.
- let trace = TypeTrace::dummy(self.tcx);
- self.equate(true, trace, a, b).map(|InferOk { obligations: _, .. }| {
- // We can intentionally ignore obligations here, since
- // this is part of a simple test for general
- // "equatability". However, it's not entirely clear
- // that we *ought* to be, perhaps a better thing would
- // be to use a mini-fulfillment context or something
- // like that.
- })
- })
- }
-
pub fn node_ty(&self, id: ast::NodeId) -> McResult<Ty<'tcx>> {
let ty = self.node_type(id);
self.resolve_type_vars_or_error(&ty)
self.resolve_type_vars_or_error(&ty)
}
- pub fn type_moves_by_default(&self, ty: Ty<'tcx>, span: Span) -> bool {
+ pub fn type_moves_by_default(&self,
+ param_env: ty::ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+ span: Span)
+ -> bool {
let ty = self.resolve_type_vars_if_possible(&ty);
- if let Some(ty) = self.tcx.lift_to_global(&ty) {
+ if let Some((param_env, ty)) = self.tcx.lift_to_global(&(param_env, ty)) {
// Even if the type may have no inference variables, during
// type-checking closure types are in local tables only.
let local_closures = match self.tables {
_ => false
};
if !local_closures {
- return ty.moves_by_default(self.tcx.global_tcx(), self.param_env(), span);
+ return ty.moves_by_default(self.tcx.global_tcx(), param_env, span);
}
}
// rightly refuses to work with inference variables, but
// moves_by_default has a cache, which we want to use in other
// cases.
- !traits::type_known_to_meet_bound(self, ty, copy_def_id, span)
- }
-
- pub fn node_method_ty(&self, method_call: ty::MethodCall)
- -> Option<Ty<'tcx>> {
- self.tables
- .borrow()
- .method_map
- .get(&method_call)
- .map(|method| method.ty)
- .map(|ty| self.resolve_type_vars_if_possible(&ty))
- }
-
- pub fn node_method_id(&self, method_call: ty::MethodCall)
- -> Option<DefId> {
- self.tables
- .borrow()
- .method_map
- .get(&method_call)
- .map(|method| method.def_id)
- }
-
- pub fn is_method_call(&self, id: ast::NodeId) -> bool {
- self.tables.borrow().method_map.contains_key(&ty::MethodCall::expr(id))
+ !traits::type_known_to_meet_bound(self, param_env, ty, copy_def_id, span)
}
pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture<'tcx>> {
self.tables.borrow().upvar_capture_map.get(&upvar_id).cloned()
}
- pub fn param_env(&self) -> ty::ParamEnv<'gcx> {
- self.param_env
- }
-
pub fn closure_kind(&self,
def_id: DefId)
-> Option<ty::ClosureKind>
self.fields.obligations.push(
Obligation::new(
self.fields.trace.cause.clone(),
+ self.fields.param_env,
ty::Predicate::Subtype(
ty::Binder(ty::SubtypePredicate {
a_is_expected: self.a_is_expected,
#![cfg_attr(stage0, feature(staged_api))]
#![cfg_attr(stage0, feature(loop_break_value))]
-#![recursion_limit="192"]
+#![recursion_limit="256"]
extern crate arena;
extern crate core;
use dep_graph::DepNode;
use hir::def_id::{CrateNum, DefId, DefIndex};
use hir::map as hir_map;
-use hir::map::definitions::{Definitions, DefKey, DisambiguatedDefPathData};
+use hir::map::definitions::{Definitions, DefKey, DisambiguatedDefPathData,
+ DefPathTable};
use hir::svh::Svh;
use ich;
use middle::lang_items;
-> Option<DefId>;
fn def_key(&self, def: DefId) -> DefKey;
fn def_path(&self, def: DefId) -> hir_map::DefPath;
- fn def_path_hash(&self, def: DefId) -> ich::Fingerprint;
+ fn def_path_hash(&self, def: DefId) -> hir_map::DefPathHash;
+ fn def_path_table(&self, cnum: CrateNum) -> Rc<DefPathTable>;
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>;
fn item_children(&self, did: DefId) -> Vec<def::Export>;
fn load_macro(&self, did: DefId, sess: &Session) -> LoadedMacro;
fn def_path(&self, def: DefId) -> hir_map::DefPath {
bug!("relative_def_path")
}
- fn def_path_hash(&self, def: DefId) -> ich::Fingerprint {
- bug!("wa")
+ fn def_path_hash(&self, def: DefId) -> hir_map::DefPathHash {
+ bug!("def_path_hash")
+ }
+ fn def_path_table(&self, cnum: CrateNum) -> Rc<DefPathTable> {
+ bug!("def_path_table")
}
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name> { bug!("struct_field_names") }
fn item_children(&self, did: DefId) -> Vec<def::Export> { bug!("item_children") }
}
fn lookup_and_handle_method(&mut self, id: ast::NodeId) {
- let method_call = ty::MethodCall::expr(id);
- let method = self.tables.method_map[&method_call];
- self.check_def_id(method.def_id);
+ self.check_def_id(self.tables.type_dependent_defs[&id].def_id());
}
fn handle_field_access(&mut self, lhs: &hir::Expr, name: ast::Name) {
use self::RootUnsafeContext::*;
use ty::{self, Ty, TyCtxt};
-use ty::MethodCall;
use lint;
use syntax::ast;
fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
match expr.node {
hir::ExprMethodCall(..) => {
- let method_call = MethodCall::expr(expr.id);
- let base_type = self.tables.method_map[&method_call].ty;
+ let def_id = self.tables.type_dependent_defs[&expr.id].def_id();
+ let base_type = self.tcx.type_of(def_id);
debug!("effect: method call case, base type is {:?}",
base_type);
if type_is_unsafe_function(base_type) {
pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
delegate: &'a mut Delegate<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
}
// If the TYPER results in an error, it's because the type check
)
}
-/// Whether the elements of an overloaded operation are passed by value or by reference
-enum PassArgs {
- ByValue,
- ByRef,
-}
-
impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
pub fn new(delegate: &'a mut (Delegate<'tcx>+'a),
region_maps: &'a RegionMaps,
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>)
+ infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>)
-> Self
{
ExprUseVisitor::with_options(delegate,
infcx,
+ param_env,
region_maps,
mc::MemCategorizationOptions::default())
}
pub fn with_options(delegate: &'a mut (Delegate<'tcx>+'a),
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
region_maps: &'a RegionMaps,
options: mc::MemCategorizationOptions)
-> Self
{
ExprUseVisitor {
mc: mc::MemCategorizationContext::with_options(infcx, region_maps, options),
- delegate: delegate
+ delegate,
+ param_env,
}
}
arg.id,
arg.pat.span,
fn_body_scope_r, // Args live only as long as the fn body.
- fn_body_scope_r,
arg_ty);
self.walk_irrefutable_pat(arg_cmt, &arg.pat);
debug!("delegate_consume(consume_id={}, cmt={:?})",
consume_id, cmt);
- let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove);
+ let mode = copy_or_move(self.mc.infcx, self.param_env, &cmt, DirectRefMove);
self.delegate.consume(consume_id, consume_span, cmt, mode);
}
}
hir::ExprUnary(hir::UnDeref, ref base) => { // *base
- if !self.walk_overloaded_operator(expr, &base, Vec::new(), PassArgs::ByRef) {
- self.select_from_expr(&base);
- }
+ self.select_from_expr(&base);
}
hir::ExprField(ref base, _) => { // base.f
}
hir::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs]
- if !self.walk_overloaded_operator(expr,
- &lhs,
- vec![&rhs],
- PassArgs::ByValue) {
- self.select_from_expr(&lhs);
- self.consume_expr(&rhs);
- }
+ self.select_from_expr(&lhs);
+ self.consume_expr(&rhs);
}
hir::ExprCall(ref callee, ref args) => { // callee(args)
self.walk_block(&blk);
}
- hir::ExprUnary(op, ref lhs) => {
- let pass_args = if op.is_by_value() {
- PassArgs::ByValue
- } else {
- PassArgs::ByRef
- };
-
- if !self.walk_overloaded_operator(expr, &lhs, Vec::new(), pass_args) {
- self.consume_expr(&lhs);
- }
+ hir::ExprUnary(_, ref lhs) => {
+ self.consume_expr(&lhs);
}
- hir::ExprBinary(op, ref lhs, ref rhs) => {
- let pass_args = if op.node.is_by_value() {
- PassArgs::ByValue
- } else {
- PassArgs::ByRef
- };
-
- if !self.walk_overloaded_operator(expr, &lhs, vec![&rhs], pass_args) {
- self.consume_expr(&lhs);
- self.consume_expr(&rhs);
- }
+ hir::ExprBinary(_, ref lhs, ref rhs) => {
+ self.consume_expr(&lhs);
+ self.consume_expr(&rhs);
}
hir::ExprBlock(ref blk) => {
self.consume_expr(&base);
}
- hir::ExprAssignOp(op, ref lhs, ref rhs) => {
- // NB All our assignment operations take the RHS by value
- assert!(op.node.is_by_value());
-
- if !self.walk_overloaded_operator(expr, lhs, vec![rhs], PassArgs::ByValue) {
+ hir::ExprAssignOp(_, ref lhs, ref rhs) => {
+ if self.mc.infcx.tables.borrow().is_method_call(expr) {
+ self.consume_expr(lhs);
+ } else {
self.mutate_expr(expr, &lhs, MutateMode::WriteAndRead);
- self.consume_expr(&rhs);
}
+ self.consume_expr(&rhs);
}
hir::ExprRepeat(ref base, _) => {
}
ty::TyError => { }
_ => {
- let overloaded_call_type =
- match self.mc.infcx.node_method_id(ty::MethodCall::expr(call.id)) {
- Some(method_id) => {
- OverloadedCallType::from_method_id(self.tcx(), method_id)
- }
- None => {
- span_bug!(
- callee.span,
- "unexpected callee type {}",
- callee_ty)
- }
- };
- match overloaded_call_type {
+ let def_id = self.mc.infcx.tables.borrow().type_dependent_defs[&call.id].def_id();
+ match OverloadedCallType::from_method_id(self.tcx(), def_id) {
FnMutOverloadedCall => {
let call_scope_r = self.tcx().node_scope_region(call.id);
self.borrow_expr(callee,
// consumed or borrowed as part of the automatic adjustment
// process.
fn walk_adjustment(&mut self, expr: &hir::Expr) {
- let infcx = self.mc.infcx;
//NOTE(@jroesch): mixed RefCell borrow causes crash
- let adj = infcx.tables.borrow().adjustments.get(&expr.id).map(|x| x.clone());
- if let Some(adjustment) = adj {
+ let adjustments = self.mc.infcx.tables.borrow().expr_adjustments(expr).to_vec();
+ let mut cmt = return_if_err!(self.mc.cat_expr_unadjusted(expr));
+ for adjustment in adjustments {
+ debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
match adjustment.kind {
adjustment::Adjust::NeverToAny |
adjustment::Adjust::ReifyFnPointer |
adjustment::Adjust::UnsafeFnPointer |
adjustment::Adjust::ClosureFnPointer |
- adjustment::Adjust::MutToConstPointer => {
+ adjustment::Adjust::MutToConstPointer |
+ adjustment::Adjust::Unsize => {
// Creating a closure/fn-pointer or unsizing consumes
// the input and stores it into the resulting rvalue.
- debug!("walk_adjustment: trivial adjustment");
- let cmt_unadjusted =
- return_if_err!(self.mc.cat_expr_unadjusted(expr));
- self.delegate_consume(expr.id, expr.span, cmt_unadjusted);
+ self.delegate_consume(expr.id, expr.span, cmt.clone());
}
- adjustment::Adjust::DerefRef { autoderefs, autoref, unsize } => {
- debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
- self.walk_autoderefs(expr, autoderefs);
-
- let cmt_derefd =
- return_if_err!(self.mc.cat_expr_autoderefd(expr, autoderefs));
-
- let cmt_refd =
- self.walk_autoref(expr, cmt_derefd, autoref);
-
- if unsize {
- // Unsizing consumes the thin pointer and produces a fat one.
- self.delegate_consume(expr.id, expr.span, cmt_refd);
- }
+ adjustment::Adjust::Deref(None) => {}
+
+ // Autoderefs for overloaded Deref calls in fact reference
+ // their receiver. That is, if we have `(*x)` where `x`
+ // is of type `Rc<T>`, then this in fact is equivalent to
+ // `x.deref()`. Since `deref()` is declared with `&self`,
+ // this is an autoref of `x`.
+ adjustment::Adjust::Deref(Some(ref deref)) => {
+ let bk = ty::BorrowKind::from_mutbl(deref.mutbl);
+ self.delegate.borrow(expr.id, expr.span, cmt.clone(),
+ deref.region, bk, AutoRef);
}
- }
- }
- }
- /// Autoderefs for overloaded Deref calls in fact reference their receiver. That is, if we have
- /// `(*x)` where `x` is of type `Rc<T>`, then this in fact is equivalent to `x.deref()`. Since
- /// `deref()` is declared with `&self`, this is an autoref of `x`.
- fn walk_autoderefs(&mut self,
- expr: &hir::Expr,
- autoderefs: usize) {
- debug!("walk_autoderefs expr={:?} autoderefs={}", expr, autoderefs);
-
- for i in 0..autoderefs {
- let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
- if let Some(method_ty) = self.mc.infcx.node_method_ty(deref_id) {
- let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
-
- // the method call infrastructure should have
- // replaced all late-bound regions with variables:
- let self_ty = method_ty.fn_sig().input(0);
- let self_ty = self.tcx().no_late_bound_regions(&self_ty).unwrap();
-
- let (m, r) = match self_ty.sty {
- ty::TyRef(r, ref m) => (m.mutbl, r),
- _ => span_bug!(expr.span,
- "bad overloaded deref type {:?}",
- method_ty)
- };
- let bk = ty::BorrowKind::from_mutbl(m);
- self.delegate.borrow(expr.id, expr.span, cmt,
- r, bk, AutoRef);
+ adjustment::Adjust::Borrow(ref autoref) => {
+ self.walk_autoref(expr, cmt.clone(), autoref);
+ }
}
+ cmt = return_if_err!(self.mc.cat_expr_adjusted(expr, cmt, &adjustment));
}
}
- /// Walks the autoref `opt_autoref` applied to the autoderef'd
- /// `expr`. `cmt_derefd` is the mem-categorized form of `expr`
- /// after all relevant autoderefs have occurred. Because AutoRefs
- /// can be recursive, this function is recursive: it first walks
- /// deeply all the way down the autoref chain, and then processes
- /// the autorefs on the way out. At each point, it returns the
- /// `cmt` for the rvalue that will be produced by introduced an
- /// autoref.
+ /// Walks the autoref `autoref` applied to the autoderef'd
+ /// `expr`. `cmt_base` is the mem-categorized form of `expr`
+ /// after all relevant autoderefs have occurred.
fn walk_autoref(&mut self,
expr: &hir::Expr,
cmt_base: mc::cmt<'tcx>,
- opt_autoref: Option<adjustment::AutoBorrow<'tcx>>)
- -> mc::cmt<'tcx>
- {
- debug!("walk_autoref(expr.id={} cmt_derefd={:?} opt_autoref={:?})",
+ autoref: &adjustment::AutoBorrow<'tcx>) {
+ debug!("walk_autoref(expr.id={} cmt_base={:?} autoref={:?})",
expr.id,
cmt_base,
- opt_autoref);
-
- let cmt_base_ty = cmt_base.ty;
-
- let autoref = match opt_autoref {
- Some(ref autoref) => autoref,
- None => {
- // No AutoRef.
- return cmt_base;
- }
- };
+ autoref);
match *autoref {
adjustment::AutoBorrow::Ref(r, m) => {
AutoUnsafe);
}
}
-
- // Construct the categorization for the result of the autoref.
- // This is always an rvalue, since we are producing a new
- // (temporary) indirection.
-
- let adj_ty = cmt_base_ty.adjust_for_autoref(self.tcx(), opt_autoref);
-
- self.mc.cat_rvalue_node(expr.id, expr.span, adj_ty)
- }
-
-
- // When this returns true, it means that the expression *is* a
- // method-call (i.e. via the operator-overload). This true result
- // also implies that walk_overloaded_operator already took care of
- // recursively processing the input arguments, and thus the caller
- // should not do so.
- fn walk_overloaded_operator(&mut self,
- expr: &hir::Expr,
- receiver: &hir::Expr,
- rhs: Vec<&hir::Expr>,
- pass_args: PassArgs)
- -> bool
- {
- if !self.mc.infcx.is_method_call(expr.id) {
- return false;
- }
-
- match pass_args {
- PassArgs::ByValue => {
- self.consume_expr(receiver);
- for &arg in &rhs {
- self.consume_expr(arg);
- }
-
- return true;
- },
- PassArgs::ByRef => {},
- }
-
- self.walk_expr(receiver);
-
- // Arguments (but not receivers) to overloaded operator
- // methods are implicitly autoref'd which sadly does not use
- // adjustments, so we must hardcode the borrow here.
-
- let r = self.tcx().node_scope_region(expr.id);
- let bk = ty::ImmBorrow;
-
- for &arg in &rhs {
- self.borrow_expr(arg, r, bk, OverloadedOperator);
- }
- return true;
}
fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &hir::Arm) -> TrackMatchMode {
PatKind::Binding(hir::BindByRef(..), ..) =>
mode.lub(BorrowingMatch),
PatKind::Binding(hir::BindByValue(..), ..) => {
- match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) {
+ match copy_or_move(self.mc.infcx, self.param_env, &cmt_pat, PatBindingMove) {
Copy => mode.lub(CopyingMatch),
Move(..) => mode.lub(MovingMatch),
}
fn walk_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat, match_mode: MatchMode) {
debug!("walk_pat cmt_discr={:?} pat={:?}", cmt_discr, pat);
- let tcx = &self.tcx();
- let mc = &self.mc;
+ let tcx = self.tcx();
let infcx = self.mc.infcx;
- let delegate = &mut self.delegate;
+ let ExprUseVisitor { ref mc, ref mut delegate, param_env } = *self;
return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| {
if let PatKind::Binding(bmode, def_id, ..) = pat.node {
debug!("binding cmt_pat={:?} pat={:?} match_mode={:?}", cmt_pat, pat, match_mode);
}
}
hir::BindByValue(..) => {
- let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove);
+ let mode = copy_or_move(infcx, param_env, &cmt_pat, PatBindingMove);
debug!("walk_pat binding consuming pat");
delegate.consume_pat(pat, cmt_pat, mode);
}
freevar.def));
match upvar_capture {
ty::UpvarCapture::ByValue => {
- let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove);
+ let mode = copy_or_move(self.mc.infcx,
+ self.param_env,
+ &cmt_var,
+ CaptureMove);
self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
}
ty::UpvarCapture::ByRef(upvar_borrow) => {
}
fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
cmt: &mc::cmt<'tcx>,
move_reason: MoveReason)
-> ConsumeMode
{
- if infcx.type_moves_by_default(cmt.ty, cmt.span) {
+ if infcx.type_moves_by_default(param_env, cmt.ty, cmt.span) {
Move(move_reason)
} else {
Copy
use hir::def::Def;
use hir::def_id::DefId;
-use infer::InferCtxt;
-use traits::Reveal;
use ty::{self, Ty, TyCtxt};
use ty::layout::{LayoutError, Pointer, SizeSkeleton};
tcx: TyCtxt<'a, 'tcx, 'tcx>
}
-struct ExprVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- infcx: &'a InferCtxt<'a, 'gcx, 'tcx>
+struct ExprVisitor<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ tables: &'tcx ty::TypeckTables<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
}
/// If the type is `Option<T>`, it will return `T`, otherwise
ty
}
-impl<'a, 'gcx, 'tcx> ExprVisitor<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> ExprVisitor<'a, 'tcx> {
fn def_id_is_transmute(&self, def_id: DefId) -> bool {
- let intrinsic = match self.infcx.tcx.type_of(def_id).sty {
+ let intrinsic = match self.tcx.type_of(def_id).sty {
ty::TyFnDef(.., bfty) => bfty.abi() == RustIntrinsic,
_ => return false
};
- intrinsic && self.infcx.tcx.item_name(def_id) == "transmute"
+ intrinsic && self.tcx.item_name(def_id) == "transmute"
}
- fn check_transmute(&self, span: Span, from: Ty<'gcx>, to: Ty<'gcx>) {
- let sk_from = SizeSkeleton::compute(from, self.infcx);
- let sk_to = SizeSkeleton::compute(to, self.infcx);
+ fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) {
+ let sk_from = SizeSkeleton::compute(from, self.tcx, self.param_env);
+ let sk_to = SizeSkeleton::compute(to, self.tcx, self.param_env);
// Check for same size using the skeletons.
if let (Ok(sk_from), Ok(sk_to)) = (sk_from, sk_to) {
// Special-case transmutting from `typeof(function)` and
// `Option<typeof(function)>` to present a clearer error.
- let from = unpack_option_like(self.infcx.tcx.global_tcx(), from);
+ let from = unpack_option_like(self.tcx.global_tcx(), from);
match (&from.sty, sk_to) {
(&ty::TyFnDef(..), SizeSkeleton::Known(size_to))
- if size_to == Pointer.size(self.infcx) => {
- struct_span_err!(self.infcx.tcx.sess, span, E0591,
+ if size_to == Pointer.size(self.tcx) => {
+ struct_span_err!(self.tcx.sess, span, E0591,
"`{}` is zero-sized and can't be transmuted to `{}`",
from, to)
.span_note(span, "cast with `as` to a pointer instead")
}
// Try to display a sensible error with as much information as possible.
- let skeleton_string = |ty: Ty<'gcx>, sk| {
+ let skeleton_string = |ty: Ty<'tcx>, sk| {
match sk {
Ok(SizeSkeleton::Known(size)) => {
format!("{} bits", size.bits())
}
};
- struct_span_err!(self.infcx.tcx.sess, span, E0512,
+ struct_span_err!(self.tcx.sess, span, E0512,
"transmute called with differently sized types: \
{} ({}) to {} ({})",
from, skeleton_string(from, sk_from),
}
fn visit_nested_body(&mut self, body_id: hir::BodyId) {
+ let owner_def_id = self.tcx.hir.body_owner_def_id(body_id);
let body = self.tcx.hir.body(body_id);
- self.tcx.infer_ctxt(body_id, Reveal::All).enter(|infcx| {
- let mut visitor = ExprVisitor {
- infcx: &infcx
- };
- visitor.visit_body(body);
- });
+ let param_env = self.tcx.param_env(owner_def_id);
+ let tables = self.tcx.typeck_tables_of(owner_def_id);
+ ExprVisitor { tcx: self.tcx, param_env, tables }.visit_body(body);
self.visit_body(body);
}
}
-impl<'a, 'gcx, 'tcx> Visitor<'gcx> for ExprVisitor<'a, 'gcx, 'tcx> {
- fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'gcx> {
+impl<'a, 'tcx> Visitor<'tcx> for ExprVisitor<'a, 'tcx> {
+ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
NestedVisitorMap::None
}
- fn visit_expr(&mut self, expr: &'gcx hir::Expr) {
+ fn visit_expr(&mut self, expr: &'tcx hir::Expr) {
let def = if let hir::ExprPath(ref qpath) = expr.node {
- self.infcx.tables.borrow().qpath_def(qpath, expr.id)
+ self.tables.qpath_def(qpath, expr.id)
} else {
Def::Err
};
match def {
Def::Fn(did) if self.def_id_is_transmute(did) => {
- let typ = self.infcx.tables.borrow().node_id_to_type(expr.id);
- let typ = self.infcx.tcx.lift_to_global(&typ).unwrap();
+ let typ = self.tables.node_id_to_type(expr.id);
+ let typ = self.tcx.lift_to_global(&typ).unwrap();
match typ.sty {
ty::TyFnDef(.., sig) if sig.abi() == RustIntrinsic => {
let from = sig.inputs().skip_binder()[0];
hir::ExprAssignOp(_, ref l, ref r) => {
// an overloaded assign op is like a method call
- if self.tables.is_method_call(expr.id) {
+ if self.tables.is_method_call(expr) {
let succ = self.propagate_through_expr(&l, succ);
self.propagate_through_expr(&r, succ)
} else {
hir::ExprCall(ref f, ref args) => {
// FIXME(canndrew): This is_never should really be an is_uninhabited
- let diverges = !self.tables.is_method_call(expr.id) &&
- self.tables.expr_ty_adjusted(&f).fn_ret().0.is_never();
- let succ = if diverges {
+ let succ = if self.tables.expr_ty(expr).is_never() {
self.s.exit_ln
} else {
succ
}
hir::ExprMethodCall(.., ref args) => {
- let method_call = ty::MethodCall::expr(expr.id);
- let method_ty = self.tables.method_map[&method_call].ty;
// FIXME(canndrew): This is_never should really be an is_uninhabited
- let succ = if method_ty.fn_ret().0.is_never() {
+ let succ = if self.tables.expr_ty(expr).is_never() {
self.s.exit_ln
} else {
succ
}
hir::ExprAssignOp(_, ref l, _) => {
- if !this.tables.is_method_call(expr.id) {
+ if !this.tables.is_method_call(expr) {
this.check_lvalue(&l);
}
pub use self::PointerKind::*;
pub use self::InteriorKind::*;
pub use self::FieldName::*;
-pub use self::ElementKind::*;
pub use self::MutabilityCategory::*;
pub use self::AliasableReason::*;
pub use self::Note::*;
#[derive(Clone, PartialEq)]
pub enum Categorization<'tcx> {
- // temporary val, argument is its scope
- Rvalue(ty::Region<'tcx>, ty::Region<'tcx>),
+ Rvalue(ty::Region<'tcx>), // temporary val, argument is its scope
StaticItem,
Upvar(Upvar), // upvar referenced by closure env
Local(ast::NodeId), // local variable
- Deref(cmt<'tcx>, usize, PointerKind<'tcx>), // deref of a ptr
+ Deref(cmt<'tcx>, PointerKind<'tcx>), // deref of a ptr
Interior(cmt<'tcx>, InteriorKind), // something interior: field, tuple, etc
Downcast(cmt<'tcx>, DefId), // selects a particular enum variant (*1)
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum InteriorKind {
InteriorField(FieldName),
- InteriorElement(InteriorOffsetKind, ElementKind),
+ InteriorElement(InteriorOffsetKind),
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
Pattern, // e.g. `fn foo([_, a, _, _]: [A; 4]) { ... }`
}
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
-pub enum ElementKind {
- VecElement,
- OtherElement,
-}
-
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum MutabilityCategory {
McImmutable, // Immutable.
pub fn immutability_blame(&self) -> Option<ImmutabilityBlame<'tcx>> {
match self.cat {
- Categorization::Deref(ref base_cmt, _, BorrowedPtr(ty::ImmBorrow, _)) |
- Categorization::Deref(ref base_cmt, _, Implicit(ty::ImmBorrow, _)) => {
+ Categorization::Deref(ref base_cmt, BorrowedPtr(ty::ImmBorrow, _)) |
+ Categorization::Deref(ref base_cmt, Implicit(ty::ImmBorrow, _)) => {
// try to figure out where the immutable reference came from
match base_cmt.cat {
Categorization::Local(node_id) =>
}
Categorization::Rvalue(..) |
Categorization::Upvar(..) |
- Categorization::Deref(.., UnsafePtr(..)) => {
+ Categorization::Deref(_, UnsafePtr(..)) => {
// This should not be reachable up to inference limitations.
None
}
Categorization::Interior(ref base_cmt, _) |
Categorization::Downcast(ref base_cmt, _) |
- Categorization::Deref(ref base_cmt, _, _) => {
+ Categorization::Deref(ref base_cmt, _) => {
base_cmt.immutability_blame()
}
Categorization::StaticItem => {
}
pub fn cat_expr(&self, expr: &hir::Expr) -> McResult<cmt<'tcx>> {
- match self.infcx.tables.borrow().adjustments.get(&expr.id) {
- None => {
- // No adjustments.
- self.cat_expr_unadjusted(expr)
- }
-
- Some(adjustment) => {
- match adjustment.kind {
- adjustment::Adjust::DerefRef {
- autoderefs,
- autoref: None,
- unsize: false
- } => {
- // Equivalent to *expr or something similar.
- self.cat_expr_autoderefd(expr, autoderefs)
- }
-
- adjustment::Adjust::NeverToAny |
- adjustment::Adjust::ReifyFnPointer |
- adjustment::Adjust::UnsafeFnPointer |
- adjustment::Adjust::ClosureFnPointer |
- adjustment::Adjust::MutToConstPointer |
- adjustment::Adjust::DerefRef {..} => {
- debug!("cat_expr({:?}): {:?}",
- adjustment,
- expr);
- // Result is an rvalue.
- let expr_ty = self.expr_ty_adjusted(expr)?;
- Ok(self.cat_rvalue_node(expr.id(), expr.span(), expr_ty))
- }
+ // This recursion helper avoids going through *too many*
+ // adjustments, since *only* non-overloaded deref recurses.
+ fn helper<'a, 'gcx, 'tcx>(mc: &MemCategorizationContext<'a, 'gcx, 'tcx>,
+ expr: &hir::Expr,
+ adjustments: &[adjustment::Adjustment<'tcx>])
+ -> McResult<cmt<'tcx>> {
+ match adjustments.split_last() {
+ None => mc.cat_expr_unadjusted(expr),
+ Some((adjustment, previous)) => {
+ mc.cat_expr_adjusted_with(expr, || helper(mc, expr, previous), adjustment)
}
}
}
+
+ helper(self, expr, self.infcx.tables.borrow().expr_adjustments(expr))
+ }
+
+ pub fn cat_expr_adjusted(&self, expr: &hir::Expr,
+ previous: cmt<'tcx>,
+ adjustment: &adjustment::Adjustment<'tcx>)
+ -> McResult<cmt<'tcx>> {
+ self.cat_expr_adjusted_with(expr, || Ok(previous), adjustment)
}
- pub fn cat_expr_autoderefd(&self,
- expr: &hir::Expr,
- autoderefs: usize)
- -> McResult<cmt<'tcx>> {
- let mut cmt = self.cat_expr_unadjusted(expr)?;
- debug!("cat_expr_autoderefd: autoderefs={}, cmt={:?}",
- autoderefs,
- cmt);
- for deref in 1..autoderefs + 1 {
- cmt = self.cat_deref(expr, cmt, deref)?;
+ fn cat_expr_adjusted_with<F>(&self, expr: &hir::Expr,
+ previous: F,
+ adjustment: &adjustment::Adjustment<'tcx>)
+ -> McResult<cmt<'tcx>>
+ where F: FnOnce() -> McResult<cmt<'tcx>>
+ {
+ debug!("cat_expr_adjusted_with({:?}): {:?}", adjustment, expr);
+ let target = self.infcx.resolve_type_vars_if_possible(&adjustment.target);
+ match adjustment.kind {
+ adjustment::Adjust::Deref(overloaded) => {
+ // Equivalent to *expr or something similar.
+ let base = if let Some(deref) = overloaded {
+ let ref_ty = self.tcx().mk_ref(deref.region, ty::TypeAndMut {
+ ty: target,
+ mutbl: deref.mutbl,
+ });
+ self.cat_rvalue_node(expr.id, expr.span, ref_ty)
+ } else {
+ previous()?
+ };
+ self.cat_deref(expr, base, false)
+ }
+
+ adjustment::Adjust::NeverToAny |
+ adjustment::Adjust::ReifyFnPointer |
+ adjustment::Adjust::UnsafeFnPointer |
+ adjustment::Adjust::ClosureFnPointer |
+ adjustment::Adjust::MutToConstPointer |
+ adjustment::Adjust::Borrow(_) |
+ adjustment::Adjust::Unsize => {
+ // Result is an rvalue.
+ Ok(self.cat_rvalue_node(expr.id, expr.span, target))
+ }
}
- return Ok(cmt);
}
pub fn cat_expr_unadjusted(&self, expr: &hir::Expr) -> McResult<cmt<'tcx>> {
let expr_ty = self.expr_ty(expr)?;
match expr.node {
hir::ExprUnary(hir::UnDeref, ref e_base) => {
- let base_cmt = self.cat_expr(&e_base)?;
- self.cat_deref(expr, base_cmt, 0)
+ if self.infcx.tables.borrow().is_method_call(expr) {
+ self.cat_overloaded_lvalue(expr, e_base, false)
+ } else {
+ let base_cmt = self.cat_expr(&e_base)?;
+ self.cat_deref(expr, base_cmt, false)
+ }
}
hir::ExprField(ref base, f_name) => {
}
hir::ExprIndex(ref base, _) => {
- let method_call = ty::MethodCall::expr(expr.id());
- match self.infcx.node_method_ty(method_call) {
- Some(method_ty) => {
- // If this is an index implemented by a method call, then it
- // will include an implicit deref of the result.
- let ret_ty = self.overloaded_method_return_ty(method_ty);
-
- // The index method always returns an `&T`, so
- // dereference it to find the result type.
- let elem_ty = match ret_ty.sty {
- ty::TyRef(_, mt) => mt.ty,
- _ => {
- debug!("cat_expr_unadjusted: return type of overloaded index is {:?}?",
- ret_ty);
- return Err(());
- }
- };
-
- // The call to index() returns a `&T` value, which
- // is an rvalue. That is what we will be
- // dereferencing.
- let base_cmt = self.cat_rvalue_node(expr.id(), expr.span(), ret_ty);
- Ok(self.cat_deref_common(expr, base_cmt, 1, elem_ty, true))
- }
- None => {
- self.cat_index(expr, self.cat_expr(&base)?, InteriorOffsetKind::Index)
- }
+ if self.infcx.tables.borrow().is_method_call(expr) {
+ // If this is an index implemented by a method call, then it
+ // will include an implicit deref of the result.
+ // The call to index() returns a `&T` value, which
+ // is an rvalue. That is what we will be
+ // dereferencing.
+ self.cat_overloaded_lvalue(expr, base, true)
+ } else {
+ let base_cmt = self.cat_expr(&base)?;
+ self.cat_index(expr, base_cmt, expr_ty, InteriorOffsetKind::Index)
}
}
cmt_ {
id: id,
span: span,
- cat: Categorization::Deref(Rc::new(cmt_result), 0, ptr),
+ cat: Categorization::Deref(Rc::new(cmt_result), ptr),
mutbl: MutabilityCategory::from_borrow_kind(upvar_borrow.kind),
ty: var_ty,
note: NoteUpvarRef(upvar_id)
let ret = cmt_ {
id: id,
span: span,
- cat: Categorization::Deref(Rc::new(cmt_result), 0, env_ptr),
+ cat: Categorization::Deref(Rc::new(cmt_result), env_ptr),
mutbl: deref_mutbl,
ty: var_ty,
note: NoteClosureEnv(upvar_id)
/// Returns the lifetime of a temporary created by expr with id `id`.
/// This could be `'static` if `id` is part of a constant expression.
- pub fn temporary_scope(&self, id: ast::NodeId) -> (ty::Region<'tcx>, ty::Region<'tcx>)
+ pub fn temporary_scope(&self, id: ast::NodeId) -> ty::Region<'tcx>
{
- let (scope, old_scope) =
- self.region_maps.old_and_new_temporary_scope(id);
- (self.tcx().mk_region(match scope {
- Some(scope) => ty::ReScope(scope),
- None => ty::ReStatic
- }),
- self.tcx().mk_region(match old_scope {
+ let scope = self.region_maps.temporary_scope(id);
+ self.tcx().mk_region(match scope {
Some(scope) => ty::ReScope(scope),
None => ty::ReStatic
- }))
+ })
}
pub fn cat_rvalue_node(&self,
// Compute maximum lifetime of this rvalue. This is 'static if
// we can promote to a constant, otherwise equal to enclosing temp
// lifetime.
- let (re, old_re) = if promotable {
- (self.tcx().types.re_static,
- self.tcx().types.re_static)
+ let re = if promotable {
+ self.tcx().types.re_static
} else {
self.temporary_scope(id)
};
- let ret = self.cat_rvalue(id, span, re, old_re, expr_ty);
+ let ret = self.cat_rvalue(id, span, re, expr_ty);
debug!("cat_rvalue_node ret {:?}", ret);
ret
}
cmt_id: ast::NodeId,
span: Span,
temp_scope: ty::Region<'tcx>,
- old_temp_scope: ty::Region<'tcx>,
expr_ty: Ty<'tcx>) -> cmt<'tcx> {
let ret = Rc::new(cmt_ {
id:cmt_id,
span:span,
- cat:Categorization::Rvalue(temp_scope, old_temp_scope),
+ cat:Categorization::Rvalue(temp_scope),
mutbl:McDeclared,
ty:expr_ty,
note: NoteNone
ret
}
- fn cat_deref<N:ast_node>(&self,
- node: &N,
- base_cmt: cmt<'tcx>,
- deref_cnt: usize)
+ fn cat_overloaded_lvalue(&self,
+ expr: &hir::Expr,
+ base: &hir::Expr,
+ implicit: bool)
-> McResult<cmt<'tcx>> {
- let method_call = ty::MethodCall {
- expr_id: node.id(),
- autoderef: deref_cnt as u32
- };
- let method_ty = self.infcx.node_method_ty(method_call);
+ debug!("cat_overloaded_lvalue: implicit={}", implicit);
- debug!("cat_deref: method_call={:?} method_ty={:?}",
- method_call, method_ty.map(|ty| ty));
+ // Reconstruct the output assuming it's a reference with the
+ // same region and mutability as the receiver. This holds for
+ // `Deref(Mut)::Deref(_mut)` and `Index(Mut)::index(_mut)`.
+ let lvalue_ty = self.expr_ty(expr)?;
+ let base_ty = self.expr_ty_adjusted(base)?;
- let base_cmt = match method_ty {
- Some(method_ty) => {
- let ref_ty =
- self.tcx().no_late_bound_regions(&method_ty.fn_ret()).unwrap();
- self.cat_rvalue_node(node.id(), node.span(), ref_ty)
+ let (region, mutbl) = match base_ty.sty {
+ ty::TyRef(region, mt) => (region, mt.mutbl),
+ _ => {
+ span_bug!(expr.span, "cat_overloaded_lvalue: base is not a reference")
}
- None => base_cmt
};
+ let ref_ty = self.tcx().mk_ref(region, ty::TypeAndMut {
+ ty: lvalue_ty,
+ mutbl,
+ });
+
+ let base_cmt = self.cat_rvalue_node(expr.id, expr.span, ref_ty);
+ self.cat_deref(expr, base_cmt, implicit)
+ }
+
+ pub fn cat_deref<N:ast_node>(&self,
+ node: &N,
+ base_cmt: cmt<'tcx>,
+ implicit: bool)
+ -> McResult<cmt<'tcx>> {
+ debug!("cat_deref: base_cmt={:?}", base_cmt);
+
let base_cmt_ty = base_cmt.ty;
- match base_cmt_ty.builtin_deref(true, ty::NoPreference) {
- Some(mt) => {
- let ret = self.cat_deref_common(node, base_cmt, deref_cnt, mt.ty, false);
- debug!("cat_deref ret {:?}", ret);
- Ok(ret)
- }
+ let deref_ty = match base_cmt_ty.builtin_deref(true, ty::NoPreference) {
+ Some(mt) => mt.ty,
None => {
debug!("Explicit deref of non-derefable type: {:?}",
base_cmt_ty);
return Err(());
}
- }
- }
+ };
- fn cat_deref_common<N:ast_node>(&self,
- node: &N,
- base_cmt: cmt<'tcx>,
- deref_cnt: usize,
- deref_ty: Ty<'tcx>,
- implicit: bool)
- -> cmt<'tcx>
- {
let ptr = match base_cmt.ty.sty {
ty::TyAdt(def, ..) if def.is_box() => Unique,
ty::TyRawPtr(ref mt) => UnsafePtr(mt.mutbl),
let bk = ty::BorrowKind::from_mutbl(mt.mutbl);
if implicit { Implicit(bk, r) } else { BorrowedPtr(bk, r) }
}
- ref ty => bug!("unexpected type in cat_deref_common: {:?}", ty)
+ ref ty => bug!("unexpected type in cat_deref: {:?}", ty)
};
let ret = Rc::new(cmt_ {
id: node.id(),
span: node.span(),
// For unique ptrs, we inherit mutability from the owning reference.
mutbl: MutabilityCategory::from_pointer_kind(base_cmt.mutbl, ptr),
- cat: Categorization::Deref(base_cmt, deref_cnt, ptr),
+ cat: Categorization::Deref(base_cmt, ptr),
ty: deref_ty,
note: NoteNone
});
- debug!("cat_deref_common ret {:?}", ret);
- ret
+ debug!("cat_deref ret {:?}", ret);
+ Ok(ret)
}
- pub fn cat_index<N:ast_node>(&self,
- elt: &N,
- mut base_cmt: cmt<'tcx>,
- context: InteriorOffsetKind)
- -> McResult<cmt<'tcx>> {
+ fn cat_index<N:ast_node>(&self,
+ elt: &N,
+ base_cmt: cmt<'tcx>,
+ element_ty: Ty<'tcx>,
+ context: InteriorOffsetKind)
+ -> McResult<cmt<'tcx>> {
//! Creates a cmt for an indexing operation (`[]`).
//!
//! One subtle aspect of indexing that may not be
//! - `elt`: the AST node being indexed
//! - `base_cmt`: the cmt of `elt`
- let method_call = ty::MethodCall::expr(elt.id());
- let method_ty = self.infcx.node_method_ty(method_call);
-
- let (element_ty, element_kind) = match method_ty {
- Some(method_ty) => {
- let ref_ty = self.overloaded_method_return_ty(method_ty);
- base_cmt = self.cat_rvalue_node(elt.id(), elt.span(), ref_ty);
-
- (ref_ty.builtin_deref(false, ty::NoPreference).unwrap().ty,
- ElementKind::OtherElement)
- }
- None => {
- match base_cmt.ty.builtin_index() {
- Some(ty) => (ty, ElementKind::VecElement),
- None => {
- debug!("Explicit index of non-indexable type {:?}", base_cmt);
- return Err(());
- }
- }
- }
- };
-
- let interior_elem = InteriorElement(context, element_kind);
+ let interior_elem = InteriorElement(context);
let ret =
- self.cat_imm_interior(elt, base_cmt.clone(), element_ty, interior_elem);
+ self.cat_imm_interior(elt, base_cmt, element_ty, interior_elem);
debug!("cat_index ret {:?}", ret);
return Ok(ret);
}
// box p1, &p1, &mut p1. we can ignore the mutability of
// PatKind::Ref since that information is already contained
// in the type.
- let subcmt = self.cat_deref(pat, cmt, 0)?;
+ let subcmt = self.cat_deref(pat, cmt, false)?;
self.cat_pattern_(subcmt, &subpat, op)?;
}
PatKind::Slice(ref before, ref slice, ref after) => {
+ let element_ty = match cmt.ty.builtin_index() {
+ Some(ty) => ty,
+ None => {
+ debug!("Explicit index of non-indexable type {:?}", cmt);
+ return Err(());
+ }
+ };
let context = InteriorOffsetKind::Pattern;
- let elt_cmt = self.cat_index(pat, cmt, context)?;
+ let elt_cmt = self.cat_index(pat, cmt, element_ty, context)?;
for before_pat in before {
self.cat_pattern_(elt_cmt.clone(), &before_pat, op)?;
}
Ok(())
}
-
- fn overloaded_method_return_ty(&self,
- method_ty: Ty<'tcx>)
- -> Ty<'tcx>
- {
- // When we process an overloaded `*` or `[]` etc, we often
- // need to extract the return type of the method. These method
- // types are generated by method resolution and always have
- // all late-bound regions fully instantiated, so we just want
- // to skip past the binder.
- self.tcx().no_late_bound_regions(&method_ty.fn_ret())
- .unwrap()
- }
}
#[derive(Clone, Debug)]
Categorization::Rvalue(..) |
Categorization::StaticItem |
Categorization::Local(..) |
- Categorization::Deref(.., UnsafePtr(..)) |
- Categorization::Deref(.., BorrowedPtr(..)) |
- Categorization::Deref(.., Implicit(..)) |
+ Categorization::Deref(_, UnsafePtr(..)) |
+ Categorization::Deref(_, BorrowedPtr(..)) |
+ Categorization::Deref(_, Implicit(..)) |
Categorization::Upvar(..) => {
Rc::new((*self).clone())
}
Categorization::Downcast(ref b, _) |
Categorization::Interior(ref b, _) |
- Categorization::Deref(ref b, _, Unique) => {
+ Categorization::Deref(ref b, Unique) => {
b.guarantor()
}
}
// aliased and eventually recused.
match self.cat {
- Categorization::Deref(ref b, _, BorrowedPtr(ty::MutBorrow, _)) |
- Categorization::Deref(ref b, _, Implicit(ty::MutBorrow, _)) |
- Categorization::Deref(ref b, _, BorrowedPtr(ty::UniqueImmBorrow, _)) |
- Categorization::Deref(ref b, _, Implicit(ty::UniqueImmBorrow, _)) |
- Categorization::Deref(ref b, _, Unique) |
+ Categorization::Deref(ref b, BorrowedPtr(ty::MutBorrow, _)) |
+ Categorization::Deref(ref b, Implicit(ty::MutBorrow, _)) |
+ Categorization::Deref(ref b, BorrowedPtr(ty::UniqueImmBorrow, _)) |
+ Categorization::Deref(ref b, Implicit(ty::UniqueImmBorrow, _)) |
+ Categorization::Deref(ref b, Unique) |
Categorization::Downcast(ref b, _) |
Categorization::Interior(ref b, _) => {
// Aliasability depends on base cmt
Categorization::Rvalue(..) |
Categorization::Local(..) |
Categorization::Upvar(..) |
- Categorization::Deref(.., UnsafePtr(..)) => { // yes, it's aliasable, but...
+ Categorization::Deref(_, UnsafePtr(..)) => { // yes, it's aliasable, but...
NonAliasable
}
}
}
- Categorization::Deref(_, _, BorrowedPtr(ty::ImmBorrow, _)) |
- Categorization::Deref(_, _, Implicit(ty::ImmBorrow, _)) => {
+ Categorization::Deref(_, BorrowedPtr(ty::ImmBorrow, _)) |
+ Categorization::Deref(_, Implicit(ty::ImmBorrow, _)) => {
FreelyAliasable(AliasableBorrowed)
}
}
match self.note {
NoteClosureEnv(..) | NoteUpvarRef(..) => {
Some(match self.cat {
- Categorization::Deref(ref inner, ..) => {
+ Categorization::Deref(ref inner, _) => {
match inner.cat {
- Categorization::Deref(ref inner, ..) => inner.clone(),
+ Categorization::Deref(ref inner, _) => inner.clone(),
Categorization::Upvar(..) => inner.clone(),
_ => bug!()
}
"local variable".to_string()
}
}
- Categorization::Deref(.., pk) => {
+ Categorization::Deref(_, pk) => {
let upvar = self.upvar();
match upvar.as_ref().map(|i| &i.cat) {
Some(&Categorization::Upvar(ref var)) => {
Categorization::Interior(_, InteriorField(PositionalField(_))) => {
"anonymous field".to_string()
}
- Categorization::Interior(_, InteriorElement(InteriorOffsetKind::Index,
- VecElement)) |
- Categorization::Interior(_, InteriorElement(InteriorOffsetKind::Index,
- OtherElement)) => {
+ Categorization::Interior(_, InteriorElement(InteriorOffsetKind::Index)) => {
"indexed content".to_string()
}
- Categorization::Interior(_, InteriorElement(InteriorOffsetKind::Pattern,
- VecElement)) |
- Categorization::Interior(_, InteriorElement(InteriorOffsetKind::Pattern,
- OtherElement)) => {
+ Categorization::Interior(_, InteriorElement(InteriorOffsetKind::Pattern)) => {
"pattern-bound indexed content".to_string()
}
Categorization::Upvar(ref var) => {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Categorization::StaticItem => write!(f, "static"),
- Categorization::Rvalue(r, or) => {
- write!(f, "rvalue({:?}, {:?})", r, or)
- }
+ Categorization::Rvalue(r) => { write!(f, "rvalue({:?})", r) }
Categorization::Local(id) => {
let name = ty::tls::with(|tcx| tcx.local_var_name_str(id));
write!(f, "local({})", name)
Categorization::Upvar(upvar) => {
write!(f, "upvar({:?})", upvar)
}
- Categorization::Deref(ref cmt, derefs, ptr) => {
- write!(f, "{:?}-{:?}{}->", cmt.cat, ptr, derefs)
+ Categorization::Deref(ref cmt, ptr) => {
+ write!(f, "{:?}-{:?}->", cmt.cat, ptr)
}
Categorization::Interior(ref cmt, interior) => {
write!(f, "{:?}.{:?}", cmt.cat, interior)
Some(self.tables.qpath_def(qpath, expr.id))
}
hir::ExprMethodCall(..) => {
- let method_call = ty::MethodCall::expr(expr.id);
- let def_id = self.tables.method_map[&method_call].def_id;
- Some(Def::Method(def_id))
+ Some(self.tables.type_dependent_defs[&expr.id])
}
_ => None
};
/// block (see `terminating_scopes`).
rvalue_scopes: NodeMap<CodeExtent>,
- /// Records the value of rvalue scopes before they were shrunk by
- /// #36082, for error reporting.
- ///
- /// FIXME: this should be temporary. Remove this by 1.18.0 or
- /// so.
- shrunk_rvalue_scopes: NodeMap<CodeExtent>,
-
/// Encodes the hierarchy of fn bodies. Every fn body (including
/// closures) forms its own distinct region hierarchy, rooted in
/// the block that is the fn body. This map points from the id of
destruction_scopes: FxHashMap(),
var_map: NodeMap(),
rvalue_scopes: NodeMap(),
- shrunk_rvalue_scopes: NodeMap(),
fn_tree: NodeMap(),
}
}
self.rvalue_scopes.insert(var, lifetime);
}
- fn record_shrunk_rvalue_scope(&mut self, var: ast::NodeId, lifetime: CodeExtent) {
- debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime);
- assert!(var != lifetime.node_id());
- self.shrunk_rvalue_scopes.insert(var, lifetime);
- }
-
pub fn opt_encl_scope(&self, id: CodeExtent) -> Option<CodeExtent> {
//! Returns the narrowest scope that encloses `id`, if any.
self.scope_map.get(&id).cloned()
}
}
- pub fn temporary_scope2(&self, expr_id: ast::NodeId)
- -> (Option<CodeExtent>, bool) {
- let temporary_scope = self.temporary_scope(expr_id);
- let was_shrunk = match self.shrunk_rvalue_scopes.get(&expr_id) {
- Some(&s) => {
- info!("temporary_scope2({:?}, scope={:?}, shrunk={:?})",
- expr_id, temporary_scope, s);
- temporary_scope != Some(s)
- }
- _ => false
- };
- info!("temporary_scope2({:?}) - was_shrunk={:?}", expr_id, was_shrunk);
- (temporary_scope, was_shrunk)
- }
-
- pub fn old_and_new_temporary_scope(&self, expr_id: ast::NodeId)
- -> (Option<CodeExtent>,
- Option<CodeExtent>)
- {
- let temporary_scope = self.temporary_scope(expr_id);
- (temporary_scope,
- self.shrunk_rvalue_scopes
- .get(&expr_id).cloned()
- .or(temporary_scope))
- }
-
pub fn temporary_scope(&self, expr_id: ast::NodeId) -> Option<CodeExtent> {
//! Returns the scope when temp created by expr_id will be cleaned up
// Rule A. `let (ref x, ref y) = (foo().x, 44)`. The rvalue `(22, 44)`
// would have an extended lifetime, but not `foo()`.
//
- // Rule B. `let x: &[...] = [foo().x]`. The rvalue `[foo().x]`
- // would have an extended lifetime, but not `foo()`.
- //
- // Rule C. `let x = &foo().x`. The rvalue ``foo()` would have extended
+ // Rule B. `let x = &foo().x`. The rvalue ``foo()` would have extended
// lifetime.
//
// In some cases, multiple rules may apply (though not to the same
if let Some(ref expr) = local.init {
record_rvalue_scope_if_borrow_expr(visitor, &expr, blk_scope);
- let is_borrow =
- if let Some(ref ty) = local.ty { is_borrowed_ty(&ty) } else { false };
-
if is_binding_pat(&local.pat) {
- record_rvalue_scope(visitor, &expr, blk_scope, false);
- } else if is_borrow {
- record_rvalue_scope(visitor, &expr, blk_scope, true);
+ record_rvalue_scope(visitor, &expr, blk_scope);
}
}
}
}
- /// True if `ty` is a borrowed pointer type like `&int` or `&[...]`.
- fn is_borrowed_ty(ty: &hir::Ty) -> bool {
- match ty.node {
- hir::TyRptr(..) => true,
- _ => false
- }
- }
-
/// If `expr` matches the `E&` grammar, then records an extended rvalue scope as appropriate:
///
/// E& = & ET
match expr.node {
hir::ExprAddrOf(_, ref subexpr) => {
record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id);
- record_rvalue_scope(visitor, &subexpr, blk_id, false);
+ record_rvalue_scope(visitor, &subexpr, blk_id);
}
hir::ExprStruct(_, ref fields, _) => {
for field in fields {
/// Note: ET is intended to match "rvalues or lvalues based on rvalues".
fn record_rvalue_scope<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>,
expr: &hir::Expr,
- blk_scope: CodeExtent,
- is_shrunk: bool) {
+ blk_scope: CodeExtent) {
let mut expr = expr;
loop {
// Note: give all the expressions matching `ET` with the
// because in trans if we must compile e.g. `*rvalue()`
// into a temporary, we request the temporary scope of the
// outer expression.
- if is_shrunk {
- // this changed because of #36082
- visitor.region_maps.record_shrunk_rvalue_scope(expr.id, blk_scope);
- } else {
- visitor.region_maps.record_rvalue_scope(expr.id, blk_scope);
- }
+ visitor.region_maps.record_rvalue_scope(expr.id, blk_scope);
match expr.node {
hir::ExprAddrOf(_, ref subexpr) |
//! See `README.md` for high-level documentation
-use super::{SelectionContext, Obligation, ObligationCause};
-
use hir::def_id::{DefId, LOCAL_CRATE};
+use syntax_pos::DUMMY_SP;
+use traits::{self, Normalized, SelectionContext, Obligation, ObligationCause, Reveal};
use ty::{self, Ty, TyCtxt};
+use ty::subst::Subst;
use infer::{InferCtxt, InferOk};
overlap(selcx, impl1_def_id, impl2_def_id)
}
+fn with_fresh_ty_vars<'cx, 'gcx, 'tcx>(selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ impl_def_id: DefId)
+ -> ty::ImplHeader<'tcx>
+{
+ let tcx = selcx.tcx();
+ let impl_substs = selcx.infcx().fresh_substs_for_item(DUMMY_SP, impl_def_id);
+
+ let header = ty::ImplHeader {
+ impl_def_id: impl_def_id,
+ self_ty: tcx.type_of(impl_def_id),
+ trait_ref: tcx.impl_trait_ref(impl_def_id),
+ predicates: tcx.predicates_of(impl_def_id).predicates
+ }.subst(tcx, impl_substs);
+
+ let Normalized { value: mut header, obligations } =
+ traits::normalize(selcx, param_env, ObligationCause::dummy(), &header);
+
+ header.predicates.extend(obligations.into_iter().map(|o| o.predicate));
+ header
+}
+
/// Can both impl `a` and impl `b` be satisfied by a common type (including
/// `where` clauses)? If so, returns an `ImplHeader` that unifies the two impls.
fn overlap<'cx, 'gcx, 'tcx>(selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
a_def_id,
b_def_id);
- let a_impl_header = ty::ImplHeader::with_fresh_ty_vars(selcx, a_def_id);
- let b_impl_header = ty::ImplHeader::with_fresh_ty_vars(selcx, b_def_id);
+ // For the purposes of this check, we don't bring any skolemized
+ // types into scope; instead, we replace the generic types with
+ // fresh type variables, and hence we do our evaluations in an
+ // empty environment.
+ let param_env = ty::ParamEnv::empty(Reveal::UserFacing);
+
+ let a_impl_header = with_fresh_ty_vars(selcx, param_env, a_def_id);
+ let b_impl_header = with_fresh_ty_vars(selcx, param_env, b_def_id);
debug!("overlap: a_impl_header={:?}", a_impl_header);
debug!("overlap: b_impl_header={:?}", b_impl_header);
// Do `a` and `b` unify? If not, no overlap.
- let obligations = match selcx.infcx().eq_impl_headers(true,
- &ObligationCause::dummy(),
- &a_impl_header,
- &b_impl_header) {
- Ok(InferOk { obligations, .. }) => {
+ let obligations = match selcx.infcx().at(&ObligationCause::dummy(), param_env)
+ .eq_impl_headers(&a_impl_header, &b_impl_header) {
+ Ok(InferOk { obligations, value: () }) => {
obligations
}
Err(_) => return None
.chain(&b_impl_header.predicates)
.map(|p| infcx.resolve_type_vars_if_possible(p))
.map(|p| Obligation { cause: ObligationCause::dummy(),
+ param_env: param_env,
recursion_depth: 0,
predicate: p })
.chain(obligations)
data);
let normalized = super::normalize_projection_type(
&mut selcx,
+ obligation.param_env,
data.projection_ty,
obligation.cause.clone(),
0
);
- if let Err(error) = self.eq_types(
- false, &obligation.cause,
- data.ty, normalized.value
- ) {
+ if let Err(error) = self.at(&obligation.cause, obligation.param_env)
+ .eq(normalized.value, data.ty) {
values = Some(infer::ValuePairs::Types(ExpectedFound {
expected: normalized.value,
found: data.ty,
-> Option<DefId>
{
let tcx = self.tcx;
-
+ let param_env = obligation.param_env;
let trait_ref = tcx.erase_late_bound_regions(&trait_ref);
let trait_self_ty = trait_ref.self_ty();
let impl_self_ty = impl_trait_ref.self_ty();
- if let Ok(..) = self.can_equate(&trait_self_ty, &impl_self_ty) {
+ if let Ok(..) = self.can_eq(param_env, trait_self_ty, impl_self_ty) {
self_match_impls.push(def_id);
if trait_ref.substs.types().skip(1)
// Try to report a help message
if !trait_ref.has_infer_types() &&
- self.predicate_can_apply(trait_ref) {
+ self.predicate_can_apply(obligation.param_env, trait_ref) {
// If a where-clause may be useful, remind the
// user that they can add it.
//
ty::Predicate::Equate(ref predicate) => {
let predicate = self.resolve_type_vars_if_possible(predicate);
let err = self.equality_predicate(&obligation.cause,
- &predicate).err().unwrap();
+ obligation.param_env,
+ &predicate).err().unwrap();
struct_span_err!(self.tcx.sess, span, E0278,
"the requirement `{}` is not satisfied (`{}`)",
predicate, err)
/// Returns whether the trait predicate may apply for *some* assignment
/// to the type parameters.
- fn predicate_can_apply(&self, pred: ty::PolyTraitRef<'tcx>) -> bool {
+ fn predicate_can_apply(&self,
+ param_env: ty::ParamEnv<'tcx>,
+ pred: ty::PolyTraitRef<'tcx>)
+ -> bool {
struct ParamToVarFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
var_map: FxHashMap<Ty<'tcx>, Ty<'tcx>>
let cleaned_pred = super::project::normalize(
&mut selcx,
+ param_env,
ObligationCause::dummy(),
&cleaned_pred
).value;
let obligation = Obligation::new(
ObligationCause::dummy(),
+ param_env,
cleaned_pred.to_predicate()
);
/// `projection_ty` again.
pub fn normalize_projection_type(&mut self,
infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>)
-> Ty<'tcx>
// FIXME(#20304) -- cache
let mut selcx = SelectionContext::new(infcx);
- let normalized = project::normalize_projection_type(&mut selcx, projection_ty, cause, 0);
+ let normalized = project::normalize_projection_type(&mut selcx,
+ param_env,
+ projection_ty,
+ cause,
+ 0);
for obligation in normalized.obligations {
self.register_predicate_obligation(infcx, obligation);
normalized.value
}
+ /// Requires that `ty` must implement the trait with `def_id` in
+ /// the given environment. This trait must not have any type
+ /// parameters (except for `Self`).
pub fn register_bound(&mut self,
infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
def_id: DefId,
cause: ObligationCause<'tcx>)
self.register_predicate_obligation(infcx, Obligation {
cause: cause,
recursion_depth: 0,
+ param_env,
predicate: trait_ref.to_predicate()
});
}
}
ty::Predicate::Equate(ref binder) => {
- match selcx.infcx().equality_predicate(&obligation.cause, binder) {
+ match selcx.infcx().equality_predicate(&obligation.cause,
+ obligation.param_env,
+ binder) {
Ok(InferOk { obligations, value: () }) => {
Ok(Some(obligations))
},
}
ty::Predicate::WellFormed(ty) => {
- match ty::wf::obligations(selcx.infcx(), obligation.cause.body_id,
+ match ty::wf::obligations(selcx.infcx(),
+ obligation.param_env,
+ obligation.cause.body_id,
ty, obligation.cause.span) {
None => {
pending_obligation.stalled_on = vec![ty];
}
ty::Predicate::Subtype(ref subtype) => {
- match selcx.infcx().subtype_predicate(&obligation.cause, subtype) {
+ match selcx.infcx().subtype_predicate(&obligation.cause,
+ obligation.param_env,
+ subtype) {
None => {
// none means that both are unresolved
pending_obligation.stalled_on = vec![subtype.skip_binder().a,
#[derive(Clone, PartialEq, Eq)]
pub struct Obligation<'tcx, T> {
pub cause: ObligationCause<'tcx>,
+ pub param_env: ty::ParamEnv<'tcx>,
pub recursion_depth: usize,
pub predicate: T,
}
/// Creates predicate obligations from the generic bounds.
pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
generic_bounds: &ty::InstantiatedPredicates<'tcx>)
-> PredicateObligations<'tcx>
{
- util::predicates_for_generics(cause, 0, generic_bounds)
+ util::predicates_for_generics(cause, 0, param_env, generic_bounds)
}
/// Determines whether the type `ty` is known to meet `bound` and
/// conservative towards *no impl*, which is the opposite of the
/// `evaluate` methods).
pub fn type_known_to_meet_bound<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
def_id: DefId,
span: Span)
substs: infcx.tcx.mk_substs_trait(ty, &[]),
};
let obligation = Obligation {
+ param_env,
cause: ObligationCause::misc(span, ast::DUMMY_NODE_ID),
recursion_depth: 0,
predicate: trait_ref.to_predicate(),
// anyhow).
let cause = ObligationCause::misc(span, ast::DUMMY_NODE_ID);
- fulfill_cx.register_bound(infcx, ty, def_id, cause);
+ fulfill_cx.register_bound(infcx, param_env, ty, def_id, cause);
// Note: we only assume something is `Copy` if we can
// *definitively* show that it implements `Copy`. Otherwise,
debug!("normalize_param_env_or_error: elaborated-predicates={:?}",
predicates);
- let elaborated_env = ty::ParamEnv::new(tcx.intern_predicates(&predicates));
+ let elaborated_env = ty::ParamEnv::new(tcx.intern_predicates(&predicates),
+ unnormalized_env.reveal);
- tcx.infer_ctxt(elaborated_env, Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
let predicates = match fully_normalize(
- &infcx, cause,
- // You would really want to pass infcx.param_env.caller_bounds here,
- // but that is an interned slice, and fully_normalize takes &T and returns T, so
- // without further refactoring, a slice can't be used. Luckily, we still have the
- // predicate vector from which we created the ParamEnv in infcx, so we
- // can pass that instead. It's roundabout and a bit brittle, but this code path
- // ought to be refactored anyway, and until then it saves us from having to copy.
- &predicates,
+ &infcx,
+ cause,
+ elaborated_env,
+ // You would really want to pass infcx.param_env.caller_bounds here,
+ // but that is an interned slice, and fully_normalize takes &T and returns T, so
+ // without further refactoring, a slice can't be used. Luckily, we still have the
+ // predicate vector from which we created the ParamEnv in infcx, so we
+ // can pass that instead. It's roundabout and a bit brittle, but this code path
+ // ought to be refactored anyway, and until then it saves us from having to copy.
+ &predicates,
) {
Ok(predicates) => predicates,
Err(errors) => {
infcx.report_fulfillment_errors(&errors);
// An unnormalized env is better than nothing.
- return infcx.param_env;
+ return elaborated_env;
}
};
// all things considered.
tcx.sess.span_err(span, &fixup_err.to_string());
// An unnormalized env is better than nothing.
- return infcx.param_env;
+ return elaborated_env;
}
};
let predicates = match tcx.lift_to_global(&predicates) {
Some(predicates) => predicates,
- None => return infcx.param_env
+ None => return elaborated_env,
};
debug!("normalize_param_env_or_error: resolved predicates={:?}",
- predicates);
+ predicates);
- ty::ParamEnv::new(tcx.intern_predicates(&predicates))
+ ty::ParamEnv::new(tcx.intern_predicates(&predicates), unnormalized_env.reveal)
})
}
pub fn fully_normalize<'a, 'gcx, 'tcx, T>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
cause: ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
value: &T)
-> Result<T, Vec<FulfillmentError<'tcx>>>
where T : TypeFoldable<'tcx>
let mut fulfill_cx = FulfillmentContext::new();
let Normalized { value: normalized_value, obligations } =
- project::normalize(selcx, cause, value);
+ project::normalize(selcx, param_env, cause, value);
debug!("fully_normalize: normalized_value={:?} obligations={:?}",
normalized_value,
obligations);
Ok(resolved_value)
}
-/// Normalizes the predicates and checks whether they hold. If this
-/// returns false, then either normalize encountered an error or one
-/// of the predicates did not hold. Used when creating vtables to
-/// check for unsatisfiable methods.
+/// Normalizes the predicates and checks whether they hold in an empty
+/// environment. If this returns false, then either normalize
+/// encountered an error or one of the predicates did not hold. Used
+/// when creating vtables to check for unsatisfiable methods.
pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
predicates: Vec<ty::Predicate<'tcx>>)
-> bool
debug!("normalize_and_test_predicates(predicates={:?})",
predicates);
- tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
+ let param_env = ty::ParamEnv::empty(Reveal::All);
let mut selcx = SelectionContext::new(&infcx);
let mut fulfill_cx = FulfillmentContext::new();
let cause = ObligationCause::dummy();
let Normalized { value: predicates, obligations } =
- normalize(&mut selcx, cause.clone(), &predicates);
+ normalize(&mut selcx, param_env, cause.clone(), &predicates);
for obligation in obligations {
fulfill_cx.register_predicate_obligation(&infcx, obligation);
}
for predicate in predicates {
- let obligation = Obligation::new(cause.clone(), predicate);
+ let obligation = Obligation::new(cause.clone(), param_env, predicate);
fulfill_cx.register_predicate_obligation(&infcx, obligation);
}
impl<'tcx,O> Obligation<'tcx,O> {
pub fn new(cause: ObligationCause<'tcx>,
- trait_ref: O)
+ param_env: ty::ParamEnv<'tcx>,
+ predicate: O)
-> Obligation<'tcx, O>
{
- Obligation { cause: cause,
- recursion_depth: 0,
- predicate: trait_ref }
+ Obligation { cause, param_env, recursion_depth: 0, predicate }
}
fn with_depth(cause: ObligationCause<'tcx>,
recursion_depth: usize,
- trait_ref: O)
+ param_env: ty::ParamEnv<'tcx>,
+ predicate: O)
-> Obligation<'tcx, O>
{
- Obligation { cause: cause,
- recursion_depth: recursion_depth,
- predicate: trait_ref }
+ Obligation { cause, param_env, recursion_depth, predicate }
}
- pub fn misc(span: Span, body_id: ast::NodeId, trait_ref: O) -> Obligation<'tcx, O> {
- Obligation::new(ObligationCause::misc(span, body_id), trait_ref)
+ pub fn misc(span: Span,
+ body_id: ast::NodeId,
+ param_env: ty::ParamEnv<'tcx>,
+ trait_ref: O)
+ -> Obligation<'tcx, O> {
+ Obligation::new(ObligationCause::misc(span, body_id), param_env, trait_ref)
}
pub fn with<P>(&self, value: P) -> Obligation<'tcx,P> {
Obligation { cause: self.cause.clone(),
+ param_env: self.param_env,
recursion_depth: self.recursion_depth,
predicate: value }
}
/// Depending on the stage of compilation, we want projection to be
/// more or less conservative.
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum Reveal {
/// At type-checking time, we refuse to project any associated
/// type that is marked `default`. Non-`default` ("final") types
let Normalized { value: normalized_ty, mut obligations } =
match opt_normalize_projection_type(selcx,
- obligation.predicate.projection_ty.clone(),
+ obligation.param_env,
+ obligation.predicate.projection_ty,
obligation.cause.clone(),
obligation.recursion_depth) {
Some(n) => n,
obligations);
let infcx = selcx.infcx();
- match infcx.eq_types(true, &obligation.cause, normalized_ty, obligation.predicate.ty) {
+ match infcx.at(&obligation.cause, obligation.param_env)
+ .eq(normalized_ty, obligation.predicate.ty) {
Ok(InferOk { obligations: inferred_obligations, value: () }) => {
obligations.extend(inferred_obligations);
Ok(Some(obligations))
/// combines the normalized result and any additional obligations that
/// were incurred as result.
pub fn normalize<'a, 'b, 'gcx, 'tcx, T>(selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
value: &T)
-> Normalized<'tcx, T>
where T : TypeFoldable<'tcx>
{
- normalize_with_depth(selcx, cause, 0, value)
+ normalize_with_depth(selcx, param_env, cause, 0, value)
}
/// As `normalize`, but with a custom depth.
pub fn normalize_with_depth<'a, 'b, 'gcx, 'tcx, T>(
selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
depth: usize,
value: &T)
where T : TypeFoldable<'tcx>
{
debug!("normalize_with_depth(depth={}, value={:?})", depth, value);
- let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth);
+ let mut normalizer = AssociatedTypeNormalizer::new(selcx, param_env, cause, depth);
let result = normalizer.fold(value);
debug!("normalize_with_depth: depth={} result={:?} with {} obligations",
depth, result, normalizer.obligations.len());
struct AssociatedTypeNormalizer<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> {
selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
obligations: Vec<PredicateObligation<'tcx>>,
depth: usize,
impl<'a, 'b, 'gcx, 'tcx> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> {
fn new(selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
depth: usize)
-> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx>
{
AssociatedTypeNormalizer {
selcx: selcx,
+ param_env: param_env,
cause: cause,
obligations: vec![],
depth: depth,
match ty.sty {
ty::TyAnon(def_id, substs) if !substs.has_escaping_regions() => { // (*)
// Only normalize `impl Trait` after type-checking, usually in trans.
- if self.selcx.projection_mode() == Reveal::All {
- let generic_ty = self.tcx().type_of(def_id);
- let concrete_ty = generic_ty.subst(self.tcx(), substs);
- self.fold_ty(concrete_ty)
- } else {
- ty
+ match self.param_env.reveal {
+ Reveal::UserFacing => ty,
+
+ Reveal::All => {
+ let generic_ty = self.tcx().type_of(def_id);
+ let concrete_ty = generic_ty.subst(self.tcx(), substs);
+ self.fold_ty(concrete_ty)
+ }
}
}
let Normalized { value: normalized_ty, obligations } =
normalize_projection_type(self.selcx,
+ self.param_env,
data.clone(),
self.cause.clone(),
self.depth);
/// obligation `<T as Trait>::Item == $X` for later.
pub fn normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
depth: usize)
-> NormalizedTy<'tcx>
{
- opt_normalize_projection_type(selcx, projection_ty.clone(), cause.clone(), depth)
+ opt_normalize_projection_type(selcx, param_env, projection_ty.clone(), cause.clone(), depth)
.unwrap_or_else(move || {
// if we bottom out in ambiguity, create a type variable
// and a deferred predicate to resolve this when more type
ty: ty_var
});
let obligation = Obligation::with_depth(
- cause, depth + 1, projection.to_predicate());
+ cause, depth + 1, param_env, projection.to_predicate());
Normalized {
value: ty_var,
obligations: vec![obligation]
/// which indicates that there are unbound type variables.
fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
depth: usize)
let recursion_limit = selcx.tcx().sess.recursion_limit.get();
let obligation = Obligation::with_depth(cause.clone(),
recursion_limit,
+ param_env,
projection_ty);
selcx.infcx().report_overflow_error(&obligation, false);
}
Err(ProjectionCacheEntry::Error) => {
debug!("opt_normalize_projection_type: \
found error");
- return Some(normalize_to_error(selcx, projection_ty, cause, depth));
+ return Some(normalize_to_error(selcx, param_env, projection_ty, cause, depth));
}
}
- let obligation = Obligation::with_depth(cause.clone(), depth, projection_ty.clone());
+ let obligation = Obligation::with_depth(cause.clone(), depth, param_env, projection_ty);
match project_type(selcx, &obligation) {
Ok(ProjectedTy::Progress(Progress { ty: projected_ty,
mut obligations,
cacheable);
let result = if projected_ty.has_projection_types() {
- let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth+1);
+ let mut normalizer = AssociatedTypeNormalizer::new(selcx,
+ param_env,
+ cause,
+ depth+1);
let normalized_ty = normalizer.fold(&projected_ty);
debug!("opt_normalize_projection_type: \
infcx.projection_cache.borrow_mut()
.error(projection_ty);
- Some(normalize_to_error(selcx, projection_ty, cause, depth))
+ Some(normalize_to_error(selcx, param_env, projection_ty, cause, depth))
}
}
}
/// because it contains `[type error]`. Yuck! (See issue #29857 for
/// one case where this arose.)
fn normalize_to_error<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
depth: usize)
let trait_ref = projection_ty.trait_ref.to_poly_trait_ref();
let trait_obligation = Obligation { cause: cause,
recursion_depth: depth,
+ param_env,
predicate: trait_ref.to_predicate() };
let tcx = selcx.infcx().tcx;
let def_id = tcx.associated_items(projection_ty.trait_ref.def_id).find(|i|
candidate_set: &mut ProjectionTyCandidateSet<'tcx>)
{
debug!("assemble_candidates_from_param_env(..)");
- let env_predicates = selcx.param_env().caller_bounds.iter().cloned();
assemble_candidates_from_predicates(selcx,
obligation,
obligation_trait_ref,
candidate_set,
ProjectionTyCandidate::ParamEnv,
- env_predicates);
+ obligation.param_env.caller_bounds.iter().cloned());
}
/// In the case of a nested projection like <<A as Foo>::FooT as Bar>::BarT, we may find
candidate_set: &mut ProjectionTyCandidateSet<'tcx>,
ctor: fn(ty::PolyProjectionPredicate<'tcx>) -> ProjectionTyCandidate<'tcx>,
env_predicates: I)
- where I: Iterator<Item=ty::Predicate<'tcx>>
+ where I: IntoIterator<Item=ty::Predicate<'tcx>>
{
debug!("assemble_candidates_from_predicates(obligation={:?})",
obligation);
data.to_poly_trait_ref();
let obligation_poly_trait_ref =
obligation_trait_ref.to_poly_trait_ref();
- infcx.sub_poly_trait_refs(false,
- obligation.cause.clone(),
- data_poly_trait_ref,
- obligation_poly_trait_ref)
- .map(|InferOk { obligations: _, value: () }| {
- // FIXME(#32730) -- do we need to take obligations
- // into account in any way? At the moment, no.
- })
- .is_ok()
+ infcx.at(&obligation.cause, obligation.param_env)
+ .sup(obligation_poly_trait_ref, data_poly_trait_ref)
+ .map(|InferOk { obligations: _, value: () }| {
+ // FIXME(#32730) -- do we need to take obligations
+ // into account in any way? At the moment, no.
+ })
+ .is_ok()
});
debug!("assemble_candidates_from_predicates: candidate={:?} \
// get a result which isn't correct for all monomorphizations.
let new_candidate = if !is_default {
Some(ProjectionTyCandidate::Select)
- } else if selcx.projection_mode() == Reveal::All {
+ } else if obligation.param_env.reveal == Reveal::All {
assert!(!poly_trait_ref.needs_infer());
if !poly_trait_ref.needs_subst() {
Some(ProjectionTyCandidate::Select)
let data_poly_trait_ref = data.to_poly_trait_ref();
let obligation_poly_trait_ref = obligation_trait_ref.to_poly_trait_ref();
selcx.infcx().probe(|_| {
- selcx.infcx().sub_poly_trait_refs(false,
- obligation.cause.clone(),
- data_poly_trait_ref,
- obligation_poly_trait_ref).is_ok()
+ selcx.infcx().at(&obligation.cause, obligation.param_env)
+ .sup(obligation_poly_trait_ref, data_poly_trait_ref)
+ .is_ok()
})
});
value: closure_type,
obligations
} = normalize_with_depth(selcx,
+ obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth+1,
&closure_type);
{
let infcx = selcx.infcx();
let cause = obligation.cause.clone();
+ let param_env = obligation.param_env;
let trait_ref = obligation.predicate.trait_ref;
- match infcx.match_poly_projection_predicate(cause, poly_projection, trait_ref) {
+ match infcx.match_poly_projection_predicate(cause, param_env, poly_projection, trait_ref) {
Ok(InferOk { value: ty_match, obligations }) => {
Progress {
ty: ty_match.value,
let VtableImplData { substs, nested, impl_def_id } = impl_vtable;
let tcx = selcx.tcx();
+ let param_env = obligation.param_env;
let assoc_ty = assoc_ty_def(selcx, impl_def_id, obligation.predicate.item_name(tcx));
let ty = if !assoc_ty.item.defaultness.has_value() {
} else {
tcx.type_of(assoc_ty.item.def_id)
};
- let substs = translate_substs(selcx.infcx(), impl_def_id, substs, assoc_ty.node);
+ let substs = translate_substs(selcx.infcx(), param_env, impl_def_id, substs, assoc_ty.node);
Progress {
ty: ty.subst(tcx, substs),
obligations: nested,
use super::{ObligationCauseCode, BuiltinDerivedObligation, ImplDerivedObligation};
use super::{SelectionError, Unimplemented, OutputTypeParameterMismatch};
use super::{ObjectCastObligation, Obligation};
-use super::Reveal;
use super::TraitNotObjectSafe;
use super::Selection;
use super::SelectionResult;
self.infcx.tcx
}
- pub fn param_env(&self) -> ty::ParamEnv<'gcx> {
- self.infcx.param_env()
- }
-
pub fn closure_typer(&self) -> &'cx InferCtxt<'cx, 'gcx, 'tcx> {
self.infcx
}
- pub fn projection_mode(&self) -> Reveal {
- self.infcx.projection_mode()
- }
-
/// Wraps the inference context's in_snapshot s.t. snapshot handling is only from the selection
/// context's self.
fn in_snapshot<R, F>(&mut self, f: F) -> R
ty::Predicate::Equate(ref p) => {
// does this code ever run?
- match self.infcx.equality_predicate(&obligation.cause, p) {
+ match self.infcx.equality_predicate(&obligation.cause, obligation.param_env, p) {
Ok(InferOk { obligations, .. }) => {
self.inferred_obligations.extend(obligations);
EvaluatedToOk
ty::Predicate::Subtype(ref p) => {
// does this code ever run?
- match self.infcx.subtype_predicate(&obligation.cause, p) {
+ match self.infcx.subtype_predicate(&obligation.cause, obligation.param_env, p) {
Some(Ok(InferOk { obligations, .. })) => {
self.inferred_obligations.extend(obligations);
EvaluatedToOk
}
ty::Predicate::WellFormed(ty) => {
- match ty::wf::obligations(self.infcx, obligation.cause.body_id,
+ match ty::wf::obligations(self.infcx,
+ obligation.param_env,
+ obligation.cause.body_id,
ty, obligation.cause.span) {
Some(obligations) =>
self.evaluate_predicates_recursively(previous_stack, obligations.iter()),
let stack = self.push_stack(previous_stack, obligation);
let fresh_trait_ref = stack.fresh_trait_ref;
- if let Some(result) = self.check_evaluation_cache(fresh_trait_ref) {
+ if let Some(result) = self.check_evaluation_cache(obligation.param_env, fresh_trait_ref) {
debug!("CACHE HIT: EVAL({:?})={:?}",
fresh_trait_ref,
result);
debug!("CACHE MISS: EVAL({:?})={:?}",
fresh_trait_ref,
result);
- self.insert_evaluation_cache(fresh_trait_ref, result);
+ self.insert_evaluation_cache(obligation.param_env, fresh_trait_ref, result);
result
}
result
}
- fn check_evaluation_cache(&self, trait_ref: ty::PolyTraitRef<'tcx>)
+ fn check_evaluation_cache(&self,
+ param_env: ty::ParamEnv<'tcx>,
+ trait_ref: ty::PolyTraitRef<'tcx>)
-> Option<EvaluationResult>
{
- if self.can_use_global_caches() {
+ if self.can_use_global_caches(param_env) {
let cache = self.tcx().evaluation_cache.hashmap.borrow();
if let Some(cached) = cache.get(&trait_ref) {
return Some(cached.clone());
}
fn insert_evaluation_cache(&mut self,
+ param_env: ty::ParamEnv<'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>,
result: EvaluationResult)
{
return;
}
- if self.can_use_global_caches() {
+ if self.can_use_global_caches(param_env) {
let mut cache = self.tcx().evaluation_cache.hashmap.borrow_mut();
if let Some(trait_ref) = self.tcx().lift_to_global(&trait_ref) {
cache.insert(trait_ref, result);
stack);
assert!(!stack.obligation.predicate.has_escaping_regions());
- if let Some(c) = self.check_candidate_cache(&cache_fresh_trait_pred) {
+ if let Some(c) = self.check_candidate_cache(stack.obligation.param_env,
+ &cache_fresh_trait_pred) {
debug!("CACHE HIT: SELECT({:?})={:?}",
cache_fresh_trait_pred,
c);
if self.should_update_candidate_cache(&cache_fresh_trait_pred, &candidate) {
debug!("CACHE MISS: SELECT({:?})={:?}",
cache_fresh_trait_pred, candidate);
- self.insert_candidate_cache(cache_fresh_trait_pred, candidate.clone());
+ self.insert_candidate_cache(stack.obligation.param_env,
+ cache_fresh_trait_pred,
+ candidate.clone());
}
candidate
/// Returns true if the global caches can be used.
/// Do note that if the type itself is not in the
/// global tcx, the local caches will be used.
- fn can_use_global_caches(&self) -> bool {
+ fn can_use_global_caches(&self, param_env: ty::ParamEnv<'tcx>) -> bool {
// If there are any where-clauses in scope, then we always use
// a cache local to this particular scope. Otherwise, we
// switch to a global cache. We used to try and draw
// annoying and weird bugs like #22019 and #18290. This simple
// rule seems to be pretty clearly safe and also still retains
// a very high hit rate (~95% when compiling rustc).
- if !self.param_env().caller_bounds.is_empty() {
+ if !param_env.caller_bounds.is_empty() {
return false;
}
}
fn check_candidate_cache(&mut self,
+ param_env: ty::ParamEnv<'tcx>,
cache_fresh_trait_pred: &ty::PolyTraitPredicate<'tcx>)
-> Option<SelectionResult<'tcx, SelectionCandidate<'tcx>>>
{
let trait_ref = &cache_fresh_trait_pred.0.trait_ref;
- if self.can_use_global_caches() {
+ if self.can_use_global_caches(param_env) {
let cache = self.tcx().selection_cache.hashmap.borrow();
if let Some(cached) = cache.get(&trait_ref) {
return Some(cached.clone());
}
fn insert_candidate_cache(&mut self,
+ param_env: ty::ParamEnv<'tcx>,
cache_fresh_trait_pred: ty::PolyTraitPredicate<'tcx>,
candidate: SelectionResult<'tcx, SelectionCandidate<'tcx>>)
{
let trait_ref = cache_fresh_trait_pred.0.trait_ref;
- if self.can_use_global_caches() {
+ if self.can_use_global_caches(param_env) {
let mut cache = self.tcx().selection_cache.hashmap.borrow_mut();
if let Some(trait_ref) = self.tcx().lift_to_global(&trait_ref) {
if let Some(candidate) = self.tcx().lift_to_global(&candidate) {
{
let TraitObligationStack { obligation, .. } = *stack;
let ref obligation = Obligation {
+ param_env: obligation.param_env,
cause: obligation.cause.clone(),
recursion_depth: obligation.recursion_depth,
predicate: self.infcx().resolve_type_vars_if_possible(&obligation.predicate)
-> bool
{
assert!(!skol_trait_ref.has_escaping_regions());
- let cause = obligation.cause.clone();
- match self.infcx.sub_poly_trait_refs(false,
- cause,
- trait_bound.clone(),
- ty::Binder(skol_trait_ref.clone())) {
+ match self.infcx.at(&obligation.cause, obligation.param_env)
+ .sup(ty::Binder(skol_trait_ref), trait_bound) {
Ok(InferOk { obligations, .. }) => {
self.inferred_obligations.extend(obligations);
}
stack.obligation);
let all_bounds =
- self.param_env().caller_bounds
- .iter()
- .filter_map(|o| o.to_opt_poly_trait_ref());
+ stack.obligation.param_env.caller_bounds
+ .iter()
+ .filter_map(|o| o.to_opt_poly_trait_ref());
// micro-optimization: filter out predicates relating to different
// traits.
}
fn collect_predicates_for_types(&mut self,
+ param_env: ty::ParamEnv<'tcx>,
cause: ObligationCause<'tcx>,
recursion_depth: usize,
trait_def_id: DefId,
this.infcx().skolemize_late_bound_regions(&ty, snapshot);
let Normalized { value: normalized_ty, mut obligations } =
project::normalize_with_depth(this,
+ param_env,
cause.clone(),
recursion_depth,
&skol_ty);
let skol_obligation =
- this.tcx().predicate_for_trait_def(
- cause.clone(),
- trait_def_id,
- recursion_depth,
- normalized_ty,
- &[]);
+ this.tcx().predicate_for_trait_def(param_env,
+ cause.clone(),
+ trait_def_id,
+ recursion_depth,
+ normalized_ty,
+ &[]);
obligations.push(skol_obligation);
this.infcx().plug_leaks(skol_map, snapshot, obligations)
})
};
let cause = obligation.derived_cause(BuiltinDerivedObligation);
- self.collect_predicates_for_types(cause,
+ self.collect_predicates_for_types(obligation.param_env,
+ cause,
obligation.recursion_depth+1,
trait_def,
nested)
let cause = obligation.derived_cause(BuiltinDerivedObligation);
let mut obligations = self.collect_predicates_for_types(
+ obligation.param_env,
cause,
obligation.recursion_depth+1,
trait_def_id,
let cause = obligation.derived_cause(ImplDerivedObligation);
this.impl_or_trait_obligations(cause,
obligation.recursion_depth + 1,
+ obligation.param_env,
trait_def_id,
&trait_ref.substs,
skol_map,
snapshot);
debug!("confirm_impl_candidate substs={:?}", substs);
let cause = obligation.derived_cause(ImplDerivedObligation);
- this.vtable_impl(impl_def_id, substs, cause,
+ this.vtable_impl(impl_def_id,
+ substs,
+ cause,
obligation.recursion_depth + 1,
- skol_map, snapshot)
+ obligation.param_env,
+ skol_map,
+ snapshot)
})
}
mut substs: Normalized<'tcx, &'tcx Substs<'tcx>>,
cause: ObligationCause<'tcx>,
recursion_depth: usize,
+ param_env: ty::ParamEnv<'tcx>,
skol_map: infer::SkolemizationMap<'tcx>,
snapshot: &infer::CombinedSnapshot)
-> VtableImplData<'tcx, PredicateObligation<'tcx>>
let mut impl_obligations =
self.impl_or_trait_obligations(cause,
recursion_depth,
+ param_env,
impl_def_id,
&substs.value,
skol_map,
.map_bound(|(trait_ref, _)| trait_ref);
self.confirm_poly_trait_refs(obligation.cause.clone(),
+ obligation.param_env,
obligation.predicate.to_poly_trait_ref(),
trait_ref)?;
Ok(VtableFnPointerData { fn_ty: self_ty, nested: vec![] })
obligations);
self.confirm_poly_trait_refs(obligation.cause.clone(),
+ obligation.param_env,
obligation.predicate.to_poly_trait_ref(),
trait_ref)?;
obligations.push(Obligation::new(
- obligation.cause.clone(),
- ty::Predicate::ClosureKind(closure_def_id, kind)));
+ obligation.cause.clone(),
+ obligation.param_env,
+ ty::Predicate::ClosureKind(closure_def_id, kind)));
Ok(VtableClosureData {
closure_def_id: closure_def_id,
/// report an error to the user.
fn confirm_poly_trait_refs(&mut self,
obligation_cause: ObligationCause<'tcx>,
+ obligation_param_env: ty::ParamEnv<'tcx>,
obligation_trait_ref: ty::PolyTraitRef<'tcx>,
expected_trait_ref: ty::PolyTraitRef<'tcx>)
-> Result<(), SelectionError<'tcx>>
{
let obligation_trait_ref = obligation_trait_ref.clone();
- self.infcx.sub_poly_trait_refs(false,
- obligation_cause.clone(),
- expected_trait_ref.clone(),
- obligation_trait_ref.clone())
+ self.infcx
+ .at(&obligation_cause, obligation_param_env)
+ .sup(obligation_trait_ref, expected_trait_ref)
.map(|InferOk { obligations, .. }| self.inferred_obligations.extend(obligations))
.map_err(|e| OutputTypeParameterMismatch(expected_trait_ref, obligation_trait_ref, e))
}
let new_trait = tcx.mk_dynamic(
ty::Binder(tcx.mk_existential_predicates(iter)), r_b);
let InferOk { obligations, .. } =
- self.infcx.eq_types(false, &obligation.cause, new_trait, target)
- .map_err(|_| Unimplemented)?;
+ self.infcx.at(&obligation.cause, obligation.param_env)
+ .eq(target, new_trait)
+ .map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
// Register one obligation for 'a: 'b.
let outlives = ty::OutlivesPredicate(r_a, r_b);
nested.push(Obligation::with_depth(cause,
obligation.recursion_depth + 1,
+ obligation.param_env,
ty::Binder(outlives).to_predicate()));
}
let mut push = |predicate| {
nested.push(Obligation::with_depth(cause.clone(),
obligation.recursion_depth + 1,
+ obligation.param_env,
predicate));
};
// [T; n] -> [T].
(&ty::TyArray(a, _), &ty::TySlice(b)) => {
let InferOk { obligations, .. } =
- self.infcx.eq_types(false, &obligation.cause, a, b)
- .map_err(|_| Unimplemented)?;
+ self.infcx.at(&obligation.cause, obligation.param_env)
+ .eq(b, a)
+ .map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
}
});
let new_struct = tcx.mk_adt(def, tcx.mk_substs(params));
let InferOk { obligations, .. } =
- self.infcx.eq_types(false, &obligation.cause, new_struct, target)
- .map_err(|_| Unimplemented)?;
+ self.infcx.at(&obligation.cause, obligation.param_env)
+ .eq(target, new_struct)
+ .map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
// Construct the nested Field<T>: Unsize<Field<U>> predicate.
nested.push(tcx.predicate_for_trait_def(
+ obligation.param_env,
obligation.cause.clone(),
obligation.predicate.def_id(),
obligation.recursion_depth + 1,
let impl_trait_ref =
project::normalize_with_depth(self,
+ obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
&impl_trait_ref);
skol_obligation_trait_ref);
let InferOk { obligations, .. } =
- self.infcx.eq_trait_refs(false,
- &obligation.cause,
- impl_trait_ref.value.clone(),
- skol_obligation_trait_ref)
- .map_err(|e| {
- debug!("match_impl: failed eq_trait_refs due to `{}`", e);
- ()
- })?;
+ self.infcx.at(&obligation.cause, obligation.param_env)
+ .eq(skol_obligation_trait_ref, impl_trait_ref.value)
+ .map_err(|e| {
+ debug!("match_impl: failed eq_trait_refs due to `{}`", e);
+ ()
+ })?;
self.inferred_obligations.extend(obligations);
if let Err(e) = self.infcx.leak_check(false,
obligation,
poly_trait_ref);
- self.infcx.sub_poly_trait_refs(false,
- obligation.cause.clone(),
- poly_trait_ref,
- obligation.predicate.to_poly_trait_ref())
- .map(|InferOk { obligations, .. }| self.inferred_obligations.extend(obligations))
- .map_err(|_| ())
+ self.infcx.at(&obligation.cause, obligation.param_env)
+ .sup(obligation.predicate.to_poly_trait_ref(), poly_trait_ref)
+ .map(|InferOk { obligations, .. }| self.inferred_obligations.extend(obligations))
+ .map_err(|_| ())
}
///////////////////////////////////////////////////////////////////////////
// A closure signature can contain associated types which
// must be normalized.
normalize_with_depth(self,
+ obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth+1,
&trait_ref)
fn impl_or_trait_obligations(&mut self,
cause: ObligationCause<'tcx>,
recursion_depth: usize,
+ param_env: ty::ParamEnv<'tcx>,
def_id: DefId, // of impl or trait
substs: &Substs<'tcx>, // for impl or trait
skol_map: infer::SkolemizationMap<'tcx>,
let predicates = tcx.predicates_of(def_id);
assert_eq!(predicates.parent, None);
let predicates = predicates.predicates.iter().flat_map(|predicate| {
- let predicate = normalize_with_depth(self, cause.clone(), recursion_depth,
+ let predicate = normalize_with_depth(self, param_env, cause.clone(), recursion_depth,
&predicate.subst(tcx, substs));
predicate.obligations.into_iter().chain(
Some(Obligation {
cause: cause.clone(),
recursion_depth: recursion_depth,
+ param_env,
predicate: predicate.value
}))
}).collect();
/// Given a subst for the requested impl, translate it to a subst
/// appropriate for the actual item definition (whether it be in that impl,
/// a parent impl, or the trait).
+///
/// When we have selected one impl, but are actually using item definitions from
/// a parent impl providing a default, we need a way to translate between the
/// type parameters of the two impls. Here the `source_impl` is the one we've
/// *fulfillment* to relate the two impls, requiring that all projections are
/// resolved.
pub fn translate_substs<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
source_impl: DefId,
source_substs: &'tcx Substs<'tcx>,
target_node: specialization_graph::Node)
return source_substs;
}
- fulfill_implication(infcx, source_trait_ref, target_impl).unwrap_or_else(|_| {
- bug!("When translating substitutions for specialization, the expected \
- specializaiton failed to hold")
- })
+ fulfill_implication(infcx, param_env, source_trait_ref, target_impl)
+ .unwrap_or_else(|_| {
+ bug!("When translating substitutions for specialization, the expected \
+ specializaiton failed to hold")
+ })
}
specialization_graph::Node::Trait(..) => source_trait_ref.substs,
};
let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
match ancestors.defs(tcx, item.name, item.kind).next() {
Some(node_item) => {
- let substs = tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
+ let substs = tcx.infer_ctxt(()).enter(|infcx| {
+ let param_env = ty::ParamEnv::empty(Reveal::All);
let substs = substs.rebase_onto(tcx, trait_def_id, impl_data.substs);
- let substs = translate_substs(&infcx, impl_data.impl_def_id,
+ let substs = translate_substs(&infcx, param_env, impl_data.impl_def_id,
substs, node_item.node);
let substs = infcx.tcx.erase_regions(&substs);
tcx.lift(&substs).unwrap_or_else(|| {
let impl1_trait_ref = tcx.impl_trait_ref(impl1_def_id).unwrap();
// Create a infcx, taking the predicates of impl1 as assumptions:
- let result = tcx.infer_ctxt(penv, Reveal::UserFacing).enter(|infcx| {
+ let result = tcx.infer_ctxt(()).enter(|infcx| {
// Normalize the trait reference. The WF rules ought to ensure
// that this always succeeds.
let impl1_trait_ref =
- match traits::fully_normalize(&infcx, ObligationCause::dummy(), &impl1_trait_ref) {
+ match traits::fully_normalize(&infcx,
+ ObligationCause::dummy(),
+ penv,
+ &impl1_trait_ref) {
Ok(impl1_trait_ref) => impl1_trait_ref,
Err(err) => {
bug!("failed to fully normalize {:?}: {:?}", impl1_trait_ref, err);
};
// Attempt to prove that impl2 applies, given all of the above.
- fulfill_implication(&infcx, impl1_trait_ref, impl2_def_id).is_ok()
+ fulfill_implication(&infcx, penv, impl1_trait_ref, impl2_def_id).is_ok()
});
tcx.specializes_cache.borrow_mut().insert(impl1_def_id, impl2_def_id, result);
/// `source_trait_ref` and those whose identity is determined via a where
/// clause in the impl.
fn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
source_trait_ref: ty::TraitRef<'tcx>,
target_impl: DefId)
-> Result<&'tcx Substs<'tcx>, ()> {
let selcx = &mut SelectionContext::new(&infcx);
let target_substs = infcx.fresh_substs_for_item(DUMMY_SP, target_impl);
let (target_trait_ref, mut obligations) = impl_trait_ref_and_oblig(selcx,
- target_impl,
- target_substs);
+ param_env,
+ target_impl,
+ target_substs);
// do the impls unify? If not, no specialization.
- match infcx.eq_trait_refs(true,
- &ObligationCause::dummy(),
- source_trait_ref,
- target_trait_ref) {
+ match infcx.at(&ObligationCause::dummy(), param_env)
+ .eq(source_trait_ref, target_trait_ref) {
Ok(InferOk { obligations: o, .. }) => {
obligations.extend(o);
}
source_trait_ref,
target_trait_ref,
errors,
- infcx.param_env.caller_bounds);
+ param_env.caller_bounds);
Err(())
}
use super::{OverlapError, specializes};
use hir::def_id::DefId;
-use traits::{self, Reveal};
+use traits;
use ty::{self, TyCtxt, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType};
use std::rc::Rc;
let possible_sibling = *slot;
let tcx = tcx.global_tcx();
- let (le, ge) = tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
+ let (le, ge) = tcx.infer_ctxt(()).enter(|infcx| {
let overlap = traits::overlapping_impls(&infcx,
possible_sibling,
impl_def_id);
cause: self.cause.clone(),
recursion_depth: self.recursion_depth,
predicate: self.predicate.fold_with(folder),
+ param_env: self.param_env.fold_with(folder),
}
}
// Do the initial selection for the obligation. This yields the
// shallow result we are looking for -- that is, what specific impl.
- self.infer_ctxt((), Reveal::All).enter(|infcx| {
+ self.infer_ctxt(()).enter(|infcx| {
let mut selcx = SelectionContext::new(&infcx);
+ let param_env = ty::ParamEnv::empty(Reveal::All);
let obligation_cause = ObligationCause::misc(span,
ast::DUMMY_NODE_ID);
let obligation = Obligation::new(obligation_cause,
+ param_env,
trait_ref.to_poly_trait_predicate());
let selection = match selcx.select(&obligation) {
/// returning the resulting trait ref and all obligations that arise.
/// The obligations are closed under normalization.
pub fn impl_trait_ref_and_oblig<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
impl_def_id: DefId,
impl_substs: &Substs<'tcx>)
-> (ty::TraitRef<'tcx>,
let impl_trait_ref =
impl_trait_ref.subst(selcx.tcx(), impl_substs);
let Normalized { value: impl_trait_ref, obligations: normalization_obligations1 } =
- super::normalize(selcx, ObligationCause::dummy(), &impl_trait_ref);
+ super::normalize(selcx, param_env, ObligationCause::dummy(), &impl_trait_ref);
let predicates = selcx.tcx().predicates_of(impl_def_id);
let predicates = predicates.instantiate(selcx.tcx(), impl_substs);
let Normalized { value: predicates, obligations: normalization_obligations2 } =
- super::normalize(selcx, ObligationCause::dummy(), &predicates);
+ super::normalize(selcx, param_env, ObligationCause::dummy(), &predicates);
let impl_obligations =
- predicates_for_generics(ObligationCause::dummy(), 0, &predicates);
+ predicates_for_generics(ObligationCause::dummy(), 0, param_env, &predicates);
let impl_obligations: Vec<_> =
impl_obligations.into_iter()
/// See `super::obligations_for_generics`
pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>,
recursion_depth: usize,
+ param_env: ty::ParamEnv<'tcx>,
generic_bounds: &ty::InstantiatedPredicates<'tcx>)
-> Vec<PredicateObligation<'tcx>>
{
generic_bounds.predicates.iter().map(|predicate| {
Obligation { cause: cause.clone(),
recursion_depth: recursion_depth,
+ param_env: param_env,
predicate: predicate.clone() }
}).collect()
}
pub fn predicate_for_trait_ref<'tcx>(
cause: ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
trait_ref: ty::TraitRef<'tcx>,
recursion_depth: usize)
-> PredicateObligation<'tcx>
{
Obligation {
cause: cause,
+ param_env: param_env,
recursion_depth: recursion_depth,
predicate: trait_ref.to_predicate(),
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn predicate_for_trait_def(self,
- cause: ObligationCause<'tcx>,
- trait_def_id: DefId,
- recursion_depth: usize,
- param_ty: Ty<'tcx>,
- ty_params: &[Ty<'tcx>])
+ param_env: ty::ParamEnv<'tcx>,
+ cause: ObligationCause<'tcx>,
+ trait_def_id: DefId,
+ recursion_depth: usize,
+ param_ty: Ty<'tcx>,
+ ty_params: &[Ty<'tcx>])
-> PredicateObligation<'tcx>
{
let trait_ref = ty::TraitRef {
def_id: trait_def_id,
substs: self.mk_substs_trait(param_ty, ty_params)
};
- predicate_for_trait_ref(cause, trait_ref, recursion_depth)
+ predicate_for_trait_ref(cause, param_env, trait_ref, recursion_depth)
}
/// Cast a trait reference into a reference to one of its super
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ty::{self, Ty, TyCtxt, TypeAndMut};
-use ty::LvaluePreference::{NoPreference};
-
-use syntax::ast;
-use syntax_pos::Span;
-
use hir;
+use hir::def_id::DefId;
+use ty::{self, Ty, TyCtxt};
+use ty::subst::Substs;
+
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
+/// Represents coercing a value to a different type of value.
+///
+/// We transform values by following a number of `Adjust` steps in order.
+/// See the documentation on variants of `Adjust` for more details.
+///
+/// Here are some common scenarios:
+///
+/// 1. The simplest cases are where a pointer is not adjusted fat vs thin.
+/// Here the pointer will be dereferenced N times (where a dereference can
+/// happen to raw or borrowed pointers or any smart pointer which implements
+/// Deref, including Box<_>). The number of dereferences is given by
+/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
+/// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
+/// `false`.
+///
+/// 2. A thin-to-fat coercion involves unsizing the underlying data. We start
+/// with a thin pointer, deref a number of times, unsize the underlying data,
+/// then autoref. The 'unsize' phase may change a fixed length array to a
+/// dynamically sized one, a concrete object to a trait object, or statically
+/// sized struct to a dynamically sized one. E.g., &[i32; 4] -> &[i32] is
+/// represented by:
+///
+/// ```
+/// Deref(None) -> [i32; 4],
+/// Borrow(AutoBorrow::Ref) -> &[i32; 4],
+/// Unsize -> &[i32],
+/// ```
+///
+/// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
+/// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
+/// The autoderef and -ref are the same as in the above example, but the type
+/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
+/// the underlying conversions from `[i32; 4]` to `[i32]`.
+///
+/// 3. Coercing a `Box<T>` to `Box<Trait>` is an interesting special case. In
+/// that case, we have the pointer we need coming in, so there are no
+/// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
+/// At some point, of course, `Box` should move out of the compiler, in which
+/// case this is analogous to transforming a struct. E.g., Box<[i32; 4]> ->
+/// Box<[i32]> is an `Adjust::Unsize` with the target `Box<[i32]>`.
+#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Adjustment<'tcx> {
pub kind: Adjust<'tcx>,
- pub target: Ty<'tcx>
+ pub target: Ty<'tcx>,
}
-#[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable)]
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub enum Adjust<'tcx> {
/// Go from ! to any type.
NeverToAny,
/// Go from a mut raw pointer to a const raw pointer.
MutToConstPointer,
- /// Represents coercing a pointer to a different kind of pointer - where 'kind'
- /// here means either or both of raw vs borrowed vs unique and fat vs thin.
- ///
- /// We transform pointers by following the following steps in order:
- /// 1. Deref the pointer `self.autoderefs` times (may be 0).
- /// 2. If `autoref` is `Some(_)`, then take the address and produce either a
- /// `&` or `*` pointer.
- /// 3. If `unsize` is `Some(_)`, then apply the unsize transformation,
- /// which will do things like convert thin pointers to fat
- /// pointers, or convert structs containing thin pointers to
- /// structs containing fat pointers, or convert between fat
- /// pointers. We don't store the details of how the transform is
- /// done (in fact, we don't know that, because it might depend on
- /// the precise type parameters). We just store the target
- /// type. Trans figures out what has to be done at monomorphization
- /// time based on the precise source/target type at hand.
- ///
- /// To make that more concrete, here are some common scenarios:
- ///
- /// 1. The simplest cases are where the pointer is not adjusted fat vs thin.
- /// Here the pointer will be dereferenced N times (where a dereference can
- /// happen to raw or borrowed pointers or any smart pointer which implements
- /// Deref, including Box<_>). The number of dereferences is given by
- /// `autoderefs`. It can then be auto-referenced zero or one times, indicated
- /// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
- /// None.
- ///
- /// 2. A thin-to-fat coercon involves unsizing the underlying data. We start
- /// with a thin pointer, deref a number of times, unsize the underlying data,
- /// then autoref. The 'unsize' phase may change a fixed length array to a
- /// dynamically sized one, a concrete object to a trait object, or statically
- /// sized struct to a dyncamically sized one. E.g., &[i32; 4] -> &[i32] is
- /// represented by:
- ///
- /// ```
- /// Adjust::DerefRef {
- /// autoderefs: 1, // &[i32; 4] -> [i32; 4]
- /// autoref: Some(AutoBorrow::Ref), // [i32] -> &[i32]
- /// unsize: Some([i32]), // [i32; 4] -> [i32]
- /// }
- /// ```
- ///
- /// Note that for a struct, the 'deep' unsizing of the struct is not recorded.
- /// E.g., `struct Foo<T> { x: T }` we can coerce &Foo<[i32; 4]> to &Foo<[i32]>
- /// The autoderef and -ref are the same as in the above example, but the type
- /// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
- /// the underlying conversions from `[i32; 4]` to `[i32]`.
- ///
- /// 3. Coercing a `Box<T>` to `Box<Trait>` is an interesting special case. In
- /// that case, we have the pointer we need coming in, so there are no
- /// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
- /// At some point, of course, `Box` should move out of the compiler, in which
- /// case this is analogous to transformating a struct. E.g., Box<[i32; 4]> ->
- /// Box<[i32]> is represented by:
- ///
- /// ```
- /// Adjust::DerefRef {
- /// autoderefs: 0,
- /// autoref: None,
- /// unsize: Some(Box<[i32]>),
- /// }
- /// ```
- DerefRef {
- /// Step 1. Apply a number of dereferences, producing an lvalue.
- autoderefs: usize,
-
- /// Step 2. Optionally produce a pointer/reference from the value.
- autoref: Option<AutoBorrow<'tcx>>,
-
- /// Step 3. Unsize a pointer/reference value, e.g. `&[T; n]` to
- /// `&[T]`. Note that the source could be a thin or fat pointer.
- unsize: bool,
- }
+ /// Dereference once, producing an lvalue.
+ Deref(Option<OverloadedDeref<'tcx>>),
+
+ /// Take the address and produce either a `&` or `*` pointer.
+ Borrow(AutoBorrow<'tcx>),
+
+ /// Unsize a pointer/reference value, e.g. `&[T; n]` to
+ /// `&[T]`. Note that the source could be a thin or fat pointer.
+ /// This will do things like convert thin pointers to fat
+ /// pointers, or convert structs containing thin pointers to
+ /// structs containing fat pointers, or convert between fat
+ /// pointers. We don't store the details of how the transform is
+ /// done (in fact, we don't know that, because it might depend on
+ /// the precise type parameters). We just store the target
+ /// type. Trans figures out what has to be done at monomorphization
+ /// time based on the precise source/target type at hand.
+ Unsize,
}
-impl<'tcx> Adjustment<'tcx> {
- pub fn is_identity(&self) -> bool {
- match self.kind {
- Adjust::NeverToAny => self.target.is_never(),
-
- Adjust::DerefRef { autoderefs: 0, autoref: None, unsize: false } => true,
+/// An overloaded autoderef step, representing a `Deref(Mut)::deref(_mut)`
+/// call, with the signature `&'a T -> &'a U` or `&'a mut T -> &'a mut U`.
+/// The target type is `U` in both cases, with the region and mutability
+/// being those shared by both the receiver and the returned reference.
+#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)]
+pub struct OverloadedDeref<'tcx> {
+ pub region: ty::Region<'tcx>,
+ pub mutbl: hir::Mutability,
+}
- Adjust::ReifyFnPointer |
- Adjust::UnsafeFnPointer |
- Adjust::ClosureFnPointer |
- Adjust::MutToConstPointer |
- Adjust::DerefRef {..} => false,
- }
+impl<'a, 'gcx, 'tcx> OverloadedDeref<'tcx> {
+ pub fn method_call(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, source: Ty<'tcx>)
+ -> (DefId, &'tcx Substs<'tcx>) {
+ let trait_def_id = match self.mutbl {
+ hir::MutImmutable => tcx.lang_items.deref_trait(),
+ hir::MutMutable => tcx.lang_items.deref_mut_trait()
+ };
+ let method_def_id = tcx.associated_items(trait_def_id.unwrap())
+ .find(|m| m.kind == ty::AssociatedKind::Method).unwrap().def_id;
+ (method_def_id, tcx.mk_substs_trait(source, &[]))
}
}
/// Records the index of the field being coerced.
Struct(usize)
}
-
-impl<'a, 'gcx, 'tcx> ty::TyS<'tcx> {
- pub fn adjust_for_autoderef<F>(&'tcx self,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- expr_id: ast::NodeId,
- expr_span: Span,
- autoderef: u32, // how many autoderefs so far?
- mut method_type: F)
- -> Ty<'tcx> where
- F: FnMut(ty::MethodCall) -> Option<Ty<'tcx>>,
- {
- let method_call = ty::MethodCall::autoderef(expr_id, autoderef);
- let mut adjusted_ty = self;
- if let Some(method_ty) = method_type(method_call) {
- // Method calls always have all late-bound regions
- // fully instantiated.
- adjusted_ty = tcx.no_late_bound_regions(&method_ty.fn_ret()).unwrap();
- }
- match adjusted_ty.builtin_deref(true, NoPreference) {
- Some(mt) => mt.ty,
- None => {
- span_bug!(
- expr_span,
- "the {}th autoderef for {} failed: {}",
- autoderef,
- expr_id,
- adjusted_ty);
- }
- }
- }
-
- pub fn adjust_for_autoref(&'tcx self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
- autoref: Option<AutoBorrow<'tcx>>)
- -> Ty<'tcx> {
- match autoref {
- None => self,
- Some(AutoBorrow::Ref(r, m)) => {
- tcx.mk_ref(r, TypeAndMut { ty: self, mutbl: m })
- }
- Some(AutoBorrow::RawPtr(m)) => {
- tcx.mk_ptr(TypeAndMut { ty: self, mutbl: m })
- }
- }
- }
-}
use hir::def::{Def, ExportMap};
use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use hir::map as hir_map;
-use hir::map::DisambiguatedDefPathData;
+use hir::map::{DisambiguatedDefPathData, DefPathHash};
use middle::free_region::FreeRegionMap;
use middle::lang_items;
use middle::resolve_lifetime;
#[derive(RustcEncodable, RustcDecodable)]
pub struct TypeckTables<'tcx> {
- /// Resolved definitions for `<T>::X` associated paths.
- pub type_relative_path_defs: NodeMap<Def>,
+ /// Resolved definitions for `<T>::X` associated paths and
+ /// method calls, including those of overloaded operators.
+ pub type_dependent_defs: NodeMap<Def>,
/// Stores the types for various nodes in the AST. Note that this table
/// is not guaranteed to be populated until after typeck. See
/// of this node. This only applies to nodes that refer to entities
/// parameterized by type parameters, such as generic fns, types, or
/// other items.
- pub item_substs: NodeMap<ty::ItemSubsts<'tcx>>,
+ pub node_substs: NodeMap<&'tcx Substs<'tcx>>,
- pub adjustments: NodeMap<ty::adjustment::Adjustment<'tcx>>,
-
- pub method_map: ty::MethodMap<'tcx>,
+ pub adjustments: NodeMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
/// Borrows
pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
impl<'tcx> TypeckTables<'tcx> {
pub fn empty() -> TypeckTables<'tcx> {
TypeckTables {
- type_relative_path_defs: NodeMap(),
+ type_dependent_defs: NodeMap(),
node_types: FxHashMap(),
- item_substs: NodeMap(),
+ node_substs: NodeMap(),
adjustments: NodeMap(),
- method_map: FxHashMap(),
upvar_capture_map: FxHashMap(),
closure_tys: NodeMap(),
closure_kinds: NodeMap(),
match *qpath {
hir::QPath::Resolved(_, ref path) => path.def,
hir::QPath::TypeRelative(..) => {
- self.type_relative_path_defs.get(&id).cloned().unwrap_or(Def::Err)
+ self.type_dependent_defs.get(&id).cloned().unwrap_or(Def::Err)
}
}
}
self.node_types.get(&id).cloned()
}
- pub fn node_id_item_substs(&self, id: NodeId) -> Option<&'tcx Substs<'tcx>> {
- self.item_substs.get(&id).map(|ts| ts.substs)
+ pub fn node_substs(&self, id: NodeId) -> &'tcx Substs<'tcx> {
+ self.node_substs.get(&id).cloned().unwrap_or(Substs::empty())
}
// Returns the type of a pattern as a monotype. Like @expr_ty, this function
self.node_id_to_type_opt(expr.id)
}
+ pub fn expr_adjustments(&self, expr: &hir::Expr)
+ -> &[ty::adjustment::Adjustment<'tcx>] {
+ self.adjustments.get(&expr.id).map_or(&[], |a| &a[..])
+ }
+
/// Returns the type of `expr`, considering any `Adjustment`
/// entry recorded for that expression.
pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
- self.adjustments.get(&expr.id)
+ self.expr_adjustments(expr)
+ .last()
.map_or_else(|| self.expr_ty(expr), |adj| adj.target)
}
pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
- self.adjustments.get(&expr.id)
- .map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr))
+ self.expr_adjustments(expr)
+ .last()
+ .map(|adj| adj.target)
+ .or_else(|| self.expr_ty_opt(expr))
}
- pub fn is_method_call(&self, expr_id: NodeId) -> bool {
- self.method_map.contains_key(&ty::MethodCall::expr(expr_id))
- }
+ pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
+ // Only paths and method calls/overloaded operators have
+ // entries in type_dependent_defs, ignore the former here.
+ if let hir::ExprPath(_) = expr.node {
+ return false;
+ }
- pub fn is_overloaded_autoderef(&self, expr_id: NodeId, autoderefs: u32) -> bool {
- self.method_map.contains_key(&ty::MethodCall::autoderef(expr_id, autoderefs))
+ match self.type_dependent_defs.get(&expr.id) {
+ Some(&Def::Method(_)) => true,
+ _ => false
+ }
}
pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture<'tcx>> {
pub hir: hir_map::Map<'tcx>,
+ /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
+ /// as well as all upstream crates. Only populated in incremental mode.
+ pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
+
pub maps: maps::Maps<'tcx>,
pub mir_passes: Rc<Passes>,
/// Data layout specification for the current target.
pub data_layout: TargetDataLayout,
- /// Cache for layouts computed from types.
- pub layout_cache: RefCell<FxHashMap<Ty<'tcx>, &'tcx Layout>>,
-
/// Used to prevent layout from recursing too deeply.
pub layout_depth: Cell<usize>,
let max_cnum = s.cstore.crates().iter().map(|c| c.as_usize()).max().unwrap_or(0);
let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
providers[LOCAL_CRATE] = local_providers;
+
+ let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
+ let upstream_def_path_tables: Vec<(CrateNum, Rc<_>)> = s
+ .cstore
+ .crates()
+ .iter()
+ .map(|&cnum| (cnum, s.cstore.def_path_table(cnum)))
+ .collect();
+
+ let def_path_tables = || {
+ upstream_def_path_tables
+ .iter()
+ .map(|&(cnum, ref rc)| (cnum, &**rc))
+ .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
+ };
+
+ // Precompute the capacity of the hashmap so we don't have to
+ // re-allocate when populating it.
+ let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();
+
+ let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
+ capacity,
+ ::std::default::Default::default()
+ );
+
+ for (cnum, def_path_table) in def_path_tables() {
+ def_path_table.add_def_path_hashes_to(cnum, &mut map);
+ }
+
+ Some(map)
+ } else {
+ None
+ };
+
tls::enter_global(GlobalCtxt {
sess: s,
trans_trait_caches: traits::trans::TransTraitCaches::new(dep_graph.clone()),
export_map: resolutions.export_map,
fulfilled_predicates: RefCell::new(fulfilled_predicates),
hir: hir,
+ def_path_hash_to_def_id: def_path_hash_to_def_id,
maps: maps::Maps::new(providers),
mir_passes,
freevars: RefCell::new(resolutions.freevars),
rvalue_promotable_to_static: RefCell::new(NodeMap()),
crate_name: Symbol::intern(crate_name),
data_layout: data_layout,
- layout_cache: RefCell::new(FxHashMap()),
layout_interner: RefCell::new(FxHashSet()),
layout_depth: Cell::new(0),
derive_macros: RefCell::new(NodeMap()),
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
}
+impl<'a, 'tcx> Lift<'tcx> for ty::ParamEnv<'a> {
+ type Lifted = ty::ParamEnv<'tcx>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<ty::ParamEnv<'tcx>> {
+ self.caller_bounds.lift_to_tcx(tcx).and_then(|caller_bounds| {
+ Some(ty::ParamEnv {
+ reveal: self.reveal,
+ caller_bounds,
+ })
+ })
+ }
+}
+
impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
type Lifted = Ty<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
}
}
+impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Predicate<'a>> {
+ type Lifted = &'tcx Slice<Predicate<'tcx>>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
+ -> Option<&'tcx Slice<Predicate<'tcx>>> {
+ if self.is_empty() {
+ return Some(Slice::empty());
+ }
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
+ }
+ // Also try in the global tcx if we're not that.
+ if !tcx.is_global() {
+ self.lift_to_tcx(tcx.global_tcx())
+ } else {
+ None
+ }
+ }
+}
+
pub mod tls {
use super::{CtxtInterners, GlobalCtxt, TyCtxt};
//! These methods return true to indicate that the visitor has found what it is looking for
//! and does not need to visit anything else.
-use ty::subst::Substs;
-use ty::adjustment;
use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
use std::fmt;
t.super_fold_with(self)
}
- fn fold_mt(&mut self, t: &ty::TypeAndMut<'tcx>) -> ty::TypeAndMut<'tcx> {
- t.super_fold_with(self)
- }
-
- fn fold_impl_header(&mut self, imp: &ty::ImplHeader<'tcx>) -> ty::ImplHeader<'tcx> {
- imp.super_fold_with(self)
- }
-
- fn fold_substs(&mut self,
- substs: &'tcx Substs<'tcx>)
- -> &'tcx Substs<'tcx> {
- substs.super_fold_with(self)
- }
-
- fn fold_fn_sig(&mut self,
- sig: &ty::FnSig<'tcx>)
- -> ty::FnSig<'tcx> {
- sig.super_fold_with(self)
- }
-
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
r.super_fold_with(self)
}
-
- fn fold_autoref(&mut self, ar: &adjustment::AutoBorrow<'tcx>)
- -> adjustment::AutoBorrow<'tcx> {
- ar.super_fold_with(self)
- }
}
pub trait TypeVisitor<'tcx> : Sized {
pub use self::Layout::*;
pub use self::Primitive::*;
-use infer::InferCtxt;
-use session::Session;
-use traits;
+use session::{self, DataTypeKind, Session};
use ty::{self, Ty, TyCtxt, TypeFoldable, ReprOptions, ReprFlags};
-use syntax::ast::{FloatTy, IntTy, UintTy};
+use syntax::ast::{self, FloatTy, IntTy, UintTy};
use syntax::attr;
use syntax_pos::DUMMY_SP;
}
}
+impl<'a, 'tcx> HasDataLayout for TyCtxt<'a, 'tcx, 'tcx> {
+ fn data_layout(&self) -> &TargetDataLayout {
+ &self.data_layout
+ }
+}
+
/// Endianness of the target, which must match cfg(target-endian).
#[derive(Copy, Clone)]
pub enum Endian {
/// signed discriminant range and #[repr] attribute.
/// N.B.: u64 values above i64::MAX will be treated as signed, but
/// that shouldn't affect anything, other than maybe debuginfo.
- fn repr_discr(tcx: TyCtxt, ty: Ty, repr: &ReprOptions, min: i64, max: i64)
- -> (Integer, bool) {
+ fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ ty: Ty<'tcx>,
+ repr: &ReprOptions,
+ min: i64,
+ max: i64)
+ -> (Integer, bool) {
// Theoretically, negative values could be larger in unsigned representation
// than the unsigned representation of the signed minimum. However, if there
// are any negative values, the only valid unsigned representation is u64
EnumVariant,
}
-impl<'a, 'gcx, 'tcx> Struct {
- fn new(dl: &TargetDataLayout, fields: &Vec<&'a Layout>,
- repr: &ReprOptions, kind: StructKind,
- scapegoat: Ty<'gcx>) -> Result<Struct, LayoutError<'gcx>> {
+impl<'a, 'tcx> Struct {
+ fn new(dl: &TargetDataLayout,
+ fields: &Vec<&'a Layout>,
+ repr: &ReprOptions,
+ kind: StructKind,
+ scapegoat: Ty<'tcx>)
+ -> Result<Struct, LayoutError<'tcx>> {
if repr.packed() && repr.align > 0 {
bug!("Struct cannot be packed and aligned");
}
/// Determine whether a structure would be zero-sized, given its fields.
fn would_be_zero_sized<I>(dl: &TargetDataLayout, fields: I)
- -> Result<bool, LayoutError<'gcx>>
- where I: Iterator<Item=Result<&'a Layout, LayoutError<'gcx>>> {
+ -> Result<bool, LayoutError<'tcx>>
+ where I: Iterator<Item=Result<&'a Layout, LayoutError<'tcx>>> {
for field in fields {
let field = field?;
if field.is_unsized() || field.size(dl).bytes() > 0 {
/// The tuple is `(path, source_path)`,
/// where `path` is in memory order and `source_path` in source order.
// FIXME(eddyb) track value ranges and traverse already optimized enums.
- fn non_zero_field_in_type(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- ty: Ty<'gcx>)
- -> Result<Option<(FieldPath, FieldPath)>, LayoutError<'gcx>> {
- let tcx = infcx.tcx.global_tcx();
- match (ty.layout(infcx)?, &ty.sty) {
+ fn non_zero_field_in_type(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ty: Ty<'tcx>)
+ -> Result<Option<(FieldPath, FieldPath)>, LayoutError<'tcx>> {
+ match (ty.layout(tcx, param_env)?, &ty.sty) {
(&Scalar { non_zero: true, .. }, _) |
(&CEnum { non_zero: true, .. }, _) => Ok(Some((vec![], vec![]))),
(&FatPointer { non_zero: true, .. }, _) => {
(&Univariant { non_zero: true, .. }, &ty::TyAdt(def, substs)) => {
let fields = &def.struct_variant().fields;
assert_eq!(fields.len(), 1);
- match *fields[0].ty(tcx, substs).layout(infcx)? {
+ match *fields[0].ty(tcx, substs).layout(tcx, param_env)? {
// FIXME(eddyb) also allow floating-point types here.
Scalar { value: Int(_), non_zero: false } |
Scalar { value: Pointer, non_zero: false } => {
// Perhaps one of the fields of this struct is non-zero
// let's recurse and find out
(&Univariant { ref variant, .. }, &ty::TyAdt(def, substs)) if def.is_struct() => {
- Struct::non_zero_field_paths(infcx, def.struct_variant().fields
- .iter().map(|field| {
- field.ty(tcx, substs)
- }),
- Some(&variant.memory_index[..]))
+ Struct::non_zero_field_paths(
+ tcx,
+ param_env,
+ def.struct_variant().fields.iter().map(|field| {
+ field.ty(tcx, substs)
+ }),
+ Some(&variant.memory_index[..]))
}
// Perhaps one of the upvars of this closure is non-zero
(&Univariant { ref variant, .. }, &ty::TyClosure(def, substs)) => {
let upvar_tys = substs.upvar_tys(def, tcx);
- Struct::non_zero_field_paths(infcx, upvar_tys,
+ Struct::non_zero_field_paths(
+ tcx,
+ param_env,
+ upvar_tys,
Some(&variant.memory_index[..]))
}
// Can we use one of the fields in this tuple?
(&Univariant { ref variant, .. }, &ty::TyTuple(tys, _)) => {
- Struct::non_zero_field_paths(infcx, tys.iter().cloned(),
+ Struct::non_zero_field_paths(
+ tcx,
+ param_env,
+ tys.iter().cloned(),
Some(&variant.memory_index[..]))
}
// Is this a fixed-size array of something non-zero
// with at least one element?
(_, &ty::TyArray(ety, d)) if d > 0 => {
- Struct::non_zero_field_paths(infcx, Some(ety).into_iter(), None)
+ Struct::non_zero_field_paths(
+ tcx,
+ param_env,
+ Some(ety).into_iter(),
+ None)
}
(_, &ty::TyProjection(_)) | (_, &ty::TyAnon(..)) => {
- let normalized = infcx.normalize_projections(ty);
+ let normalized = tcx.normalize_associated_type_in_env(&ty, param_env);
if ty == normalized {
return Ok(None);
}
- return Struct::non_zero_field_in_type(infcx, normalized);
+ return Struct::non_zero_field_in_type(tcx, param_env, normalized);
}
// Anything else is not a non-zero type.
/// the given set of fields and recursing through aggregates.
/// Returns Some((path, source_path)) on success.
/// `path` is translated to memory order. `source_path` is not.
- fn non_zero_field_paths<I>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- fields: I,
- permutation: Option<&[u32]>)
- -> Result<Option<(FieldPath, FieldPath)>, LayoutError<'gcx>>
- where I: Iterator<Item=Ty<'gcx>> {
+ fn non_zero_field_paths<I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ fields: I,
+ permutation: Option<&[u32]>)
+ -> Result<Option<(FieldPath, FieldPath)>, LayoutError<'tcx>>
+ where I: Iterator<Item=Ty<'tcx>> {
for (i, ty) in fields.enumerate() {
- if let Some((mut path, mut source_path)) = Struct::non_zero_field_in_type(infcx, ty)? {
+ let r = Struct::non_zero_field_in_type(tcx, param_env, ty)?;
+ if let Some((mut path, mut source_path)) = r {
source_path.push(i as u32);
let index = if let Some(p) = permutation {
p[i] as usize
pub packed: bool,
}
-impl<'a, 'gcx, 'tcx> Union {
+impl<'a, 'tcx> Union {
fn new(dl: &TargetDataLayout, packed: bool) -> Union {
let align = if packed { dl.i8_align } else { dl.aggregate_align };
Union {
/// Extend the Struct with more fields.
fn extend<I>(&mut self, dl: &TargetDataLayout,
fields: I,
- scapegoat: Ty<'gcx>)
- -> Result<(), LayoutError<'gcx>>
- where I: Iterator<Item=Result<&'a Layout, LayoutError<'gcx>>> {
+ scapegoat: Ty<'tcx>)
+ -> Result<(), LayoutError<'tcx>>
+ where I: Iterator<Item=Result<&'a Layout, LayoutError<'tcx>>> {
for (index, field) in fields.enumerate() {
let field = field?;
if field.is_unsized() {
}
}
-impl<'a, 'gcx, 'tcx> Layout {
- pub fn compute_uncached(ty: Ty<'gcx>,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>)
- -> Result<&'gcx Layout, LayoutError<'gcx>> {
- let tcx = infcx.tcx.global_tcx();
+impl<'a, 'tcx> Layout {
+ pub fn compute_uncached(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ ty: Ty<'tcx>)
+ -> Result<&'tcx Layout, LayoutError<'tcx>> {
let success = |layout| Ok(tcx.intern_layout(layout));
let dl = &tcx.data_layout;
assert!(!ty.has_infer_types());
- let ptr_layout = |pointee: Ty<'gcx>| {
+ let ptr_layout = |pointee: Ty<'tcx>| {
let non_zero = !ty.is_unsafe_ptr();
- let pointee = infcx.normalize_projections(pointee);
- if pointee.is_sized(tcx, infcx.param_env, DUMMY_SP) {
+ let pointee = tcx.normalize_associated_type_in_env(&pointee, param_env);
+ if pointee.is_sized(tcx, param_env, DUMMY_SP) {
Ok(Scalar { value: Pointer, non_zero: non_zero })
} else {
let unsized_part = tcx.struct_tail(pointee);
// Arrays and slices.
ty::TyArray(element, count) => {
- let element = element.layout(infcx)?;
+ let element = element.layout(tcx, param_env)?;
let element_size = element.size(dl);
// FIXME(eddyb) Don't use host `usize` for array lengths.
let usize_count: usize = count;
}
}
ty::TySlice(element) => {
- let element = element.layout(infcx)?;
+ let element = element.layout(tcx, param_env)?;
Array {
sized: false,
align: element.align(dl),
ty::TyClosure(def_id, ref substs) => {
let tys = substs.upvar_tys(def_id, tcx);
let st = Struct::new(dl,
- &tys.map(|ty| ty.layout(infcx))
+ &tys.map(|ty| ty.layout(tcx, param_env))
.collect::<Result<Vec<_>, _>>()?,
&ReprOptions::default(),
StructKind::AlwaysSizedUnivariant, ty)?;
// FIXME(camlorn): if we ever allow unsized tuples, this needs to be checked.
// See the univariant case below to learn how.
let st = Struct::new(dl,
- &tys.iter().map(|ty| ty.layout(infcx))
+ &tys.iter().map(|ty| ty.layout(tcx, param_env))
.collect::<Result<Vec<_>, _>>()?,
&ReprOptions::default(), StructKind::AlwaysSizedUnivariant, ty)?;
Univariant { variant: st, non_zero: false }
// SIMD vector types.
ty::TyAdt(def, ..) if def.repr.simd() => {
let element = ty.simd_type(tcx);
- match *element.layout(infcx)? {
+ match *element.layout(tcx, param_env)? {
Scalar { value, .. } => {
return success(Vector {
element: value,
};
let fields = def.variants[0].fields.iter().map(|field| {
- field.ty(tcx, substs).layout(infcx)
+ field.ty(tcx, substs).layout(tcx, param_env)
}).collect::<Result<Vec<_>, _>>()?;
let layout = if def.is_union() {
let mut un = Union::new(dl, def.repr.packed());
// Nullable pointer optimization
for discr in 0..2 {
let other_fields = variants[1 - discr].iter().map(|ty| {
- ty.layout(infcx)
+ ty.layout(tcx, param_env)
});
if !Struct::would_be_zero_sized(dl, other_fields)? {
continue;
}
- let paths = Struct::non_zero_field_paths(infcx,
- variants[discr].iter().cloned(),
- None)?;
+ let paths = Struct::non_zero_field_paths(tcx,
+ param_env,
+ variants[discr].iter().cloned(),
+ None)?;
let (mut path, mut path_source) = if let Some(p) = paths { p }
else { continue };
// FIXME(eddyb) should take advantage of a newtype.
if path == &[0] && variants[discr].len() == 1 {
- let value = match *variants[discr][0].layout(infcx)? {
+ let value = match *variants[discr][0].layout(tcx, param_env)? {
Scalar { value, .. } => value,
CEnum { discr, .. } => Int(discr),
_ => bug!("Layout::compute: `{}`'s non-zero \
}
let st = Struct::new(dl,
- &variants[discr].iter().map(|ty| ty.layout(infcx))
+ &variants[discr].iter().map(|ty| ty.layout(tcx, param_env))
.collect::<Result<Vec<_>, _>>()?,
&def.repr, StructKind::AlwaysSizedUnivariant, ty)?;
let discr = Scalar { value: Int(min_ity), non_zero: false };
let mut variants = variants.into_iter().map(|fields| {
let mut fields = fields.into_iter().map(|field| {
- field.layout(infcx)
+ field.layout(tcx, param_env)
}).collect::<Result<Vec<_>, _>>()?;
fields.insert(0, &discr);
let st = Struct::new(dl,
// Types with no meaningful known layout.
ty::TyProjection(_) | ty::TyAnon(..) => {
- let normalized = infcx.normalize_projections(ty);
+ let normalized = tcx.normalize_associated_type_in_env(&ty, param_env);
if ty == normalized {
return Err(LayoutError::Unknown(ty));
}
- return normalized.layout(infcx);
+ return normalized.layout(tcx, param_env);
}
ty::TyParam(_) => {
return Err(LayoutError::Unknown(ty));
}
}
}
+
+ /// This is invoked by the `layout_raw` query to record the final
+ /// layout of each type.
+ #[inline]
+ pub fn record_layout_for_printing(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ ty: Ty<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ layout: &Layout) {
+ // If we are running with `-Zprint-type-sizes`, record layouts for
+ // dumping later. Ignore layouts that are done with non-empty
+ // environments or non-monomorphic layouts, as the user only wants
+ // to see the stuff resulting from the final trans session.
+ if
+ !tcx.sess.opts.debugging_opts.print_type_sizes ||
+ ty.has_param_types() ||
+ ty.has_self_ty() ||
+ !param_env.caller_bounds.is_empty()
+ {
+ return;
+ }
+
+ Self::record_layout_for_printing_outlined(tcx, ty, param_env, layout)
+ }
+
+ fn record_layout_for_printing_outlined(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ ty: Ty<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ layout: &Layout) {
+ // (delay format until we actually need it)
+ let record = |kind, opt_discr_size, variants| {
+ let type_desc = format!("{:?}", ty);
+ let overall_size = layout.size(tcx);
+ let align = layout.align(tcx);
+ tcx.sess.code_stats.borrow_mut().record_type_size(kind,
+ type_desc,
+ align,
+ overall_size,
+ opt_discr_size,
+ variants);
+ };
+
+ let (adt_def, substs) = match ty.sty {
+ ty::TyAdt(ref adt_def, substs) => {
+ debug!("print-type-size t: `{:?}` process adt", ty);
+ (adt_def, substs)
+ }
+
+ ty::TyClosure(..) => {
+ debug!("print-type-size t: `{:?}` record closure", ty);
+ record(DataTypeKind::Closure, None, vec![]);
+ return;
+ }
+
+ _ => {
+ debug!("print-type-size t: `{:?}` skip non-nominal", ty);
+ return;
+ }
+ };
+
+ let adt_kind = adt_def.adt_kind();
+
+ let build_field_info = |(field_name, field_ty): (ast::Name, Ty<'tcx>), offset: &Size| {
+ let layout = field_ty.layout(tcx, param_env);
+ match layout {
+ Err(_) => bug!("no layout found for field {} type: `{:?}`", field_name, field_ty),
+ Ok(field_layout) => {
+ session::FieldInfo {
+ name: field_name.to_string(),
+ offset: offset.bytes(),
+ size: field_layout.size(tcx).bytes(),
+ align: field_layout.align(tcx).abi(),
+ }
+ }
+ }
+ };
+
+ let build_primitive_info = |name: ast::Name, value: &Primitive| {
+ session::VariantInfo {
+ name: Some(name.to_string()),
+ kind: session::SizeKind::Exact,
+ align: value.align(tcx).abi(),
+ size: value.size(tcx).bytes(),
+ fields: vec![],
+ }
+ };
+
+ enum Fields<'a> {
+ WithDiscrim(&'a Struct),
+ NoDiscrim(&'a Struct),
+ }
+
+ let build_variant_info = |n: Option<ast::Name>,
+ flds: &[(ast::Name, Ty<'tcx>)],
+ layout: Fields| {
+ let (s, field_offsets) = match layout {
+ Fields::WithDiscrim(s) => (s, &s.offsets[1..]),
+ Fields::NoDiscrim(s) => (s, &s.offsets[0..]),
+ };
+ let field_info: Vec<_> =
+ flds.iter()
+ .zip(field_offsets.iter())
+ .map(|(&field_name_ty, offset)| build_field_info(field_name_ty, offset))
+ .collect();
+
+ session::VariantInfo {
+ name: n.map(|n|n.to_string()),
+ kind: if s.sized {
+ session::SizeKind::Exact
+ } else {
+ session::SizeKind::Min
+ },
+ align: s.align.abi(),
+ size: s.min_size.bytes(),
+ fields: field_info,
+ }
+ };
+
+ match *layout {
+ Layout::StructWrappedNullablePointer { nonnull: ref variant_layout,
+ nndiscr,
+ discrfield: _,
+ discrfield_source: _ } => {
+ debug!("print-type-size t: `{:?}` adt struct-wrapped nullable nndiscr {} is {:?}",
+ ty, nndiscr, variant_layout);
+ let variant_def = &adt_def.variants[nndiscr as usize];
+ let fields: Vec<_> =
+ variant_def.fields.iter()
+ .map(|field_def| (field_def.name, field_def.ty(tcx, substs)))
+ .collect();
+ record(adt_kind.into(),
+ None,
+ vec![build_variant_info(Some(variant_def.name),
+ &fields,
+ Fields::NoDiscrim(variant_layout))]);
+ }
+ Layout::RawNullablePointer { nndiscr, value } => {
+ debug!("print-type-size t: `{:?}` adt raw nullable nndiscr {} is {:?}",
+ ty, nndiscr, value);
+ let variant_def = &adt_def.variants[nndiscr as usize];
+ record(adt_kind.into(), None,
+ vec![build_primitive_info(variant_def.name, &value)]);
+ }
+ Layout::Univariant { variant: ref variant_layout, non_zero: _ } => {
+ let variant_names = || {
+ adt_def.variants.iter().map(|v|format!("{}", v.name)).collect::<Vec<_>>()
+ };
+ debug!("print-type-size t: `{:?}` adt univariant {:?} variants: {:?}",
+ ty, variant_layout, variant_names());
+ assert!(adt_def.variants.len() <= 1,
+ "univariant with variants {:?}", variant_names());
+ if adt_def.variants.len() == 1 {
+ let variant_def = &adt_def.variants[0];
+ let fields: Vec<_> =
+ variant_def.fields.iter()
+ .map(|f| (f.name, f.ty(tcx, substs)))
+ .collect();
+ record(adt_kind.into(),
+ None,
+ vec![build_variant_info(Some(variant_def.name),
+ &fields,
+ Fields::NoDiscrim(variant_layout))]);
+ } else {
+ // (This case arises for *empty* enums; so give it
+ // zero variants.)
+ record(adt_kind.into(), None, vec![]);
+ }
+ }
+
+ Layout::General { ref variants, discr, .. } => {
+ debug!("print-type-size t: `{:?}` adt general variants def {} layouts {} {:?}",
+ ty, adt_def.variants.len(), variants.len(), variants);
+ let variant_infos: Vec<_> =
+ adt_def.variants.iter()
+ .zip(variants.iter())
+ .map(|(variant_def, variant_layout)| {
+ let fields: Vec<_> =
+ variant_def.fields
+ .iter()
+ .map(|f| (f.name, f.ty(tcx, substs)))
+ .collect();
+ build_variant_info(Some(variant_def.name),
+ &fields,
+ Fields::WithDiscrim(variant_layout))
+ })
+ .collect();
+ record(adt_kind.into(), Some(discr.size()), variant_infos);
+ }
+
+ Layout::UntaggedUnion { ref variants } => {
+ debug!("print-type-size t: `{:?}` adt union variants {:?}",
+ ty, variants);
+ // layout does not currently store info about each
+ // variant...
+ record(adt_kind.into(), None, Vec::new());
+ }
+
+ Layout::CEnum { discr, .. } => {
+ debug!("print-type-size t: `{:?}` adt c-like enum", ty);
+ let variant_infos: Vec<_> =
+ adt_def.variants.iter()
+ .map(|variant_def| {
+ build_primitive_info(variant_def.name,
+ &Primitive::Int(discr))
+ })
+ .collect();
+ record(adt_kind.into(), Some(discr.size()), variant_infos);
+ }
+
+ // other cases provide little interesting (i.e. adjustable
+ // via representation tweaks) size info beyond total size.
+ Layout::Scalar { .. } |
+ Layout::Vector { .. } |
+ Layout::Array { .. } |
+ Layout::FatPointer { .. } => {
+ debug!("print-type-size t: `{:?}` adt other", ty);
+ record(adt_kind.into(), None, Vec::new())
+ }
+ }
+ }
}
/// Type size "skeleton", i.e. the only information determining a type's size.
}
}
-impl<'a, 'gcx, 'tcx> SizeSkeleton<'gcx> {
- pub fn compute(ty: Ty<'gcx>, infcx: &InferCtxt<'a, 'gcx, 'tcx>)
- -> Result<SizeSkeleton<'gcx>, LayoutError<'gcx>> {
- let tcx = infcx.tcx.global_tcx();
+impl<'a, 'tcx> SizeSkeleton<'tcx> {
+ pub fn compute(ty: Ty<'tcx>,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>)
+ -> Result<SizeSkeleton<'tcx>, LayoutError<'tcx>> {
assert!(!ty.has_infer_types());
// First try computing a static layout.
- let err = match ty.layout(infcx) {
+ let err = match ty.layout(tcx, param_env) {
Ok(layout) => {
return Ok(SizeSkeleton::Known(layout.size(tcx)));
}
Err(err) => err
};
- let ptr_skeleton = |pointee: Ty<'gcx>| {
+ let ptr_skeleton = |pointee: Ty<'tcx>| {
let non_zero = !ty.is_unsafe_ptr();
let tail = tcx.struct_tail(pointee);
match tail.sty {
// Get a zero-sized variant or a pointer newtype.
let zero_or_ptr_variant = |i: usize| {
let fields = def.variants[i].fields.iter().map(|field| {
- SizeSkeleton::compute(field.ty(tcx, substs), infcx)
+ SizeSkeleton::compute(field.ty(tcx, substs), tcx, param_env)
});
let mut ptr = None;
for field in fields {
}
ty::TyProjection(_) | ty::TyAnon(..) => {
- let normalized = infcx.normalize_projections(ty);
+ let normalized = tcx.normalize_associated_type_in_env(&ty, param_env);
if ty == normalized {
Err(err)
} else {
- SizeSkeleton::compute(normalized, infcx)
+ SizeSkeleton::compute(normalized, tcx, param_env)
}
}
}
}
-pub trait HasTyCtxt<'tcx>: HasDataLayout {
+pub trait LayoutTyper<'tcx>: HasDataLayout {
+ type TyLayout;
+
fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>;
+ fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout;
+ fn normalize_projections(self, ty: Ty<'tcx>) -> Ty<'tcx>;
}
-impl<'a, 'gcx, 'tcx> HasDataLayout for TyCtxt<'a, 'gcx, 'tcx> {
- fn data_layout(&self) -> &TargetDataLayout {
- &self.data_layout
- }
+/// Combines a tcx with the parameter environment so that you can
+/// compute layout operations.
+#[derive(Copy, Clone)]
+pub struct LayoutCx<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
}
-impl<'a, 'gcx, 'tcx> HasTyCtxt<'gcx> for TyCtxt<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'gcx> {
- self.global_tcx()
+impl<'a, 'tcx> LayoutCx<'a, 'tcx> {
+ pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParamEnv<'tcx>) -> Self {
+ LayoutCx { tcx, param_env }
}
}
-impl<'a, 'gcx, 'tcx> HasDataLayout for &'a InferCtxt<'a, 'gcx, 'tcx> {
+impl<'a, 'tcx> HasDataLayout for LayoutCx<'a, 'tcx> {
fn data_layout(&self) -> &TargetDataLayout {
&self.tcx.data_layout
}
}
-impl<'a, 'gcx, 'tcx> HasTyCtxt<'gcx> for &'a InferCtxt<'a, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'gcx> {
- self.tcx.global_tcx()
- }
-}
-
-pub trait LayoutTyper<'tcx>: HasTyCtxt<'tcx> {
- type TyLayout;
-
- fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout;
- fn normalize_projections(self, ty: Ty<'tcx>) -> Ty<'tcx>;
-}
+impl<'a, 'tcx> LayoutTyper<'tcx> for LayoutCx<'a, 'tcx> {
+ type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
-impl<'a, 'gcx, 'tcx> LayoutTyper<'gcx> for &'a InferCtxt<'a, 'gcx, 'tcx> {
- type TyLayout = Result<TyLayout<'gcx>, LayoutError<'gcx>>;
+ fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
+ self.tcx
+ }
- fn layout_of(self, ty: Ty<'gcx>) -> Self::TyLayout {
+ fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
let ty = self.normalize_projections(ty);
Ok(TyLayout {
ty: ty,
- layout: ty.layout(self)?,
+ layout: ty.layout(self.tcx, self.param_env)?,
variant_index: None
})
}
- fn normalize_projections(self, ty: Ty<'gcx>) -> Ty<'gcx> {
- if !ty.has_projection_types() {
- return ty;
- }
-
- let mut selcx = traits::SelectionContext::new(self);
- let cause = traits::ObligationCause::dummy();
- let traits::Normalized { value: result, obligations } =
- traits::normalize(&mut selcx, cause, &ty);
-
- let mut fulfill_cx = traits::FulfillmentContext::new();
-
- for obligation in obligations {
- fulfill_cx.register_predicate_obligation(self, obligation);
- }
-
- self.drain_fulfillment_cx_or_panic(DUMMY_SP, &mut fulfill_cx, &result)
+ fn normalize_projections(self, ty: Ty<'tcx>) -> Ty<'tcx> {
+ self.tcx.normalize_associated_type_in_env(&ty, self.param_env)
}
}
}
}
- pub fn field_type<C: HasTyCtxt<'tcx>>(&self, cx: C, i: usize) -> Ty<'tcx> {
+ pub fn field_type<C: LayoutTyper<'tcx>>(&self, cx: C, i: usize) -> Ty<'tcx> {
let tcx = cx.tcx();
let ptr_field_type = |pointee: Ty<'tcx>| {
}
}
- pub fn field<C: LayoutTyper<'tcx>>(&self, cx: C, i: usize) -> C::TyLayout {
+ pub fn field<C: LayoutTyper<'tcx>>(&self,
+ cx: C,
+ i: usize)
+ -> C::TyLayout {
cx.layout_of(cx.normalize_projections(self.field_type(cx, i)))
}
}
use session::CompileResult;
use traits::specialization_graph;
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
+use ty::layout::{Layout, LayoutError};
use ty::item_path;
use ty::steal::Steal;
use ty::subst::Substs;
}
}
+impl<'tcx> QueryDescription for queries::layout_raw<'tcx> {
+ fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
+ format!("computing layout of `{}`", env.value)
+ }
+}
+
impl<'tcx> QueryDescription for queries::super_predicates_of<'tcx> {
fn describe(tcx: TyCtxt, def_id: DefId) -> String {
format!("computing the supertraits of `{}`",
[] specialization_graph_of: SpecializationGraph(DefId) -> Rc<specialization_graph::Graph>,
[] is_object_safe: ObjectSafety(DefId) -> bool,
+ // Get the ParameterEnvironment for a given item; this environment
+ // will be in "user-facing" mode, meaning that it is suitabe for
+ // type-checking etc, and it does not normalize specializable
+ // associated types. This is almost always what you want,
+ // unless you are doing MIR optimizations, in which case you
+ // might want to use the `reveal_all()` method to change modes.
[] param_env: ParamEnv(DefId) -> ty::ParamEnv<'tcx>,
// Trait selection queries. These are best used by invoking `ty.moves_by_default()`,
[] is_sized_raw: is_sized_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
[] is_freeze_raw: is_freeze_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
[] needs_drop_raw: needs_drop_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> bool,
+ [] layout_raw: layout_dep_node(ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
+ -> Result<&'tcx Layout, LayoutError<'tcx>>,
}
fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
.unwrap_or(DefId::local(CRATE_DEF_INDEX));
DepNode::NeedsDrop(def_id)
}
+
+fn layout_dep_node<'tcx>(key: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> DepNode<DefId> {
+ let def_id = ty::item_path::characteristic_def_id_of_type(key.value)
+ .unwrap_or(DefId::local(CRATE_DEF_INDEX));
+ DepNode::Layout(def_id)
+}
use hir::{map as hir_map, FreevarMap, TraitMap};
use hir::def::{Def, CtorKind, ExportMap};
use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
-use ich::{self, StableHashingContext};
+use ich::StableHashingContext;
use middle::const_val::ConstVal;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::privacy::AccessLevels;
pub predicates: Vec<Predicate<'tcx>>,
}
-impl<'a, 'gcx, 'tcx> ImplHeader<'tcx> {
- pub fn with_fresh_ty_vars(selcx: &mut traits::SelectionContext<'a, 'gcx, 'tcx>,
- impl_def_id: DefId)
- -> ImplHeader<'tcx>
- {
- let tcx = selcx.tcx();
- let impl_substs = selcx.infcx().fresh_substs_for_item(DUMMY_SP, impl_def_id);
-
- let header = ImplHeader {
- impl_def_id: impl_def_id,
- self_ty: tcx.type_of(impl_def_id),
- trait_ref: tcx.impl_trait_ref(impl_def_id),
- predicates: tcx.predicates_of(impl_def_id).predicates
- }.subst(tcx, impl_substs);
-
- let traits::Normalized { value: mut header, obligations } =
- traits::normalize(selcx, traits::ObligationCause::dummy(), &header);
-
- header.predicates.extend(obligations.into_iter().map(|o| o.predicate));
- header
- }
-}
-
#[derive(Copy, Clone, Debug)]
pub struct AssociatedItem {
pub def_id: DefId,
AssociatedKind::Method => !self.method_has_self_argument,
}
}
+
+ pub fn signature<'a, 'tcx>(&self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) -> String {
+ match self.kind {
+ ty::AssociatedKind::Method => {
+ // We skip the binder here because the binder would deanonymize all
+ // late-bound regions, and we don't want method signatures to show up
+ // `as for<'r> fn(&'r MyType)`. Pretty-printing handles late-bound
+ // regions just fine, showing `fn(&MyType)`.
+ format!("{}", tcx.type_of(self.def_id).fn_sig().skip_binder())
+ }
+ ty::AssociatedKind::Type => format!("type {};", self.name.to_string()),
+ ty::AssociatedKind::Const => {
+ format!("const {}: {:?};", self.name.to_string(), tcx.type_of(self.def_id))
+ }
+ }
+ }
}
#[derive(Clone, Debug, PartialEq, Eq, Copy, RustcEncodable, RustcDecodable)]
}
}
-#[derive(Clone, Copy, Debug, RustcDecodable, RustcEncodable)]
-pub struct MethodCallee<'tcx> {
- /// Impl method ID, for inherent methods, or trait method ID, otherwise.
- pub def_id: DefId,
- pub ty: Ty<'tcx>,
- pub substs: &'tcx Substs<'tcx>
-}
-
-/// With method calls, we store some extra information in
-/// side tables (i.e method_map). We use
-/// MethodCall as a key to index into these tables instead of
-/// just directly using the expression's NodeId. The reason
-/// for this being that we may apply adjustments (coercions)
-/// with the resulting expression also needing to use the
-/// side tables. The problem with this is that we don't
-/// assign a separate NodeId to this new expression
-/// and so it would clash with the base expression if both
-/// needed to add to the side tables. Thus to disambiguate
-/// we also keep track of whether there's an adjustment in
-/// our key.
-#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
-pub struct MethodCall {
- pub expr_id: NodeId,
- pub autoderef: u32
-}
-
-impl MethodCall {
- pub fn expr(id: NodeId) -> MethodCall {
- MethodCall {
- expr_id: id,
- autoderef: 0
- }
- }
-
- pub fn autoderef(expr_id: NodeId, autoderef: u32) -> MethodCall {
- MethodCall {
- expr_id: expr_id,
- autoderef: 1 + autoderef
- }
- }
-}
-
-// maps from an expression id that corresponds to a method call to the details
-// of the method to be invoked
-pub type MethodMap<'tcx> = FxHashMap<MethodCall, MethodCallee<'tcx>>;
-
// Contains information needed to resolve types and (in the future) look up
// the types of AST nodes.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
/// the set of bounds on the in-scope type parameters, translated
/// into Obligations, and elaborated and normalized.
pub caller_bounds: &'tcx Slice<ty::Predicate<'tcx>>,
+
+ /// Typically, this is `Reveal::UserFacing`, but during trans we
+ /// want `Reveal::All` -- note that this is always paired with an
+ /// empty environment. To get that, use `ParamEnv::reveal()`.
+ pub reveal: traits::Reveal,
}
impl<'tcx> ParamEnv<'tcx> {
}
} else {
ParamEnvAnd {
- param_env: ParamEnv::empty(),
+ param_env: ParamEnv::empty(self.reveal),
value: value,
}
}
}
}
-/// Records the substitutions used to translate the polytype for an
-/// item into the monotype of an item reference.
-#[derive(Clone, RustcEncodable, RustcDecodable)]
-pub struct ItemSubsts<'tcx> {
- pub substs: &'tcx Substs<'tcx>,
-}
-
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub enum ClosureKind {
// Warning: Ordering is significant here! The ordering is chosen
}
}
-impl<'tcx> ItemSubsts<'tcx> {
- pub fn is_noop(&self) -> bool {
- self.substs.is_noop()
- }
-}
-
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum LvaluePreference {
PreferMutLvalue,
}
#[inline]
- pub fn def_path_hash(self, def_id: DefId) -> ich::Fingerprint {
+ pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
if def_id.is_local() {
self.hir.definitions().def_path_hash(def_id.index)
} else {
/// See `ParamEnv` struct def'n for details.
fn param_env<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> ParamEnv<'tcx> {
+ def_id: DefId)
+ -> ParamEnv<'tcx> {
// Compute the bounds on Self and the type parameters.
let bounds = tcx.predicates_of(def_id).instantiate_identity(tcx);
// are any errors at that point, so after type checking you can be
// sure that this will succeed without errors anyway.
- let unnormalized_env = ty::ParamEnv::new(tcx.intern_predicates(&predicates));
+ let unnormalized_env = ty::ParamEnv::new(tcx.intern_predicates(&predicates),
+ traits::Reveal::UserFacing);
let body_id = tcx.hir.as_local_node_id(def_id).map_or(DUMMY_NODE_ID, |id| {
tcx.hir.maybe_body_owned_by(id).map_or(id, |body| body.node_id)
}
}
-impl<'a, 'tcx> Lift<'tcx> for ty::ItemSubsts<'a> {
- type Lifted = ty::ItemSubsts<'tcx>;
+impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjustment<'a> {
+ type Lifted = ty::adjustment::Adjustment<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
- tcx.lift(&self.substs).map(|substs| {
- ty::ItemSubsts {
- substs: substs
+ tcx.lift(&self.kind).and_then(|kind| {
+ tcx.lift(&self.target).map(|target| {
+ ty::adjustment::Adjustment { kind, target }
+ })
+ })
+ }
+}
+
+impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::Adjust<'a> {
+ type Lifted = ty::adjustment::Adjust<'tcx>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ match *self {
+ ty::adjustment::Adjust::NeverToAny =>
+ Some(ty::adjustment::Adjust::NeverToAny),
+ ty::adjustment::Adjust::ReifyFnPointer =>
+ Some(ty::adjustment::Adjust::ReifyFnPointer),
+ ty::adjustment::Adjust::UnsafeFnPointer =>
+ Some(ty::adjustment::Adjust::UnsafeFnPointer),
+ ty::adjustment::Adjust::ClosureFnPointer =>
+ Some(ty::adjustment::Adjust::ClosureFnPointer),
+ ty::adjustment::Adjust::MutToConstPointer =>
+ Some(ty::adjustment::Adjust::MutToConstPointer),
+ ty::adjustment::Adjust::Unsize =>
+ Some(ty::adjustment::Adjust::Unsize),
+ ty::adjustment::Adjust::Deref(ref overloaded) => {
+ tcx.lift(overloaded).map(ty::adjustment::Adjust::Deref)
+ }
+ ty::adjustment::Adjust::Borrow(ref autoref) => {
+ tcx.lift(autoref).map(ty::adjustment::Adjust::Borrow)
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::OverloadedDeref<'a> {
+ type Lifted = ty::adjustment::OverloadedDeref<'tcx>;
+ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
+ tcx.lift(&self.region).map(|region| {
+ ty::adjustment::OverloadedDeref {
+ region,
+ mutbl: self.mutbl,
}
})
}
}
}
+impl<'tcx> TypeFoldable<'tcx> for ty::ParamEnv<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ ty::ParamEnv {
+ reveal: self.reveal,
+ caller_bounds: self.caller_bounds.fold_with(folder),
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ let &ty::ParamEnv { reveal: _, ref caller_bounds } = self;
+ caller_bounds.super_visit_with(visitor)
+ }
+}
+
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Slice<ty::ExistentialPredicate<'tcx>> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
let v = self.iter().map(|p| p.fold_with(folder)).collect::<AccumulateVec<[_; 8]>>();
ty::TypeAndMut { ty: self.ty.fold_with(folder), mutbl: self.mutbl }
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- folder.fold_mt(self)
- }
-
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.ty.visit_with(visitor)
}
}
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- folder.fold_fn_sig(self)
- }
-
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.inputs().iter().any(|i| i.visit_with(visitor)) ||
self.output().visit_with(visitor)
}
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- folder.fold_impl_header(self)
- }
-
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.self_ty.visit_with(visitor) ||
self.trait_ref.map(|r| r.visit_with(visitor)).unwrap_or(false) ||
}
}
-impl<'tcx> TypeFoldable<'tcx> for ty::ItemSubsts<'tcx> {
+impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::Adjustment<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- ty::ItemSubsts {
- substs: self.substs.fold_with(folder),
+ ty::adjustment::Adjustment {
+ kind: self.kind.fold_with(folder),
+ target: self.target.fold_with(folder),
}
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
- self.substs.visit_with(visitor)
+ self.kind.visit_with(visitor) ||
+ self.target.visit_with(visitor)
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::Adjust<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ match *self {
+ ty::adjustment::Adjust::NeverToAny |
+ ty::adjustment::Adjust::ReifyFnPointer |
+ ty::adjustment::Adjust::UnsafeFnPointer |
+ ty::adjustment::Adjust::ClosureFnPointer |
+ ty::adjustment::Adjust::MutToConstPointer |
+ ty::adjustment::Adjust::Unsize => self.clone(),
+ ty::adjustment::Adjust::Deref(ref overloaded) => {
+ ty::adjustment::Adjust::Deref(overloaded.fold_with(folder))
+ }
+ ty::adjustment::Adjust::Borrow(ref autoref) => {
+ ty::adjustment::Adjust::Borrow(autoref.fold_with(folder))
+ }
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ match *self {
+ ty::adjustment::Adjust::NeverToAny |
+ ty::adjustment::Adjust::ReifyFnPointer |
+ ty::adjustment::Adjust::UnsafeFnPointer |
+ ty::adjustment::Adjust::ClosureFnPointer |
+ ty::adjustment::Adjust::MutToConstPointer |
+ ty::adjustment::Adjust::Unsize => false,
+ ty::adjustment::Adjust::Deref(ref overloaded) => {
+ overloaded.visit_with(visitor)
+ }
+ ty::adjustment::Adjust::Borrow(ref autoref) => {
+ autoref.visit_with(visitor)
+ }
+ }
+ }
+}
+
+impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::OverloadedDeref<'tcx> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ ty::adjustment::OverloadedDeref {
+ region: self.region.fold_with(folder),
+ mutbl: self.mutbl,
+ }
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.region.visit_with(visitor)
}
}
}
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- folder.fold_autoref(self)
- }
-
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
match *self {
ty::adjustment::AutoBorrow::Ref(r, _m) => r.visit_with(visitor),
}
}
+impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Slice<ty::Predicate<'tcx>> {
+ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
+ let v = self.iter().map(|p| p.fold_with(folder)).collect::<AccumulateVec<[_; 8]>>();
+ folder.tcx().intern_predicates(&v)
+ }
+
+ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
+ self.iter().any(|p| p.visit_with(visitor))
+ }
+}
+
impl<'tcx> TypeFoldable<'tcx> for ty::Predicate<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
match *self {
//! This module contains TypeVariants and its major components
use hir::def_id::DefId;
+use hir::map::DefPathHash;
use middle::region;
use ty::subst::Substs;
use serialize;
use hir;
-use ich;
use self::InferTy::*;
use self::TypeVariants::*;
TyFnDef(DefId, &'tcx Substs<'tcx>, PolyFnSig<'tcx>),
/// A pointer to a function. Written as `fn() -> i32`.
- /// FIXME: This is currently also used to represent the callee of a method;
- /// see ty::MethodCallee etc.
TyFnPtr(PolyFnSig<'tcx>),
/// A trait, defined with `trait`.
self.item_name // safe to skip the binder to access a name
}
- pub fn sort_key(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> (ich::Fingerprint, InternedString) {
+ pub fn sort_key(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> (DefPathHash, InternedString) {
// We want something here that is stable across crate boundaries.
// The DefId isn't but the `deterministic_hash` of the corresponding
// DefPath is.
self.skip_binder().item_name()
}
- pub fn sort_key(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> (ich::Fingerprint, InternedString) {
+ pub fn sort_key(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> (DefPathHash, InternedString) {
self.skip_binder().sort_key(tcx)
}
}
}
- /// Type accessors for substructures of types
- pub fn fn_args(&self) -> ty::Binder<&'tcx [Ty<'tcx>]> {
- self.fn_sig().inputs()
- }
-
- pub fn fn_ret(&self) -> Binder<Ty<'tcx>> {
- self.fn_sig().output()
- }
-
pub fn is_fn(&self) -> bool {
match self.sty {
TyFnDef(..) | TyFnPtr(_) => true,
}
}
- fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
- folder.fold_substs(self)
- }
-
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
self.iter().any(|t| t.visit_with(visitor))
}
// except according to those terms.
use hir::def_id::DefId;
-use ich::Fingerprint;
+use hir::map::DefPathHash;
use traits::specialization_graph;
use ty::fast_reject;
use ty::fold::TypeFoldable;
/// The ICH of this trait's DefPath, cached here so it doesn't have to be
/// recomputed all the time.
- pub def_path_hash: Fingerprint,
+ pub def_path_hash: DefPathHash,
}
// We don't store the list of impls in a flat list because each cached list of
unsafety: hir::Unsafety,
paren_sugar: bool,
has_default_impl: bool,
- def_path_hash: Fingerprint)
+ def_path_hash: DefPathHash)
-> TraitDef {
TraitDef {
def_id,
use hir::def_id::{DefId, LOCAL_CRATE};
use hir::map::DefPathData;
-use infer::InferCtxt;
use ich::{StableHashingContext, NodeIdHashingMode};
use traits::{self, Reveal};
use ty::{self, Ty, TyCtxt, TypeFoldable};
impl<'tcx> ty::ParamEnv<'tcx> {
/// Construct a trait environment suitable for contexts where
/// there are no where clauses in scope.
- pub fn empty() -> Self {
- Self::new(ty::Slice::empty())
+ pub fn empty(reveal: Reveal) -> Self {
+ Self::new(ty::Slice::empty(), reveal)
}
/// Construct a trait environment with the given set of predicates.
- pub fn new(caller_bounds: &'tcx ty::Slice<ty::Predicate<'tcx>>) -> Self {
- ty::ParamEnv { caller_bounds }
+ pub fn new(caller_bounds: &'tcx ty::Slice<ty::Predicate<'tcx>>,
+ reveal: Reveal)
+ -> Self {
+ ty::ParamEnv { caller_bounds, reveal }
}
- pub fn can_type_implement_copy<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ /// Returns a new parameter environment with the same clauses, but
+ /// which "reveals" the true results of projections in all cases
+ /// (even for associated types that are specializable). This is
+ /// the desired behavior during trans and certain other special
+ /// contexts; normally though we want to use `Reveal::UserFacing`,
+ /// which is the default.
+ pub fn reveal_all(self) -> Self {
+ ty::ParamEnv { reveal: Reveal::All, ..self }
+ }
+
+ pub fn can_type_implement_copy<'a>(self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
self_type: Ty<'tcx>, span: Span)
- -> Result<(), CopyImplementationError> {
+ -> Result<(), CopyImplementationError<'tcx>> {
// FIXME: (@jroesch) float this code up
- tcx.infer_ctxt(self.clone(), Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
let (adt, substs) = match self_type.sty {
ty::TyAdt(adt, substs) => (adt, substs),
_ => return Err(CopyImplementationError::NotAnAdt),
let field_implements_copy = |field: &ty::FieldDef| {
let cause = traits::ObligationCause::dummy();
- match traits::fully_normalize(&infcx, cause, &field.ty(tcx, substs)) {
- Ok(ty) => !infcx.type_moves_by_default(ty, span),
+ match traits::fully_normalize(&infcx, cause, self, &field.ty(tcx, substs)) {
+ Ok(ty) => !infcx.type_moves_by_default(self, ty, span),
Err(..) => false,
}
};
tcx.needs_drop_raw(param_env.and(self))
}
+ /// Computes the layout of a type. Note that this implicitly
+ /// executes in "reveal all" mode.
#[inline]
- pub fn layout<'lcx>(&'tcx self, infcx: &InferCtxt<'a, 'tcx, 'lcx>)
+ pub fn layout<'lcx>(&'tcx self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>)
-> Result<&'tcx Layout, LayoutError<'tcx>> {
- let tcx = infcx.tcx.global_tcx();
- let can_cache = !self.has_param_types() && !self.has_self_ty();
- if can_cache {
- if let Some(&cached) = tcx.layout_cache.borrow().get(&self) {
- return Ok(cached);
- }
+ let ty = tcx.erase_regions(&self);
+ let layout = tcx.layout_raw(param_env.reveal_all().and(ty));
+
+ // NB: This recording is normally disabled; when enabled, it
+ // can however trigger recursive invocations of `layout()`.
+ // Therefore, we execute it *after* the main query has
+ // completed, to avoid problems around recursive structures
+ // and the like. (Admitedly, I wasn't able to reproduce a problem
+ // here, but it seems like the right thing to do. -nmatsakis)
+ if let Ok(l) = layout {
+ Layout::record_layout_for_printing(tcx, ty, param_env, l);
}
- let rec_limit = tcx.sess.recursion_limit.get();
- let depth = tcx.layout_depth.get();
- if depth > rec_limit {
- tcx.sess.fatal(
- &format!("overflow representing the type `{}`", self));
- }
-
- tcx.layout_depth.set(depth+1);
- let layout = Layout::compute_uncached(self, infcx);
- tcx.layout_depth.set(depth);
- let layout = layout?;
- if can_cache {
- tcx.layout_cache.borrow_mut().insert(self, layout);
- }
- Ok(layout)
+ layout
}
{
let (param_env, ty) = query.into_parts();
let trait_def_id = tcx.require_lang_item(lang_items::CopyTraitLangItem);
- tcx.infer_ctxt(param_env, Reveal::UserFacing)
- .enter(|infcx| traits::type_known_to_meet_bound(&infcx, ty, trait_def_id, DUMMY_SP))
+ tcx.infer_ctxt(())
+ .enter(|infcx| traits::type_known_to_meet_bound(&infcx,
+ param_env,
+ ty,
+ trait_def_id,
+ DUMMY_SP))
}
fn is_sized_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
{
let (param_env, ty) = query.into_parts();
let trait_def_id = tcx.require_lang_item(lang_items::SizedTraitLangItem);
- tcx.infer_ctxt(param_env, Reveal::UserFacing)
- .enter(|infcx| traits::type_known_to_meet_bound(&infcx, ty, trait_def_id, DUMMY_SP))
+ tcx.infer_ctxt(())
+ .enter(|infcx| traits::type_known_to_meet_bound(&infcx,
+ param_env,
+ ty,
+ trait_def_id,
+ DUMMY_SP))
}
fn is_freeze_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
{
let (param_env, ty) = query.into_parts();
let trait_def_id = tcx.require_lang_item(lang_items::FreezeTraitLangItem);
- tcx.infer_ctxt(param_env, Reveal::UserFacing)
- .enter(|infcx| traits::type_known_to_meet_bound(&infcx, ty, trait_def_id, DUMMY_SP))
+ tcx.infer_ctxt(())
+ .enter(|infcx| traits::type_known_to_meet_bound(&infcx,
+ param_env,
+ ty,
+ trait_def_id,
+ DUMMY_SP))
}
fn needs_drop_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
}
+fn layout_raw<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>)
+ -> Result<&'tcx Layout, LayoutError<'tcx>>
+{
+ let (param_env, ty) = query.into_parts();
+
+ let rec_limit = tcx.sess.recursion_limit.get();
+ let depth = tcx.layout_depth.get();
+ if depth > rec_limit {
+ tcx.sess.fatal(
+ &format!("overflow representing the type `{}`", ty));
+ }
+
+ tcx.layout_depth.set(depth+1);
+ let layout = Layout::compute_uncached(tcx, param_env, ty);
+ tcx.layout_depth.set(depth);
+
+ layout
+}
pub fn provide(providers: &mut ty::maps::Providers) {
*providers = ty::maps::Providers {
is_sized_raw,
is_freeze_raw,
needs_drop_raw,
+ layout_raw,
..*providers
};
}
/// make any progress at all. This is to prevent "livelock" where we
/// say "$0 is WF if $0 is WF".
pub fn obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body_id: ast::NodeId,
ty: Ty<'tcx>,
span: Span)
-> Option<Vec<traits::PredicateObligation<'tcx>>>
{
let mut wf = WfPredicates { infcx: infcx,
+ param_env: param_env,
body_id: body_id,
span: span,
out: vec![] };
/// `trait Set<K:Eq>`, then the trait reference `Foo: Set<Bar>` is WF
/// if `Bar: Eq`.
pub fn trait_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body_id: ast::NodeId,
trait_ref: &ty::TraitRef<'tcx>,
span: Span)
-> Vec<traits::PredicateObligation<'tcx>>
{
- let mut wf = WfPredicates { infcx: infcx, body_id: body_id, span: span, out: vec![] };
+ let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![] };
wf.compute_trait_ref(trait_ref);
wf.normalize()
}
pub fn predicate_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body_id: ast::NodeId,
predicate: &ty::Predicate<'tcx>,
span: Span)
-> Vec<traits::PredicateObligation<'tcx>>
{
- let mut wf = WfPredicates { infcx: infcx, body_id: body_id, span: span, out: vec![] };
+ let mut wf = WfPredicates { infcx, param_env, body_id, span, out: vec![] };
// (*) ok to skip binders, because wf code is prepared for it
match *predicate {
/// the `ImpliedBound` type for more details.
pub fn implied_bounds<'a, 'gcx, 'tcx>(
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body_id: ast::NodeId,
ty: Ty<'tcx>,
span: Span)
// than the ultimate set. (Note: normally there won't be
// unresolved inference variables here anyway, but there might be
// during typeck under some circumstances.)
- let obligations = obligations(infcx, body_id, ty, span).unwrap_or(vec![]);
+ let obligations = obligations(infcx, param_env, body_id, ty, span).unwrap_or(vec![]);
// From the full set of obligations, just filter down to the
// region relationships.
struct WfPredicates<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body_id: ast::NodeId,
span: Span,
out: Vec<traits::PredicateObligation<'tcx>>,
fn normalize(&mut self) -> Vec<traits::PredicateObligation<'tcx>> {
let cause = self.cause(traits::MiscObligation);
let infcx = &mut self.infcx;
+ let param_env = self.param_env;
self.out.iter()
.inspect(|pred| assert!(!pred.has_escaping_regions()))
.flat_map(|pred| {
let mut selcx = traits::SelectionContext::new(infcx);
- let pred = traits::normalize(&mut selcx, cause.clone(), pred);
+ let pred = traits::normalize(&mut selcx, param_env, cause.clone(), pred);
once(pred.value).chain(pred.obligations)
})
.collect()
self.out.extend(obligations);
let cause = self.cause(traits::MiscObligation);
+ let param_env = self.param_env;
self.out.extend(
trait_ref.substs.types()
.filter(|ty| !ty.has_escaping_regions())
.map(|ty| traits::Obligation::new(cause.clone(),
+ param_env,
ty::Predicate::WellFormed(ty))));
}
if !data.has_escaping_regions() {
let predicate = data.trait_ref.to_predicate();
let cause = self.cause(traits::ProjectionWf(data));
- self.out.push(traits::Obligation::new(cause, predicate));
+ self.out.push(traits::Obligation::new(cause, self.param_env, predicate));
}
}
def_id: self.infcx.tcx.require_lang_item(lang_items::SizedTraitLangItem),
substs: self.infcx.tcx.mk_substs_trait(subty, &[]),
};
- self.out.push(traits::Obligation::new(cause, trait_ref.to_predicate()));
+ self.out.push(traits::Obligation::new(cause, self.param_env, trait_ref.to_predicate()));
}
}
/// in which case we are not able to simplify at all.
fn compute(&mut self, ty0: Ty<'tcx>) -> bool {
let mut subtys = ty0.walk();
+ let param_env = self.param_env;
while let Some(ty) = subtys.next() {
match ty.sty {
ty::TyBool |
self.out.push(
traits::Obligation::new(
cause,
+ param_env,
ty::Predicate::TypeOutlives(
ty::Binder(
ty::OutlivesPredicate(mt.ty, r)))));
// checking those
let cause = self.cause(traits::MiscObligation);
-
let component_traits =
data.auto_traits().chain(data.principal().map(|p| p.def_id()));
self.out.extend(
component_traits.map(|did| traits::Obligation::new(
cause.clone(),
+ param_env,
ty::Predicate::ObjectSafe(did)
))
);
let cause = self.cause(traits::MiscObligation);
self.out.push( // ...not the type we started from, so we made progress.
- traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
+ traits::Obligation::new(cause,
+ self.param_env,
+ ty::Predicate::WellFormed(ty)));
} else {
// Yes, resolved, proceed with the
// result. Should never return false because
let cause = self.cause(traits::ItemObligation(def_id));
predicates.predicates
.into_iter()
- .map(|pred| traits::Obligation::new(cause.clone(), pred))
+ .map(|pred| traits::Obligation::new(cause.clone(),
+ self.param_env,
+ pred))
.filter(|pred| !pred.has_escaping_regions())
.collect()
}
for implicit_bound in implicit_bounds {
let cause = self.cause(traits::ObjectTypeBound(ty, explicit_bound));
let outlives = ty::Binder(ty::OutlivesPredicate(explicit_bound, implicit_bound));
- self.out.push(traits::Obligation::new(cause, outlives.to_predicate()));
+ self.out.push(traits::Obligation::new(cause,
+ self.param_env,
+ outlives.to_predicate()));
}
}
}
}
}
-impl<'tcx> fmt::Debug for ty::ItemSubsts<'tcx> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "ItemSubsts({:?})", self.substs)
- }
-}
-
impl<'tcx> fmt::Debug for ty::TraitRef<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// when printing out the debug representation, we don't need
dfcx_loans: &'a LoanDataFlow<'a, 'tcx>,
move_data: &'a move_data::FlowedMoveData<'a, 'tcx>,
all_loans: &'a [Loan<'tcx>],
- param_env: &'a ty::ParamEnv<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
}
impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> {
body: &hir::Body) {
debug!("check_loans(body id={})", body.value.id);
+ let def_id = bccx.tcx.hir.body_owner_def_id(body.id());
let infcx = bccx.tcx.borrowck_fake_infer_ctxt(body.id());
+ let param_env = bccx.tcx.param_env(def_id);
let mut clcx = CheckLoanCtxt {
bccx: bccx,
dfcx_loans: dfcx_loans,
move_data: move_data,
all_loans: all_loans,
- param_env: &infcx.param_env
+ param_env,
};
- euv::ExprUseVisitor::new(&mut clcx, &bccx.region_maps, &infcx).consume_body(body);
+ euv::ExprUseVisitor::new(&mut clcx, &bccx.region_maps, &infcx, param_env).consume_body(body);
}
#[derive(PartialEq)]
self.check_if_assigned_path_is_moved(id, span,
use_kind, lp_base);
}
- LpExtend(ref lp_base, _, LpInterior(_, InteriorElement(..))) |
+ LpExtend(ref lp_base, _, LpInterior(_, InteriorElement)) |
LpExtend(ref lp_base, _, LpDeref(_)) => {
// assigning to `P[i]` requires `P` is initialized
// assigning to `(*P)` requires `P` is initialized
cmt: &mc::cmt<'tcx>)
-> Option<mc::cmt<'tcx>> {
match cmt.cat {
- Categorization::Deref(.., mc::BorrowedPtr(..)) |
- Categorization::Deref(.., mc::Implicit(..)) |
- Categorization::Deref(.., mc::UnsafePtr(..)) |
+ Categorization::Deref(_, mc::BorrowedPtr(..)) |
+ Categorization::Deref(_, mc::Implicit(..)) |
+ Categorization::Deref(_, mc::UnsafePtr(..)) |
Categorization::StaticItem => {
Some(cmt.clone())
}
Categorization::Downcast(ref b, _) |
Categorization::Interior(ref b, mc::InteriorField(_)) |
- Categorization::Interior(ref b, mc::InteriorElement(Kind::Pattern, _)) => {
+ Categorization::Interior(ref b, mc::InteriorElement(Kind::Pattern)) => {
match b.ty.sty {
ty::TyAdt(def, _) => {
if def.has_dtor(bccx.tcx) {
}
}
- Categorization::Interior(_, mc::InteriorElement(Kind::Index, _)) => {
+ Categorization::Interior(_, mc::InteriorElement(Kind::Index)) => {
// Forbid move of arr[i] for arr: [T; 3]; see RFC 533.
Some(cmt.clone())
}
- Categorization::Deref(ref b, _, mc::Unique) => {
+ Categorization::Deref(ref b, mc::Unique) => {
check_and_get_illegal_move_origin(bccx, b)
}
}
match cmt.cat {
Categorization::Rvalue(..) |
- Categorization::Local(..) | // L-Local
+ Categorization::Local(..) | // L-Local
Categorization::Upvar(..) |
- Categorization::Deref(.., mc::BorrowedPtr(..)) | // L-Deref-Borrowed
- Categorization::Deref(.., mc::Implicit(..)) |
- Categorization::Deref(.., mc::UnsafePtr(..)) => {
+ Categorization::Deref(_, mc::BorrowedPtr(..)) | // L-Deref-Borrowed
+ Categorization::Deref(_, mc::Implicit(..)) |
+ Categorization::Deref(_, mc::UnsafePtr(..)) => {
self.check_scope(self.scope(cmt))
}
}
Categorization::Downcast(ref base, _) |
- Categorization::Deref(ref base, _, mc::Unique) | // L-Deref-Send
- Categorization::Interior(ref base, _) => { // L-Field
+ Categorization::Deref(ref base, mc::Unique) | // L-Deref-Send
+ Categorization::Interior(ref base, _) => { // L-Field
self.check(base, discr_scope)
}
}
//! rooting etc, and presuming `cmt` is not mutated.
match cmt.cat {
- Categorization::Rvalue(temp_scope, _) => {
+ Categorization::Rvalue(temp_scope) => {
temp_scope
}
Categorization::Upvar(..) => {
self.bccx.region_maps.var_scope(local_id)))
}
Categorization::StaticItem |
- Categorization::Deref(.., mc::UnsafePtr(..)) => {
+ Categorization::Deref(_, mc::UnsafePtr(..)) => {
self.bccx.tcx.types.re_static
}
- Categorization::Deref(.., mc::BorrowedPtr(_, r)) |
- Categorization::Deref(.., mc::Implicit(_, r)) => {
+ Categorization::Deref(_, mc::BorrowedPtr(_, r)) |
+ Categorization::Deref(_, mc::Implicit(_, r)) => {
r
}
Categorization::Downcast(ref cmt, _) |
- Categorization::Deref(ref cmt, _, mc::Unique) |
+ Categorization::Deref(ref cmt, mc::Unique) |
Categorization::Interior(ref cmt, _) => {
self.scope(cmt)
}
pub fn gather_loans_in_fn<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
body: hir::BodyId)
- -> (Vec<Loan<'tcx>>,
- move_data::MoveData<'tcx>) {
+ -> (Vec<Loan<'tcx>>, move_data::MoveData<'tcx>) {
+ let def_id = bccx.tcx.hir.body_owner_def_id(body);
let infcx = bccx.tcx.borrowck_fake_infer_ctxt(body);
+ let param_env = bccx.tcx.param_env(def_id);
let mut glcx = GatherLoanCtxt {
bccx: bccx,
infcx: &infcx,
};
let body = glcx.bccx.tcx.hir.body(body);
- euv::ExprUseVisitor::new(&mut glcx, &bccx.region_maps, &infcx).consume_body(body);
+ euv::ExprUseVisitor::new(&mut glcx, &bccx.region_maps, &infcx, param_env).consume_body(body);
glcx.report_potential_errors();
let GatherLoanCtxt { all_loans, move_data, .. } = glcx;
move_from: mc::cmt<'tcx>)
-> DiagnosticBuilder<'a> {
match move_from.cat {
- Categorization::Deref(.., mc::BorrowedPtr(..)) |
- Categorization::Deref(.., mc::Implicit(..)) |
- Categorization::Deref(.., mc::UnsafePtr(..)) |
+ Categorization::Deref(_, mc::BorrowedPtr(..)) |
+ Categorization::Deref(_, mc::Implicit(..)) |
+ Categorization::Deref(_, mc::UnsafePtr(..)) |
Categorization::StaticItem => {
let mut err = struct_span_err!(bccx, move_from.span, E0507,
"cannot move out of {}",
err
}
- Categorization::Interior(ref b, mc::InteriorElement(ik, _)) => {
+ Categorization::Interior(ref b, mc::InteriorElement(ik)) => {
match (&b.ty.sty, ik) {
(&ty::TySlice(..), _) |
(_, Kind::Index) => {
RestrictionResult::Safe
}
- Categorization::Deref(cmt_base, _, pk) => {
+ Categorization::Deref(cmt_base, pk) => {
match pk {
mc::Unique => {
// R-Deref-Send-Pointer
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum InteriorKind {
InteriorField(mc::FieldName),
- InteriorElement(mc::ElementKind),
+ InteriorElement,
}
trait ToInteriorKind { fn cleaned(self) -> InteriorKind; }
fn cleaned(self) -> InteriorKind {
match self {
mc::InteriorField(name) => InteriorField(name),
- mc::InteriorElement(_, elem_kind) => InteriorElement(elem_kind),
+ mc::InteriorElement(_) => InteriorElement,
}
}
}
Some(new_lp(LpUpvar(id)))
}
- Categorization::Deref(ref cmt_base, _, pk) => {
+ Categorization::Deref(ref cmt_base, pk) => {
opt_loan_path(cmt_base).map(|lp| {
new_lp(LpExtend(lp, cmt.mutbl, LpDeref(pk)))
})
lp: &LoanPath<'tcx>,
the_move: &move_data::Move,
moved_lp: &LoanPath<'tcx>,
- _param_env: &ty::ParamEnv<'tcx>) {
+ _param_env: ty::ParamEnv<'tcx>) {
let (verb, verb_participle) = match use_kind {
MovedInUse => ("use", "used"),
MovedInCapture => ("capture", "captured"),
if let Some(_) = statement_scope_span(self.tcx, super_scope) {
db.note("consider using a `let` binding to increase its lifetime");
}
-
-
-
- match err.cmt.cat {
- mc::Categorization::Rvalue(r, or) if r != or => {
- db.note("\
-before rustc 1.16, this temporary lived longer - see issue #39283 \
-(https://github.com/rust-lang/rust/issues/39283)");
- }
- _ => {}
- }
}
err_borrowed_pointer_too_short(loan_scope, ptr_scope) => {
}
}
- LpExtend(ref lp_base, _, LpInterior(_, InteriorElement(..))) => {
+ LpExtend(ref lp_base, _, LpInterior(_, InteriorElement)) => {
self.append_autoderefd_loan_path_to_string(&lp_base, out);
out.push_str("[..]");
}
match *self {
InteriorField(mc::NamedField(fld)) => write!(f, "{}", fld),
InteriorField(mc::PositionalField(i)) => write!(f, "#{}", i),
- InteriorElement(..) => write!(f, "[]"),
+ InteriorElement => write!(f, "[]"),
}
}
}
LpVar(_) | LpUpvar(_) => {
true
}
- LpExtend(.., LpInterior(_, InteriorKind::InteriorElement(..))) => {
+ LpExtend(.., LpInterior(_, InteriorKind::InteriorElement)) => {
// Paths involving element accesses a[i] do not refer to a unique
// location, as there is no accurate tracking of the indices.
//
use rustc::middle::mem_categorization::{cmt};
use rustc::middle::region::RegionMaps;
use rustc::session::Session;
-use rustc::traits::Reveal;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::lint;
use rustc_errors::{Diagnostic, Level, DiagnosticBuilder};
fn visit_local(&mut self, loc: &'tcx hir::Local) {
intravisit::walk_local(self, loc);
- self.check_irrefutable(&loc.pat, false);
+ self.check_irrefutable(&loc.pat, match loc.source {
+ hir::LocalSource::Normal => "local binding",
+ hir::LocalSource::ForLoopDesugar => "`for` loop binding",
+ });
// Check legality of move bindings and `@` patterns.
self.check_patterns(false, slice::ref_slice(&loc.pat));
intravisit::walk_body(self, body);
for arg in &body.arguments {
- self.check_irrefutable(&arg.pat, true);
+ self.check_irrefutable(&arg.pat, "function argument");
self.check_patterns(false, slice::ref_slice(&arg.pat));
}
}
.map(|pat| vec![pat.0])
.collect();
let scrut_ty = self.tables.node_id_to_type(scrut.id);
- check_exhaustive(cx, scrut_ty, scrut.span, &matrix, source);
+ check_exhaustive(cx, scrut_ty, scrut.span, &matrix);
})
}
}
}
- fn check_irrefutable(&self, pat: &Pat, is_fn_arg: bool) {
- let origin = if is_fn_arg {
- "function argument"
- } else {
- "local binding"
- };
-
+ fn check_irrefutable(&self, pat: &Pat, origin: &str) {
let module = self.tcx.hir.get_module_parent(pat.id);
MatchCheckCtxt::create_and_enter(self.tcx, module, |ref mut cx| {
let mut patcx = PatternContext::new(self.tcx, self.tables);
fn check_exhaustive<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
scrut_ty: Ty<'tcx>,
sp: Span,
- matrix: &Matrix<'a, 'tcx>,
- source: hir::MatchSource) {
+ matrix: &Matrix<'a, 'tcx>) {
let wild_pattern = Pattern {
ty: scrut_ty,
span: DUMMY_SP,
} else {
pats.iter().map(|w| w.single_pattern()).collect()
};
- match source {
- hir::MatchSource::ForLoopDesugar => {
- // `witnesses[0]` has the form `Some(<head>)`, peel off the `Some`
- let witness = match *witnesses[0].kind {
- PatternKind::Variant { ref subpatterns, .. } => match &subpatterns[..] {
- &[ref pat] => &pat.pattern,
- _ => bug!(),
- },
- _ => bug!(),
- };
- let pattern_string = witness.to_string();
- struct_span_err!(cx.tcx.sess, sp, E0297,
- "refutable pattern in `for` loop binding: \
- `{}` not covered",
- pattern_string)
- .span_label(sp, format!("pattern `{}` not covered", pattern_string))
- .emit();
+
+ const LIMIT: usize = 3;
+ let joined_patterns = match witnesses.len() {
+ 0 => bug!(),
+ 1 => format!("`{}`", witnesses[0]),
+ 2...LIMIT => {
+ let (tail, head) = witnesses.split_last().unwrap();
+ let head: Vec<_> = head.iter().map(|w| w.to_string()).collect();
+ format!("`{}` and `{}`", head.join("`, `"), tail)
},
_ => {
- const LIMIT: usize = 3;
- let joined_patterns = match witnesses.len() {
- 0 => bug!(),
- 1 => format!("`{}`", witnesses[0]),
- 2...LIMIT => {
- let (tail, head) = witnesses.split_last().unwrap();
- let head: Vec<_> = head.iter().map(|w| w.to_string()).collect();
- format!("`{}` and `{}`", head.join("`, `"), tail)
- },
- _ => {
- let (head, tail) = witnesses.split_at(LIMIT);
- let head: Vec<_> = head.iter().map(|w| w.to_string()).collect();
- format!("`{}` and {} more", head.join("`, `"), tail.len())
- }
- };
-
- let label_text = match witnesses.len() {
- 1 => format!("pattern {} not covered", joined_patterns),
- _ => format!("patterns {} not covered", joined_patterns)
- };
- create_e0004(cx.tcx.sess, sp,
- format!("non-exhaustive patterns: {} not covered",
- joined_patterns))
- .span_label(sp, label_text)
- .emit();
- },
- }
+ let (head, tail) = witnesses.split_at(LIMIT);
+ let head: Vec<_> = head.iter().map(|w| w.to_string()).collect();
+ format!("`{}` and {} more", head.join("`, `"), tail.len())
+ }
+ };
+
+ let label_text = match witnesses.len() {
+ 1 => format!("pattern {} not covered", joined_patterns),
+ _ => format!("patterns {} not covered", joined_patterns)
+ };
+ create_e0004(cx.tcx.sess, sp,
+ format!("non-exhaustive patterns: {} not covered",
+ joined_patterns))
+ .span_label(sp, label_text)
+ .emit();
}
NotUseful => {
// This is good, wildcard pattern isn't reachable
///
/// FIXME: this should be done by borrowck.
fn check_for_mutation_in_guard(cx: &MatchVisitor, guard: &hir::Expr) {
- cx.tcx.infer_ctxt((cx.tables, cx.param_env), Reveal::UserFacing).enter(|infcx| {
+ cx.tcx.infer_ctxt(cx.tables).enter(|infcx| {
let mut checker = MutationChecker {
cx: cx,
};
- ExprUseVisitor::new(&mut checker, cx.region_maps, &infcx).walk_expr(guard);
+ ExprUseVisitor::new(&mut checker, cx.region_maps, &infcx, cx.param_env).walk_expr(guard);
});
}
E0297: r##"
+#### Note: this error code is no longer emitted by the compiler.
+
Patterns used to bind names must be irrefutable. That is, they must guarantee
that a name will be extracted in all cases. Instead of pattern matching the
loop variable, consider using a `match` or `if let` inside the loop body. For
instance:
-```compile_fail,E0297
+```compile_fail,E0005
let xs : Vec<Option<i32>> = vec![Some(1), None];
// This fails because `None` is not covered.
}
}
hir::ExprPath(ref qpath) => {
- let substs = cx.tables.node_id_item_substs(e.id)
- .unwrap_or_else(|| tcx.intern_substs(&[]));
+ let substs = cx.tables.node_substs(e.id);
// Avoid applying substitutions if they're empty, that'd ICE.
let substs = if cx.substs.is_empty() {
debug!("resolve_trait_associated_const: trait_ref={:?}",
trait_ref);
- tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
+ let param_env = ty::ParamEnv::empty(Reveal::UserFacing);
let mut selcx = traits::SelectionContext::new(&infcx);
let obligation = traits::Obligation::new(traits::ObligationCause::dummy(),
+ param_env,
trait_ref.to_poly_trait_predicate());
let selection = match selcx.select(&obligation) {
Ok(Some(vtable)) => vtable,
let kind = match def {
Def::Const(def_id) | Def::AssociatedConst(def_id) => {
let tcx = self.tcx.global_tcx();
- let substs = self.tables.node_id_item_substs(id)
- .unwrap_or_else(|| tcx.intern_substs(&[]));
+ let substs = self.tables.node_substs(id);
match eval::lookup_const_by_id(tcx, def_id, substs) {
Some((def_id, _substs)) => {
// Enter the inlined constant's tables temporarily.
struct Env<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
infcx: &'a infer::InferCtxt<'a, 'gcx, 'tcx>,
region_maps: &'a mut RegionMaps,
+ param_env: ty::ParamEnv<'tcx>,
}
struct RH<'a> {
index,
"test_crate",
|tcx| {
- tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
let mut region_maps = RegionMaps::new();
- body(Env { infcx: &infcx, region_maps: &mut region_maps });
+ body(Env {
+ infcx: &infcx,
+ region_maps: &mut region_maps,
+ param_env: ty::ParamEnv::empty(Reveal::UserFacing),
+ });
let free_regions = FreeRegionMap::new();
let def_id = tcx.hir.local_def_id(ast::CRATE_NODE_ID);
infcx.resolve_regions_and_report_errors(def_id, ®ion_maps, &free_regions);
}
pub fn make_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
- match self.infcx.sub_types(true, &ObligationCause::dummy(), a, b) {
+ match self.infcx.at(&ObligationCause::dummy(), self.param_env).sub(a, b) {
Ok(_) => true,
Err(ref e) => panic!("Encountered error: {}", e),
}
}
pub fn is_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
- self.infcx.can_sub_types(a, b).is_ok()
+ self.infcx.can_sub(self.param_env, a, b).is_ok()
}
pub fn assert_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) {
self.tcx().types.isize)
}
- pub fn dummy_type_trace(&self) -> infer::TypeTrace<'tcx> {
- infer::TypeTrace::dummy(self.tcx())
- }
-
- pub fn sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
- let trace = self.dummy_type_trace();
- self.infcx.sub(true, trace, &t1, &t2)
+ pub fn sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, ()> {
+ self.infcx.at(&ObligationCause::dummy(), self.param_env).sub(t1, t2)
}
pub fn lub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
- let trace = self.dummy_type_trace();
- self.infcx.lub(true, trace, &t1, &t2)
+ self.infcx.at(&ObligationCause::dummy(), self.param_env).lub(t1, t2)
}
pub fn glb(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
- let trace = self.dummy_type_trace();
- self.infcx.glb(true, trace, &t1, &t2)
+ self.infcx.at(&ObligationCause::dummy(), self.param_env).glb(t1, t2)
}
/// Checks that `t1 <: t2` is true (this may register additional
/// region checks).
pub fn check_sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) {
match self.sub(t1, t2) {
- Ok(InferOk { obligations, .. }) => {
+ Ok(InferOk { obligations, value: () }) => {
// None of these tests should require nested obligations:
assert!(obligations.is_empty());
}
self
}
+ pub fn note_trait_signature(&mut self, name: String, signature: String) -> &mut Self {
+ self.highlighted_note(vec![
+ (format!("`{}` from trait: `", name), Style::NoStyle),
+ (signature, Style::Highlight),
+ ("`".to_string(), Style::NoStyle)]);
+ self
+ }
+
pub fn note(&mut self, msg: &str) -> &mut Self {
self.sub(Level::Note, msg, MultiSpan::new(), None);
self
if *sp == DUMMY_SP {
continue;
}
- if cm.span_to_filename(sp.clone()).contains("macros>") {
- let v = sp.macro_backtrace();
- if let Some(use_site) = v.last() {
- before_after.push((sp.clone(), use_site.call_site.clone()));
- }
+ let call_sp = cm.call_span_if_macro(*sp);
+ if call_sp != *sp {
+ before_after.push((sp.clone(), call_sp));
}
for trace in sp.macro_backtrace().iter().rev() {
// Only show macro locations that are local
fn span_to_string(&self, sp: Span) -> String;
fn span_to_filename(&self, sp: Span) -> FileName;
fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>;
+ fn call_span_if_macro(&self, sp: Span) -> Span;
}
impl CodeSuggestion {
use rustc::dep_graph::DepNode;
use rustc::hir;
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::map::DefPathHash;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ich::{Fingerprint, StableHashingContext};
use rustc::ty::TyCtxt;
{
let tcx = self.hcx.tcx();
- let mut impls: Vec<(Fingerprint, Fingerprint)> = krate
+ let mut impls: Vec<(DefPathHash, Fingerprint)> = krate
.trait_impls
.iter()
.map(|(&trait_id, impls)| {
use rustc::dep_graph::{DepNode, WorkProduct, WorkProductId};
use rustc::hir::def_id::DefIndex;
+use rustc::hir::map::DefPathHash;
use rustc::ich::Fingerprint;
use rustc::middle::cstore::EncodedMetadataHash;
use std::sync::Arc;
use rustc_data_structures::fx::FxHashMap;
-
-use super::directory::DefPathIndex;
+use rustc_data_structures::indexed_vec::{IndexVec, Idx};
/// Data for use when recompiling the **current crate**.
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedDepGraph {
- pub edges: Vec<SerializedEdgeSet>,
+ /// The set of all DepNodes in the graph
+ pub nodes: IndexVec<DepNodeIndex, DepNode<DefPathHash>>,
+ /// For each DepNode, stores the list of edges originating from that
+ /// DepNode. Encoded as a [start, end) pair indexing into edge_list_data,
+ /// which holds the actual DepNodeIndices of the target nodes.
+ pub edge_list_indices: Vec<(u32, u32)>,
+ /// A flattened list of all edge targets in the graph. Edge sources are
+ /// implicit in edge_list_indices.
+ pub edge_list_data: Vec<DepNodeIndex>,
/// These are output nodes that have no incoming edges. We track
/// these separately so that when we reload all edges, we don't
/// lose track of these nodes.
- pub bootstrap_outputs: Vec<DepNode<DefPathIndex>>,
+ pub bootstrap_outputs: Vec<DepNode<DefPathHash>>,
/// These are hashes of two things:
/// - the HIR nodes in this crate
pub hashes: Vec<SerializedHash>,
}
-/// Represents a set of "reduced" dependency edge. We group the
-/// outgoing edges from a single source together.
-#[derive(Debug, RustcEncodable, RustcDecodable)]
-pub struct SerializedEdgeSet {
- pub source: DepNode<DefPathIndex>,
- pub targets: Vec<DepNode<DefPathIndex>>
+/// The index of a DepNode in the SerializedDepGraph::nodes array.
+#[derive(Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, Debug,
+ RustcEncodable, RustcDecodable)]
+pub struct DepNodeIndex(pub u32);
+
+impl DepNodeIndex {
+ #[inline]
+ pub fn new(idx: usize) -> DepNodeIndex {
+ assert!(idx <= ::std::u32::MAX as usize);
+ DepNodeIndex(idx as u32)
+ }
+}
+
+impl Idx for DepNodeIndex {
+ #[inline]
+ fn new(idx: usize) -> Self {
+ assert!(idx <= ::std::u32::MAX as usize);
+ DepNodeIndex(idx as u32)
+ }
+
+ #[inline]
+ fn index(self) -> usize {
+ self.0 as usize
+ }
}
#[derive(Debug, RustcEncodable, RustcDecodable)]
pub struct SerializedHash {
/// def-id of thing being hashed
- pub dep_node: DepNode<DefPathIndex>,
+ pub dep_node: DepNode<DefPathHash>,
/// the hash as of previous compilation, computed by code in
/// `hash` module
/// is only populated if -Z query-dep-graph is specified. It will be
/// empty otherwise. Importing crates are perfectly happy with just having
/// the DefIndex.
- pub index_map: FxHashMap<DefIndex, DefPathIndex>
+ pub index_map: FxHashMap<DefIndex, DefPathHash>
}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Code to convert a DefId into a DefPath (when serializing) and then
-//! back again (when deserializing). Note that the new DefId
-//! necessarily will not be the same as the old (and of course the
-//! item might even be removed in the meantime).
-
-use rustc::dep_graph::DepNode;
-use rustc::hir::map::DefPath;
-use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
-use rustc::ty::TyCtxt;
-use rustc::util::nodemap::DefIdMap;
-use std::fmt::{self, Debug};
-use std::iter::once;
-use std::collections::HashMap;
-
-/// Index into the DefIdDirectory
-#[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq,
- RustcEncodable, RustcDecodable)]
-pub struct DefPathIndex {
- index: u32
-}
-
-#[derive(RustcEncodable, RustcDecodable)]
-pub struct DefIdDirectory {
- // N.B. don't use Removable here because these def-ids are loaded
- // directly without remapping, so loading them should not fail.
- paths: Vec<DefPath>,
-
- // For each crate, saves the crate-name/disambiguator so that
- // later we can match crate-numbers up again.
- krates: Vec<CrateInfo>,
-}
-
-#[derive(Debug, RustcEncodable, RustcDecodable)]
-pub struct CrateInfo {
- krate: CrateNum,
- name: String,
- disambiguator: String,
-}
-
-impl DefIdDirectory {
- pub fn new(krates: Vec<CrateInfo>) -> DefIdDirectory {
- DefIdDirectory { paths: vec![], krates: krates }
- }
-
- fn max_current_crate(&self, tcx: TyCtxt) -> CrateNum {
- tcx.sess.cstore.crates()
- .into_iter()
- .max()
- .unwrap_or(LOCAL_CRATE)
- }
-
- /// Returns a string form for `index`; useful for debugging
- pub fn def_path_string(&self, tcx: TyCtxt, index: DefPathIndex) -> String {
- let path = &self.paths[index.index as usize];
- if self.krate_still_valid(tcx, self.max_current_crate(tcx), path.krate) {
- path.to_string(tcx)
- } else {
- format!("<crate {} changed>", path.krate)
- }
- }
-
- pub fn krate_still_valid(&self,
- tcx: TyCtxt,
- max_current_crate: CrateNum,
- krate: CrateNum) -> bool {
- // Check that the crate-number still matches. For now, if it
- // doesn't, just return None. We could do better, such as
- // finding the new number.
-
- if krate > max_current_crate {
- false
- } else {
- let old_info = &self.krates[krate.as_usize()];
- assert_eq!(old_info.krate, krate);
- let old_name: &str = &old_info.name;
- let old_disambiguator: &str = &old_info.disambiguator;
- let new_name: &str = &tcx.crate_name(krate).as_str();
- let new_disambiguator: &str = &tcx.crate_disambiguator(krate).as_str();
- old_name == new_name && old_disambiguator == new_disambiguator
- }
- }
-
- pub fn retrace(&self, tcx: TyCtxt) -> RetracedDefIdDirectory {
-
- fn make_key(name: &str, disambiguator: &str) -> String {
- format!("{}/{}", name, disambiguator)
- }
-
- let new_krates: HashMap<_, _> =
- once(LOCAL_CRATE)
- .chain(tcx.sess.cstore.crates())
- .map(|krate| (make_key(&tcx.crate_name(krate).as_str(),
- &tcx.crate_disambiguator(krate).as_str()), krate))
- .collect();
-
- let ids = self.paths.iter()
- .map(|path| {
- let old_krate_id = path.krate.as_usize();
- assert!(old_krate_id < self.krates.len());
- let old_crate_info = &self.krates[old_krate_id];
- let old_crate_key = make_key(&old_crate_info.name,
- &old_crate_info.disambiguator);
- if let Some(&new_crate_key) = new_krates.get(&old_crate_key) {
- tcx.retrace_path(new_crate_key, &path.data)
- } else {
- debug!("crate {:?} no longer exists", old_crate_key);
- None
- }
- })
- .collect();
- RetracedDefIdDirectory { ids: ids }
- }
-}
-
-#[derive(Debug, RustcEncodable, RustcDecodable)]
-pub struct RetracedDefIdDirectory {
- ids: Vec<Option<DefId>>
-}
-
-impl RetracedDefIdDirectory {
- pub fn def_id(&self, index: DefPathIndex) -> Option<DefId> {
- self.ids[index.index as usize]
- }
-
- pub fn map(&self, node: &DepNode<DefPathIndex>) -> Option<DepNode<DefId>> {
- node.map_def(|&index| self.def_id(index))
- }
-}
-
-pub struct DefIdDirectoryBuilder<'a,'tcx:'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- hash: DefIdMap<DefPathIndex>,
- directory: DefIdDirectory,
-}
-
-impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> {
- pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> {
- let mut krates: Vec<_> =
- once(LOCAL_CRATE)
- .chain(tcx.sess.cstore.crates())
- .map(|krate| {
- CrateInfo {
- krate: krate,
- name: tcx.crate_name(krate).to_string(),
- disambiguator: tcx.crate_disambiguator(krate).to_string()
- }
- })
- .collect();
-
- // the result of crates() is not in order, so sort list of
- // crates so that we can just index it later
- krates.sort_by_key(|k| k.krate);
-
- DefIdDirectoryBuilder {
- tcx: tcx,
- hash: DefIdMap(),
- directory: DefIdDirectory::new(krates),
- }
- }
-
- pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {
- self.tcx
- }
-
- pub fn add(&mut self, def_id: DefId) -> DefPathIndex {
- debug!("DefIdDirectoryBuilder: def_id={:?}", def_id);
- let tcx = self.tcx;
- let paths = &mut self.directory.paths;
- self.hash.entry(def_id)
- .or_insert_with(|| {
- let def_path = tcx.def_path(def_id);
- let index = paths.len() as u32;
- paths.push(def_path);
- DefPathIndex { index: index }
- })
- .clone()
- }
-
- pub fn map(&mut self, node: &DepNode<DefId>) -> DepNode<DefPathIndex> {
- node.map_def(|&def_id| Some(self.add(def_id))).unwrap()
- }
-
- pub fn directory(&self) -> &DefIdDirectory {
- &self.directory
- }
-}
-
-impl Debug for DefIdDirectory {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
- fmt.debug_list()
- .entries(self.paths.iter().enumerate())
- .finish()
- }
-}
//! previous revision to compare things to.
//!
-use super::directory::RetracedDefIdDirectory;
use super::load::DirtyNodes;
use rustc::dep_graph::{DepGraphQuery, DepNode};
use rustc::hir;
const CFG: &'static str = "cfg";
pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- dirty_inputs: &DirtyNodes,
- retraced: &RetracedDefIdDirectory) {
+ dirty_inputs: &DirtyNodes) {
// can't add `#[rustc_dirty]` etc without opting in to this feature
if !tcx.sess.features.borrow().rustc_attrs {
return;
}
let _ignore = tcx.dep_graph.in_ignore();
+ let def_path_hash_to_def_id = tcx.def_path_hash_to_def_id.as_ref().unwrap();
let dirty_inputs: FxHashSet<DepNode<DefId>> =
dirty_inputs.keys()
- .filter_map(|d| retraced.map(d))
+ .filter_map(|dep_node| {
+ dep_node.map_def(|def_path_hash| {
+ def_path_hash_to_def_id.get(def_path_hash).cloned()
+ })
+ })
.collect();
+
let query = tcx.dep_graph.query();
debug!("query-nodes: {:?}", query.nodes());
let krate = tcx.hir.krate();
use rustc::dep_graph::{DepNode, WorkProductId};
use rustc::hir::def_id::DefId;
+use rustc::hir::map::DefPathHash;
use rustc::hir::svh::Svh;
use rustc::ich::Fingerprint;
use rustc::session::Session;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_serialize::Decodable as RustcDecodable;
use rustc_serialize::opaque::Decoder;
+use std::default::Default;
use std::path::{Path};
use std::sync::Arc;
use IncrementalHashesMap;
use super::data::*;
-use super::directory::*;
use super::dirty_clean;
use super::hash::*;
use super::fs::*;
// The key is a dirty node. The value is **some** base-input that we
// can blame it on.
-pub type DirtyNodes = FxHashMap<DepNode<DefPathIndex>, DepNode<DefPathIndex>>;
+pub type DirtyNodes = FxHashMap<DepNode<DefPathHash>, DepNode<DefPathHash>>;
/// If we are in incremental mode, and a previous dep-graph exists,
/// then load up those nodes/edges that are still valid into the
None
}
+/// Try to convert a DepNode from the old dep-graph into a DepNode in the
+/// current graph by mapping the DefPathHash to a valid DefId. This will fail
+/// if the DefPathHash refers to something that has been removed (because
+/// there is no DefId for that thing anymore).
+fn retrace(tcx: TyCtxt, dep_node: &DepNode<DefPathHash>) -> Option<DepNode<DefId>> {
+ dep_node.map_def(|def_path_hash| {
+ tcx.def_path_hash_to_def_id.as_ref().unwrap().get(def_path_hash).cloned()
+ })
+}
+
/// Decode the dep graph and load the edges/nodes that are still clean
/// into `tcx.dep_graph`.
pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
return Ok(());
}
- let directory = DefIdDirectory::decode(&mut dep_graph_decoder)?;
let serialized_dep_graph = SerializedDepGraph::decode(&mut dep_graph_decoder)?;
- let edge_map: FxHashMap<_, _> = serialized_dep_graph.edges
- .into_iter()
- .map(|s| (s.source, s.targets))
- .collect();
+ let edge_map: FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>> = {
+ let capacity = serialized_dep_graph.edge_list_data.len();
+ let mut edge_map = FxHashMap::with_capacity_and_hasher(capacity, Default::default());
+
+ for (node_index, source) in serialized_dep_graph.nodes.iter().enumerate() {
+ let (start, end) = serialized_dep_graph.edge_list_indices[node_index];
+ let targets =
+ (&serialized_dep_graph.edge_list_data[start as usize .. end as usize])
+ .into_iter()
+ .map(|&node_index| serialized_dep_graph.nodes[node_index].clone())
+ .collect();
+
+ edge_map.insert(source.clone(), targets);
+ }
- // Retrace the paths in the directory to find their current location (if any).
- let retraced = directory.retrace(tcx);
+ edge_map
+ };
// Compute the set of nodes from the old graph where some input
// has changed or been removed. These are "raw" source nodes,
// the current compilation).
let dirty_raw_nodes = initial_dirty_nodes(tcx,
incremental_hashes_map,
- &serialized_dep_graph.hashes,
- &retraced);
+ &serialized_dep_graph.hashes);
let dirty_raw_nodes = transitive_dirty_nodes(&edge_map, dirty_raw_nodes);
// Recreate the edges in the graph that are still clean.
let mut extra_edges = vec![];
for (source, targets) in &edge_map {
for target in targets {
- process_edges(tcx, source, target, &edge_map, &directory, &retraced, &dirty_raw_nodes,
+ process_edges(tcx, source, target, &edge_map, &dirty_raw_nodes,
&mut clean_work_products, &mut dirty_work_products, &mut extra_edges);
}
}
// Recreate bootstrap outputs, which are outputs that have no incoming edges (and hence cannot
// be dirty).
for bootstrap_output in &serialized_dep_graph.bootstrap_outputs {
- if let Some(n) = retraced.map(bootstrap_output) {
+ if let Some(n) = retrace(tcx, bootstrap_output) {
if let DepNode::WorkProduct(ref wp) = n {
clean_work_products.insert(wp.clone());
}
// the edge from `Hir(X)` to `Bar` (or, if `Bar` itself cannot be
// recreated, to the targets of `Bar`).
while let Some((source, target)) = extra_edges.pop() {
- process_edges(tcx, source, target, &edge_map, &directory, &retraced, &dirty_raw_nodes,
+ process_edges(tcx, source, target, &edge_map, &dirty_raw_nodes,
&mut clean_work_products, &mut dirty_work_products, &mut extra_edges);
}
// dirty.
reconcile_work_products(tcx, work_products, &clean_work_products);
- dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_nodes, &retraced);
+ dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_nodes);
load_prev_metadata_hashes(tcx,
- &retraced,
&mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut());
Ok(())
}
/// a bit vector where the index is the DefPathIndex.
fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &IncrementalHashesMap,
- serialized_hashes: &[SerializedHash],
- retraced: &RetracedDefIdDirectory)
+ serialized_hashes: &[SerializedHash])
-> DirtyNodes {
let mut hcx = HashContext::new(tcx, incremental_hashes_map);
let mut dirty_nodes = FxHashMap();
};
for hash in serialized_hashes {
- if let Some(dep_node) = retraced.map(&hash.dep_node) {
+ if let Some(dep_node) = retrace(tcx, &hash.dep_node) {
if let Some(current_hash) = hcx.hash(&dep_node) {
if current_hash == hash.hash {
debug!("initial_dirty_nodes: {:?} is clean (hash={:?})",
dirty_nodes
}
-fn transitive_dirty_nodes(edge_map: &FxHashMap<DepNode<DefPathIndex>, Vec<DepNode<DefPathIndex>>>,
+fn transitive_dirty_nodes(edge_map: &FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>>,
mut dirty_nodes: DirtyNodes)
-> DirtyNodes
{
- let mut stack: Vec<(DepNode<DefPathIndex>, DepNode<DefPathIndex>)> = vec![];
+ let mut stack: Vec<(DepNode<DefPathHash>, DepNode<DefPathHash>)> = vec![];
stack.extend(dirty_nodes.iter().map(|(s, b)| (s.clone(), b.clone())));
while let Some((source, blame)) = stack.pop() {
// we know the source is dirty (because of the node `blame`)...
}
fn load_prev_metadata_hashes(tcx: TyCtxt,
- retraced: &RetracedDefIdDirectory,
output: &mut FxHashMap<DefId, Fingerprint>) {
if !tcx.sess.opts.debugging_opts.query_dep_graph {
return
debug!("load_prev_metadata_hashes() - Mapping DefIds");
assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.entry_hashes.len());
+ let def_path_hash_to_def_id = tcx.def_path_hash_to_def_id.as_ref().unwrap();
+
for serialized_hash in serialized_hashes.entry_hashes {
- let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index];
- if let Some(def_id) = retraced.def_id(def_path_index) {
+ let def_path_hash = serialized_hashes.index_map[&serialized_hash.def_index];
+ if let Some(&def_id) = def_path_hash_to_def_id.get(&def_path_hash) {
let old = output.insert(def_id, serialized_hash.hash);
assert!(old.is_none(), "already have hash for {:?}", def_id);
}
fn process_edges<'a, 'tcx, 'edges>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- source: &'edges DepNode<DefPathIndex>,
- target: &'edges DepNode<DefPathIndex>,
- edges: &'edges FxHashMap<DepNode<DefPathIndex>, Vec<DepNode<DefPathIndex>>>,
- directory: &DefIdDirectory,
- retraced: &RetracedDefIdDirectory,
+ source: &'edges DepNode<DefPathHash>,
+ target: &'edges DepNode<DefPathHash>,
+ edges: &'edges FxHashMap<DepNode<DefPathHash>, Vec<DepNode<DefPathHash>>>,
dirty_raw_nodes: &DirtyNodes,
clean_work_products: &mut FxHashSet<Arc<WorkProductId>>,
dirty_work_products: &mut FxHashSet<Arc<WorkProductId>>,
- extra_edges: &mut Vec<(&'edges DepNode<DefPathIndex>, &'edges DepNode<DefPathIndex>)>)
+ extra_edges: &mut Vec<(&'edges DepNode<DefPathHash>, &'edges DepNode<DefPathHash>)>)
{
// If the target is dirty, skip the edge. If this is an edge
// that targets a work-product, we can print the blame
if let DepNode::WorkProduct(ref wp) = *target {
if tcx.sess.opts.debugging_opts.incremental_info {
if dirty_work_products.insert(wp.clone()) {
- // It'd be nice to pretty-print these paths better than just
- // using the `Debug` impls, but wev.
+ // Try to reconstruct the human-readable version of the
+ // DepNode. This cannot be done for things that were
+ // removed.
+ let readable_blame = if let Some(dep_node) = retrace(tcx, blame) {
+ dep_node.map_def(|&def_id| Some(tcx.def_path(def_id).to_string(tcx)))
+ .unwrap()
+ } else {
+ blame.map_def(|def_path_hash| Some(format!("{:?}", def_path_hash)))
+ .unwrap()
+ };
+
println!("incremental: module {:?} is dirty because {:?} \
changed or was removed",
wp,
- blame.map_def(|&index| {
- Some(directory.def_path_string(tcx, index))
- }).unwrap());
+ readable_blame);
}
}
}
// Retrace the source -> target edges to def-ids and then create
// an edge in the graph. Retracing may yield none if some of the
// data happens to have been removed.
- if let Some(source_node) = retraced.map(source) {
- if let Some(target_node) = retraced.map(target) {
+ if let Some(source_node) = retrace(tcx, source) {
+ if let Some(target_node) = retrace(tcx, target) {
let _task = tcx.dep_graph.in_task(target_node);
tcx.dep_graph.read(source_node);
if let DepNode::WorkProduct(ref wp) = *target {
//! various HIR nodes.
mod data;
-mod directory;
mod dirty_clean;
mod fs;
mod hash;
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::DefId;
use rustc::hir::svh::Svh;
+use rustc::hir::map::DefPathHash;
use rustc::ich::Fingerprint;
use rustc::middle::cstore::EncodedMetadataHashes;
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::graph;
+use rustc_data_structures::indexed_vec::IndexVec;
use rustc_serialize::Encodable as RustcEncodable;
use rustc_serialize::opaque::Encoder;
use std::io::{self, Cursor, Write};
use IncrementalHashesMap;
use super::data::*;
-use super::directory::*;
use super::hash::*;
use super::preds::*;
use super::fs::*;
return;
}
- let mut builder = DefIdDirectoryBuilder::new(tcx);
let query = tcx.dep_graph.query();
if tcx.sess.opts.debugging_opts.incremental_info {
|e| encode_metadata_hashes(tcx,
svh,
metadata_hashes,
- &mut builder,
&mut current_metadata_hashes,
e));
}
save_in(sess,
dep_graph_path(sess),
- |e| encode_dep_graph(&preds, &mut builder, e));
+ |e| encode_dep_graph(tcx, &preds, e));
let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow();
dirty_clean::check_dirty_clean_metadata(tcx,
}
}
-pub fn encode_dep_graph(preds: &Predecessors,
- builder: &mut DefIdDirectoryBuilder,
+pub fn encode_dep_graph(tcx: TyCtxt,
+ preds: &Predecessors,
encoder: &mut Encoder)
-> io::Result<()> {
// First encode the commandline arguments hash
- let tcx = builder.tcx();
tcx.sess.opts.dep_tracking_hash().encode(encoder)?;
- // Create a flat list of (Input, WorkProduct) edges for
- // serialization.
- let mut edges = FxHashMap();
- for edge in preds.reduced_graph.all_edges() {
- let source = *preds.reduced_graph.node_data(edge.source());
- let target = *preds.reduced_graph.node_data(edge.target());
- match *target {
- DepNode::MetaData(ref def_id) => {
- // Metadata *targets* are always local metadata nodes. We have
- // already handled those in `encode_metadata_hashes`.
- assert!(def_id.is_local());
- continue;
- }
- _ => (),
- }
- debug!("serialize edge: {:?} -> {:?}", source, target);
- let source = builder.map(source);
- let target = builder.map(target);
- edges.entry(source).or_insert(vec![]).push(target);
- }
+ let to_hash_based_node = |dep_node: &DepNode<DefId>| {
+ dep_node.map_def(|&def_id| Some(tcx.def_path_hash(def_id))).unwrap()
+ };
- if tcx.sess.opts.debugging_opts.incremental_dump_hash {
- for (dep_node, hash) in &preds.hashes {
- println!("HIR hash for {:?} is {}", dep_node, hash);
+ // NB: We rely on this Vec being indexable by reduced_graph's NodeIndex.
+ let nodes: IndexVec<DepNodeIndex, DepNode<DefPathHash>> = preds
+ .reduced_graph
+ .all_nodes()
+ .iter()
+ .map(|node| to_hash_based_node(node.data))
+ .collect();
+
+ let mut edge_list_indices = Vec::with_capacity(nodes.len());
+ let mut edge_list_data = Vec::with_capacity(preds.reduced_graph.len_edges());
+
+ for node_index in 0 .. nodes.len() {
+ let start = edge_list_data.len() as u32;
+
+ for target in preds.reduced_graph.successor_nodes(graph::NodeIndex(node_index)) {
+ edge_list_data.push(DepNodeIndex::new(target.node_id()));
}
+
+ let end = edge_list_data.len() as u32;
+ debug_assert_eq!(node_index, edge_list_indices.len());
+ edge_list_indices.push((start, end));
}
- // Create the serialized dep-graph.
- let bootstrap_outputs = preds.bootstrap_outputs.iter()
- .map(|n| builder.map(n))
- .collect();
- let edges = edges.into_iter()
- .map(|(k, v)| SerializedEdgeSet { source: k, targets: v })
- .collect();
+ // Let's make sure we had no overflow there.
+ assert!(edge_list_data.len() <= ::std::u32::MAX as usize);
+ // Check that we have a consistent number of edges.
+ assert_eq!(edge_list_data.len(), preds.reduced_graph.len_edges());
+
+ let bootstrap_outputs = preds
+ .bootstrap_outputs
+ .iter()
+ .map(|n| to_hash_based_node(n))
+ .collect();
+
+ let hashes = preds
+ .hashes
+ .iter()
+ .map(|(&dep_node, &hash)| {
+ SerializedHash {
+ dep_node: to_hash_based_node(dep_node),
+ hash: hash,
+ }
+ })
+ .collect();
+
let graph = SerializedDepGraph {
+ nodes,
+ edge_list_indices,
+ edge_list_data,
bootstrap_outputs,
- edges,
- hashes: preds.hashes
- .iter()
- .map(|(&dep_node, &hash)| {
- SerializedHash {
- dep_node: builder.map(dep_node),
- hash: hash,
- }
- })
- .collect(),
+ hashes,
};
+ // Encode the graph data.
+ graph.encode(encoder)?;
+
if tcx.sess.opts.debugging_opts.incremental_info {
- println!("incremental: {} nodes in reduced dep-graph", preds.reduced_graph.len_nodes());
- println!("incremental: {} edges in serialized dep-graph", graph.edges.len());
+ println!("incremental: {} nodes in reduced dep-graph", graph.nodes.len());
+ println!("incremental: {} edges in serialized dep-graph", graph.edge_list_data.len());
println!("incremental: {} hashes in serialized dep-graph", graph.hashes.len());
}
- debug!("graph = {:#?}", graph);
-
- // Encode the directory and then the graph data.
- builder.directory().encode(encoder)?;
- graph.encode(encoder)?;
+ if tcx.sess.opts.debugging_opts.incremental_dump_hash {
+ for (dep_node, hash) in &preds.hashes {
+ println!("ICH for {:?} is {}", dep_node, hash);
+ }
+ }
Ok(())
}
pub fn encode_metadata_hashes(tcx: TyCtxt,
svh: Svh,
metadata_hashes: &EncodedMetadataHashes,
- builder: &mut DefIdDirectoryBuilder,
current_metadata_hashes: &mut FxHashMap<DefId, Fingerprint>,
encoder: &mut Encoder)
-> io::Result<()> {
let def_id = DefId::local(serialized_hash.def_index);
// Store entry in the index_map
- let def_path_index = builder.add(def_id);
- serialized_hashes.index_map.insert(def_id.index, def_path_index);
+ let def_path_hash = tcx.def_path_hash(def_id);
+ serialized_hashes.index_map.insert(def_id.index, def_path_hash);
// Record hash in current_metadata_hashes
current_metadata_hashes.insert(def_id, serialized_hash.hash);
if def.has_dtor(cx.tcx) {
return;
}
- let param_env = ty::ParamEnv::empty();
+ let param_env = ty::ParamEnv::empty(Reveal::UserFacing);
if !ty.moves_by_default(cx.tcx, param_env, item.span) {
return;
}
-> bool {
use rustc::ty::adjustment::*;
- // Check for method calls and overloaded operators.
- let opt_m = cx.tables.method_map.get(&ty::MethodCall::expr(id)).cloned();
- if let Some(m) = opt_m {
- if method_call_refers_to_method(cx.tcx, method, m.def_id, m.substs, id) {
- return true;
- }
- }
+ // Ignore non-expressions.
+ let expr = if let hir_map::NodeExpr(e) = cx.tcx.hir.get(id) {
+ e
+ } else {
+ return false;
+ };
// Check for overloaded autoderef method calls.
- let opt_adj = cx.tables.adjustments.get(&id).cloned();
- if let Some(Adjustment { kind: Adjust::DerefRef { autoderefs, .. }, .. }) = opt_adj {
- for i in 0..autoderefs {
- let method_call = ty::MethodCall::autoderef(id, i as u32);
- if let Some(m) = cx.tables.method_map.get(&method_call).cloned() {
- if method_call_refers_to_method(cx.tcx, method, m.def_id, m.substs, id) {
- return true;
- }
+ let mut source = cx.tables.expr_ty(expr);
+ for adjustment in cx.tables.expr_adjustments(expr) {
+ if let Adjust::Deref(Some(deref)) = adjustment.kind {
+ let (def_id, substs) = deref.method_call(cx.tcx, source);
+ if method_call_refers_to_method(cx.tcx, method, def_id, substs, id) {
+ return true;
}
}
+ source = adjustment.target;
+ }
+
+ // Check for method calls and overloaded operators.
+ if cx.tables.is_method_call(expr) {
+ let def_id = cx.tables.type_dependent_defs[&id].def_id();
+ let substs = cx.tables.node_substs(id);
+ if method_call_refers_to_method(cx.tcx, method, def_id, substs, id) {
+ return true;
+ }
}
// Check for calls to methods via explicit paths (e.g. `T::method()`).
- match cx.tcx.hir.get(id) {
- hir_map::NodeExpr(&hir::Expr { node: hir::ExprCall(ref callee, _), .. }) => {
+ match expr.node {
+ hir::ExprCall(ref callee, _) => {
let def = if let hir::ExprPath(ref qpath) = callee.node {
cx.tables.qpath_def(qpath, callee.id)
} else {
};
match def {
Def::Method(def_id) => {
- let substs = cx.tables.node_id_item_substs(callee.id)
- .unwrap_or_else(|| cx.tcx.intern_substs(&[]));
+ let substs = cx.tables.node_substs(callee.id);
method_call_refers_to_method(
cx.tcx, method, def_id, substs, id)
}
let trait_ref = ty::TraitRef::from_method(tcx, trait_def_id, callee_substs);
let trait_ref = ty::Binder(trait_ref);
let span = tcx.hir.span(expr_id);
+ let param_env = tcx.param_env(method.def_id);
let obligation =
traits::Obligation::new(traits::ObligationCause::misc(span, expr_id),
+ param_env,
trait_ref.to_poly_trait_predicate());
- let param_env = tcx.param_env(method.def_id);
- tcx.infer_ctxt(param_env, Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
let mut selcx = traits::SelectionContext::new(&infcx);
match selcx.select(&obligation) {
// The method comes from a `T: Trait` bound.
use rustc::ty::subst::Substs;
use rustc::ty::{self, AdtKind, Ty, TyCtxt};
use rustc::ty::layout::{Layout, Primitive};
-use rustc::traits::Reveal;
use middle::const_val::ConstVal;
use rustc_const_eval::ConstContext;
use util::nodemap::FxHashSet;
if let hir::ItemEnum(ref enum_definition, ref gens) = it.node {
if gens.ty_params.is_empty() {
// sizes only make sense for non-generic types
- let t = cx.tcx.type_of(cx.tcx.hir.local_def_id(it.id));
- let layout = cx.tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
- let ty = cx.tcx.erase_regions(&t);
- ty.layout(&infcx).unwrap_or_else(|e| {
- bug!("failed to get layout for `{}`: {}", t, e)
- })
+ let item_def_id = cx.tcx.hir.local_def_id(it.id);
+ let t = cx.tcx.type_of(item_def_id);
+ let param_env = cx.tcx.param_env(item_def_id).reveal_all();
+ let ty = cx.tcx.erase_regions(&t);
+ let layout = ty.layout(cx.tcx, param_env).unwrap_or_else(|e| {
+ bug!("failed to get layout for `{}`: {}", t, e)
});
if let Layout::General { ref variants, ref size, discr, .. } = *layout {
_ => return,
}
- if let Some(adjustment) = cx.tables.adjustments.get(&e.id) {
- if let adjustment::Adjust::DerefRef { autoref, .. } = adjustment.kind {
- match autoref {
- Some(adjustment::AutoBorrow::Ref(_, hir::MutImmutable)) => {
- cx.span_lint(UNUSED_ALLOCATION,
- e.span,
- "unnecessary allocation, use & instead");
- }
- Some(adjustment::AutoBorrow::Ref(_, hir::MutMutable)) => {
- cx.span_lint(UNUSED_ALLOCATION,
- e.span,
- "unnecessary allocation, use &mut instead");
- }
- _ => (),
- }
+ for adj in cx.tables.expr_adjustments(e) {
+ if let adjustment::Adjust::Borrow(adjustment::AutoBorrow::Ref(_, m)) = adj.kind {
+ let msg = match m {
+ hir::MutImmutable => "unnecessary allocation, use & instead",
+ hir::MutMutable => "unnecessary allocation, use &mut instead"
+ };
+ cx.span_lint(UNUSED_ALLOCATION, e.span, msg);
}
}
}
[build-dependencies]
build_helper = { path = "../build_helper" }
-gcc = "0.3.27"
+gcc = "0.3.50"
let mut cmeta = cstore::CrateMetadata {
name: name,
extern_crate: Cell::new(None),
- def_path_table: def_path_table,
+ def_path_table: Rc::new(def_path_table),
exported_symbols: exported_symbols,
trait_impls: trait_impls,
proc_macros: crate_root.macro_derive_registrar.map(|_| {
/// hashmap, which gives the reverse mapping. This allows us to
/// quickly retrace a `DefPath`, which is needed for incremental
/// compilation support.
- pub def_path_table: DefPathTable,
+ pub def_path_table: Rc<DefPathTable>,
pub exported_symbols: Tracked<FxHashSet<DefIndex>>,
ExternCrate, NativeLibrary, MetadataLoader, LinkMeta,
LinkagePreference, LoadedMacro, EncodedMetadata};
use rustc::hir::def;
-use rustc::ich;
use rustc::middle::lang_items;
use rustc::session::Session;
use rustc::ty::{self, TyCtxt};
use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::dep_graph::{DepNode, GlobalMetaDataKind};
-use rustc::hir::map::{DefKey, DefPath, DisambiguatedDefPathData};
+use rustc::hir::map::{DefKey, DefPath, DisambiguatedDefPathData, DefPathHash};
+use rustc::hir::map::definitions::DefPathTable;
use rustc::util::nodemap::{NodeSet, DefIdMap};
use rustc_back::PanicStrategy;
self.get_crate_data(def.krate).def_path(def.index)
}
- fn def_path_hash(&self, def: DefId) -> ich::Fingerprint {
+ fn def_path_hash(&self, def: DefId) -> DefPathHash {
self.get_crate_data(def.krate).def_path_hash(def.index)
}
+ fn def_path_table(&self, cnum: CrateNum) -> Rc<DefPathTable> {
+ self.get_crate_data(cnum).def_path_table.clone()
+ }
+
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>
{
self.dep_graph.read(DepNode::MetaData(def));
use schema::*;
use rustc::dep_graph::{DepGraph, DepNode, GlobalMetaDataKind};
-use rustc::hir::map::{DefKey, DefPath, DefPathData};
+use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash};
use rustc::hir;
-use rustc::ich;
use rustc::middle::cstore::LinkagePreference;
use rustc::hir::def::{self, Def, CtorKind};
}
#[inline]
- pub fn def_path_hash(&self, index: DefIndex) -> ich::Fingerprint {
+ pub fn def_path_hash(&self, index: DefIndex) -> DefPathHash {
self.def_path_table.def_path_hash(index)
}
fn expr_as_constant(&mut self, expr: Expr<'tcx>) -> Constant<'tcx> {
let this = self;
- let Expr { ty, temp_lifetime: _, temp_lifetime_was_shrunk: _, span, kind }
+ let Expr { ty, temp_lifetime: _, span, kind }
= expr;
match kind {
ExprKind::Scope { extent: _, value } =>
let temp = this.temp(expr_ty.clone(), expr_span);
let source_info = this.source_info(expr_span);
- if expr.temp_lifetime_was_shrunk && this.hir.needs_drop(expr_ty) {
- this.hir.tcx().sess.span_warn(
- expr_span,
- "this temporary used to live longer - see issue #39283 \
-(https://github.com/rust-lang/rust/issues/39283)");
- }
-
if !expr_ty.is_never() && temp_lifetime.is_some() {
this.cfg.push(block, Statement {
source_info: source_info,
exit_block.unit()
}
ExprKind::Call { ty, fun, args } => {
- let diverges = match ty.sty {
- ty::TyFnDef(_, _, ref f) | ty::TyFnPtr(ref f) => {
- // FIXME(canndrew): This is_never should probably be an is_uninhabited
- f.output().skip_binder().is_never()
- }
- _ => false
- };
+ // FIXME(canndrew): This is_never should probably be an is_uninhabited
+ let diverges = expr.ty.is_never();
let intrinsic = match ty.sty {
ty::TyFnDef(def_id, _, ref f) if
f.abi() == Abi::RustIntrinsic ||
use rustc::mir::*;
use rustc::mir::transform::MirSource;
use rustc::mir::visit::MutVisitor;
-use rustc::traits::Reveal;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::Substs;
use rustc::util::nodemap::NodeMap;
};
let src = MirSource::from_node(tcx, id);
- tcx.infer_ctxt(body_id, Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(body_id).enter(|infcx| {
let cx = Cx::new(&infcx, src);
let mut mir = if cx.tables().tainted_by_errors {
build::construct_error(cx, body_id)
{
let span = tcx.hir.span(ctor_id);
if let hir::VariantData::Tuple(ref fields, ctor_id) = *v {
- let pe = tcx.param_env(tcx.hir.local_def_id(ctor_id));
- tcx.infer_ctxt(pe, Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
let (mut mir, src) =
shim::build_adt_ctor(&infcx, ctor_id, fields, span);
block: &'tcx hir::Block)
-> ExprRef<'tcx> {
let block_ty = cx.tables().node_id_to_type(block.id);
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(block.id);
+ let temp_lifetime = cx.region_maps.temporary_scope(block.id);
let expr = Expr {
ty: block_ty,
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
span: block.span,
kind: ExprKind::Block { body: block },
};
use rustc::hir::def::{Def, CtorKind};
use rustc::middle::const_val::ConstVal;
use rustc::ty::{self, AdtKind, VariantDef, Ty};
+use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
use rustc::ty::cast::CastKind as TyCastKind;
+use rustc::ty::subst::Subst;
use rustc::hir;
-use syntax::ptr::P;
impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr {
type Output = Expr<'tcx>;
fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> {
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(self.id);
+ let temp_lifetime = cx.region_maps.temporary_scope(self.id);
let expr_extent = CodeExtent::Misc(self.id);
debug!("Expr::make_mirror(): id={}, span={:?}", self.id, self.span);
let mut expr = make_mirror_unadjusted(cx, self);
- let adj = cx.tables().adjustments.get(&self.id).cloned();
-
- debug!("make_mirror: unadjusted-expr={:?} applying adjustments={:?}",
- expr,
- adj);
// Now apply adjustments, if any.
- match adj.map(|adj| (adj.kind, adj.target)) {
- None => {}
- Some((ty::adjustment::Adjust::ReifyFnPointer, adjusted_ty)) => {
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: ExprKind::ReifyFnPointer { source: expr.to_ref() },
- };
- }
- Some((ty::adjustment::Adjust::UnsafeFnPointer, adjusted_ty)) => {
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: ExprKind::UnsafeFnPointer { source: expr.to_ref() },
- };
- }
- Some((ty::adjustment::Adjust::ClosureFnPointer, adjusted_ty)) => {
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: ExprKind::ClosureFnPointer { source: expr.to_ref() },
- };
- }
- Some((ty::adjustment::Adjust::NeverToAny, adjusted_ty)) => {
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: ExprKind::NeverToAny { source: expr.to_ref() },
- };
- }
- Some((ty::adjustment::Adjust::MutToConstPointer, adjusted_ty)) => {
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: ExprKind::Cast { source: expr.to_ref() },
- };
- }
- Some((ty::adjustment::Adjust::DerefRef { autoderefs, autoref, unsize },
- adjusted_ty)) => {
- for i in 0..autoderefs {
- let i = i as u32;
- let adjusted_ty =
- expr.ty.adjust_for_autoderef(cx.tcx, self.id, self.span, i, |mc| {
- cx.tables().method_map.get(&mc).map(|m| m.ty)
- });
- debug!("make_mirror: autoderef #{}, adjusted_ty={:?}",
- i,
- adjusted_ty);
- let method_key = ty::MethodCall::autoderef(self.id, i);
- let meth_ty = cx.tables().method_map.get(&method_key).map(|m| m.ty);
- let kind = if let Some(meth_ty) = meth_ty {
- debug!("make_mirror: overloaded autoderef (meth_ty={:?})", meth_ty);
-
- let ref_ty = cx.tcx.no_late_bound_regions(&meth_ty.fn_ret());
- let (region, mutbl) = match ref_ty {
- Some(&ty::TyS { sty: ty::TyRef(region, mt), .. }) => (region, mt.mutbl),
- _ => span_bug!(expr.span, "autoderef returned bad type"),
- };
-
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: cx.tcx.mk_ref(region,
- ty::TypeAndMut {
- ty: expr.ty,
- mutbl: mutbl,
- }),
- span: expr.span,
- kind: ExprKind::Borrow {
- region: region,
- borrow_kind: to_borrow_kind(mutbl),
- arg: expr.to_ref(),
- },
- };
-
- overloaded_lvalue(cx,
- self,
- method_key,
- PassArgs::ByRef,
- expr.to_ref(),
- vec![])
- } else {
- debug!("make_mirror: built-in autoderef");
- ExprKind::Deref { arg: expr.to_ref() }
- };
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: kind,
- };
- }
-
- if let Some(autoref) = autoref {
- let adjusted_ty = expr.ty.adjust_for_autoref(cx.tcx, Some(autoref));
- match autoref {
- ty::adjustment::AutoBorrow::Ref(r, m) => {
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: ExprKind::Borrow {
- region: r,
- borrow_kind: to_borrow_kind(m),
- arg: expr.to_ref(),
- },
- };
- }
- ty::adjustment::AutoBorrow::RawPtr(m) => {
- // Convert this to a suitable `&foo` and
- // then an unsafe coercion. Limit the region to be just this
- // expression.
- let region = ty::ReScope(expr_extent);
- let region = cx.tcx.mk_region(region);
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: cx.tcx.mk_ref(region,
- ty::TypeAndMut {
- ty: expr.ty,
- mutbl: m,
- }),
- span: self.span,
- kind: ExprKind::Borrow {
- region: region,
- borrow_kind: to_borrow_kind(m),
- arg: expr.to_ref(),
- },
- };
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: ExprKind::Cast { source: expr.to_ref() },
- };
- }
- }
- }
-
- if unsize {
- expr = Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: self.span,
- kind: ExprKind::Unsize { source: expr.to_ref() },
- };
- }
- }
+ for adjustment in cx.tables().expr_adjustments(self) {
+ debug!("make_mirror: expr={:?} applying adjustment={:?}",
+ expr,
+ adjustment);
+ expr = apply_adjustment(cx, self, expr, adjustment);
}
// Next, wrap this up in the expr's scope.
expr = Expr {
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
ty: expr.ty,
span: self.span,
kind: ExprKind::Scope {
if let Some(extent) = cx.region_maps.opt_destruction_extent(self.id) {
expr = Expr {
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
ty: expr.ty,
span: self.span,
kind: ExprKind::Scope {
}
}
+fn apply_adjustment<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
+ hir_expr: &'tcx hir::Expr,
+ mut expr: Expr<'tcx>,
+ adjustment: &Adjustment<'tcx>)
+ -> Expr<'tcx> {
+ let Expr { temp_lifetime, span, .. } = expr;
+ let kind = match adjustment.kind {
+ Adjust::ReifyFnPointer => {
+ ExprKind::ReifyFnPointer { source: expr.to_ref() }
+ }
+ Adjust::UnsafeFnPointer => {
+ ExprKind::UnsafeFnPointer { source: expr.to_ref() }
+ }
+ Adjust::ClosureFnPointer => {
+ ExprKind::ClosureFnPointer { source: expr.to_ref() }
+ }
+ Adjust::NeverToAny => {
+ ExprKind::NeverToAny { source: expr.to_ref() }
+ }
+ Adjust::MutToConstPointer => {
+ ExprKind::Cast { source: expr.to_ref() }
+ }
+ Adjust::Deref(None) => {
+ ExprKind::Deref { arg: expr.to_ref() }
+ }
+ Adjust::Deref(Some(deref)) => {
+ let call = deref.method_call(cx.tcx, expr.ty);
+
+ expr = Expr {
+ temp_lifetime,
+ ty: cx.tcx.mk_ref(deref.region,
+ ty::TypeAndMut {
+ ty: expr.ty,
+ mutbl: deref.mutbl,
+ }),
+ span,
+ kind: ExprKind::Borrow {
+ region: deref.region,
+ borrow_kind: to_borrow_kind(deref.mutbl),
+ arg: expr.to_ref(),
+ },
+ };
+
+ overloaded_lvalue(cx, hir_expr, adjustment.target, Some(call), vec![expr.to_ref()])
+ }
+ Adjust::Borrow(AutoBorrow::Ref(r, m)) => {
+ ExprKind::Borrow {
+ region: r,
+ borrow_kind: to_borrow_kind(m),
+ arg: expr.to_ref(),
+ }
+ }
+ Adjust::Borrow(AutoBorrow::RawPtr(m)) => {
+ // Convert this to a suitable `&foo` and
+ // then an unsafe coercion. Limit the region to be just this
+ // expression.
+ let region = ty::ReScope(CodeExtent::Misc(hir_expr.id));
+ let region = cx.tcx.mk_region(region);
+ expr = Expr {
+ temp_lifetime,
+ ty: cx.tcx.mk_ref(region,
+ ty::TypeAndMut {
+ ty: expr.ty,
+ mutbl: m,
+ }),
+ span,
+ kind: ExprKind::Borrow {
+ region: region,
+ borrow_kind: to_borrow_kind(m),
+ arg: expr.to_ref(),
+ },
+ };
+ ExprKind::Cast { source: expr.to_ref() }
+ }
+ Adjust::Unsize => {
+ ExprKind::Unsize { source: expr.to_ref() }
+ }
+ };
+
+ Expr {
+ temp_lifetime,
+ ty: adjustment.target,
+ span,
+ kind,
+ }
+}
+
fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
expr: &'tcx hir::Expr)
-> Expr<'tcx> {
let expr_ty = cx.tables().expr_ty(expr);
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id);
+ let temp_lifetime = cx.region_maps.temporary_scope(expr.id);
let kind = match expr.node {
// Here comes the interesting stuff:
hir::ExprMethodCall(.., ref args) => {
// Rewrite a.b(c) into UFCS form like Trait::b(a, c)
- let expr = method_callee(cx, expr, ty::MethodCall::expr(expr.id));
+ let expr = method_callee(cx, expr, None);
let args = args.iter()
.map(|e| e.to_ref())
.collect();
}
hir::ExprCall(ref fun, ref args) => {
- if cx.tables().is_method_call(expr.id) {
+ if cx.tables().is_method_call(expr) {
// The callee is something implementing Fn, FnMut, or FnOnce.
// Find the actual method implementation being called and
// build the appropriate UFCS call expression with the
// rewrite f(u, v) into FnOnce::call_once(f, (u, v))
- let method = method_callee(cx, expr, ty::MethodCall::expr(expr.id));
-
- let sig = method.ty.fn_sig();
-
- let sig = cx.tcx
- .no_late_bound_regions(&sig)
- .unwrap_or_else(|| span_bug!(expr.span, "method call has late-bound regions"));
-
- assert_eq!(sig.inputs().len(), 2);
+ let method = method_callee(cx, expr, None);
+ let arg_tys = args.iter().map(|e| cx.tables().expr_ty_adjusted(e));
let tupled_args = Expr {
- ty: sig.inputs()[1],
+ ty: cx.tcx.mk_tup(arg_tys, false),
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
span: expr.span,
kind: ExprKind::Tuple { fields: args.iter().map(ToRef::to_ref).collect() },
};
None
};
if let Some((adt_def, index)) = adt_data {
- let substs = cx.tables().node_id_item_substs(fun.id)
- .unwrap_or_else(|| cx.tcx.intern_substs(&[]));
+ let substs = cx.tables().node_substs(fun.id);
let field_refs = args.iter()
.enumerate()
.map(|(idx, e)| {
}
hir::ExprAssignOp(op, ref lhs, ref rhs) => {
- if cx.tables().is_method_call(expr.id) {
- let pass_args = if op.node.is_by_value() {
- PassArgs::ByValue
- } else {
- PassArgs::ByRef
- };
- overloaded_operator(cx,
- expr,
- ty::MethodCall::expr(expr.id),
- pass_args,
- lhs.to_ref(),
- vec![rhs])
+ if cx.tables().is_method_call(expr) {
+ overloaded_operator(cx, expr, vec![lhs.to_ref(), rhs.to_ref()])
} else {
ExprKind::AssignOp {
op: bin_op(op.node),
hir::ExprLit(..) => ExprKind::Literal { literal: cx.const_eval_literal(expr) },
hir::ExprBinary(op, ref lhs, ref rhs) => {
- if cx.tables().is_method_call(expr.id) {
- let pass_args = if op.node.is_by_value() {
- PassArgs::ByValue
- } else {
- PassArgs::ByRef
- };
- overloaded_operator(cx,
- expr,
- ty::MethodCall::expr(expr.id),
- pass_args,
- lhs.to_ref(),
- vec![rhs])
+ if cx.tables().is_method_call(expr) {
+ overloaded_operator(cx, expr, vec![lhs.to_ref(), rhs.to_ref()])
} else {
// FIXME overflow
match (op.node, cx.constness) {
}
hir::ExprIndex(ref lhs, ref index) => {
- if cx.tables().is_method_call(expr.id) {
- overloaded_lvalue(cx,
- expr,
- ty::MethodCall::expr(expr.id),
- PassArgs::ByValue,
- lhs.to_ref(),
- vec![index])
+ if cx.tables().is_method_call(expr) {
+ overloaded_lvalue(cx, expr, expr_ty, None, vec![lhs.to_ref(), index.to_ref()])
} else {
ExprKind::Index {
lhs: lhs.to_ref(),
}
hir::ExprUnary(hir::UnOp::UnDeref, ref arg) => {
- if cx.tables().is_method_call(expr.id) {
- overloaded_lvalue(cx,
- expr,
- ty::MethodCall::expr(expr.id),
- PassArgs::ByValue,
- arg.to_ref(),
- vec![])
+ if cx.tables().is_method_call(expr) {
+ overloaded_lvalue(cx, expr, expr_ty, None, vec![arg.to_ref()])
} else {
ExprKind::Deref { arg: arg.to_ref() }
}
}
hir::ExprUnary(hir::UnOp::UnNot, ref arg) => {
- if cx.tables().is_method_call(expr.id) {
- overloaded_operator(cx,
- expr,
- ty::MethodCall::expr(expr.id),
- PassArgs::ByValue,
- arg.to_ref(),
- vec![])
+ if cx.tables().is_method_call(expr) {
+ overloaded_operator(cx, expr, vec![arg.to_ref()])
} else {
ExprKind::Unary {
op: UnOp::Not,
}
hir::ExprUnary(hir::UnOp::UnNeg, ref arg) => {
- if cx.tables().is_method_call(expr.id) {
- overloaded_operator(cx,
- expr,
- ty::MethodCall::expr(expr.id),
- PassArgs::ByValue,
- arg.to_ref(),
- vec![])
+ if cx.tables().is_method_call(expr) {
+ overloaded_operator(cx, expr, vec![arg.to_ref()])
} else {
// FIXME runtime-overflow
if let hir::ExprLit(_) = arg.node {
Expr {
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
ty: expr_ty,
span: expr.span,
kind: kind,
fn method_callee<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
expr: &hir::Expr,
- method_call: ty::MethodCall)
+ custom_callee: Option<(DefId, &'tcx Substs<'tcx>)>)
-> Expr<'tcx> {
- let callee = cx.tables().method_map[&method_call];
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id);
+ let temp_lifetime = cx.region_maps.temporary_scope(expr.id);
+ let (def_id, substs) = custom_callee.unwrap_or_else(|| {
+ (cx.tables().type_dependent_defs[&expr.id].def_id(),
+ cx.tables().node_substs(expr.id))
+ });
Expr {
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: callee.ty,
+ ty: cx.tcx.type_of(def_id).subst(cx.tcx, substs),
span: expr.span,
kind: ExprKind::Literal {
literal: Literal::Value {
- value: ConstVal::Function(callee.def_id, callee.substs),
+ value: ConstVal::Function(def_id, substs),
},
},
}
expr: &'tcx hir::Expr,
def: Def)
-> ExprKind<'tcx> {
- let substs = cx.tables().node_id_item_substs(expr.id)
- .unwrap_or_else(|| cx.tcx.intern_substs(&[]));
+ let substs = cx.tables().node_substs(expr.id);
match def {
// A regular function, constructor function or a constant.
Def::Fn(def_id) |
expr: &'tcx hir::Expr,
def: Def)
-> ExprKind<'tcx> {
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id);
+ let temp_lifetime = cx.region_maps.temporary_scope(expr.id);
match def {
Def::Local(def_id) => {
Expr {
ty: closure_ty,
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
span: expr.span,
kind: ExprKind::Deref {
arg: Expr {
ty: ref_closure_ty,
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
span: expr.span,
kind: ExprKind::SelfRef,
}
Expr {
ty: closure_ty,
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
span: expr.span,
kind: ExprKind::Deref {
arg: Expr {
ty: ref_closure_ty,
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
span: expr.span,
kind: ExprKind::SelfRef,
}.to_ref(),
Expr {
ty: closure_ty,
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
span: expr.span,
kind: ExprKind::SelfRef,
}
ExprKind::Deref {
arg: Expr {
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
ty: cx.tcx.mk_ref(borrow.region,
ty::TypeAndMut {
ty: var_ty,
}
}
-enum PassArgs {
- ByValue,
- ByRef,
-}
-
fn overloaded_operator<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
expr: &'tcx hir::Expr,
- method_call: ty::MethodCall,
- pass_args: PassArgs,
- receiver: ExprRef<'tcx>,
- args: Vec<&'tcx P<hir::Expr>>)
+ args: Vec<ExprRef<'tcx>>)
-> ExprKind<'tcx> {
- // the receiver has all the adjustments that are needed, so we can
- // just push a reference to it
- let mut argrefs = vec![receiver];
-
- // the arguments, unfortunately, do not, so if this is a ByRef
- // operator, we have to gin up the autorefs (but by value is easy)
- match pass_args {
- PassArgs::ByValue => argrefs.extend(args.iter().map(|arg| arg.to_ref())),
-
- PassArgs::ByRef => {
- let region = cx.tcx.node_scope_region(expr.id);
- let (temp_lifetime, was_shrunk) =
- cx.region_maps.temporary_scope2(expr.id);
- argrefs.extend(args.iter()
- .map(|arg| {
- let arg_ty = cx.tables().expr_ty_adjusted(arg);
- let adjusted_ty = cx.tcx.mk_ref(region,
- ty::TypeAndMut {
- ty: arg_ty,
- mutbl: hir::MutImmutable,
- });
- Expr {
- temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
- ty: adjusted_ty,
- span: expr.span,
- kind: ExprKind::Borrow {
- region: region,
- borrow_kind: BorrowKind::Shared,
- arg: arg.to_ref(),
- },
- }
- .to_ref()
- }))
- }
- }
-
- // now create the call itself
- let fun = method_callee(cx, expr, method_call);
+ let fun = method_callee(cx, expr, None);
ExprKind::Call {
ty: fun.ty,
fun: fun.to_ref(),
- args: argrefs,
+ args,
}
}
fn overloaded_lvalue<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
expr: &'tcx hir::Expr,
- method_call: ty::MethodCall,
- pass_args: PassArgs,
- receiver: ExprRef<'tcx>,
- args: Vec<&'tcx P<hir::Expr>>)
+ lvalue_ty: Ty<'tcx>,
+ custom_callee: Option<(DefId, &'tcx Substs<'tcx>)>,
+ args: Vec<ExprRef<'tcx>>)
-> ExprKind<'tcx> {
// For an overloaded *x or x[y] expression of type T, the method
// call returns an &T and we must add the deref so that the types
// line up (this is because `*x` and `x[y]` represent lvalues):
- // to find the type &T of the content returned by the method;
- let ref_ty = cx.tables().method_map[&method_call].ty.fn_ret();
- let ref_ty = cx.tcx.no_late_bound_regions(&ref_ty).unwrap();
- // callees always have all late-bound regions fully instantiated,
+ let recv_ty = match args[0] {
+ ExprRef::Hair(e) => cx.tables().expr_ty_adjusted(e),
+ ExprRef::Mirror(ref e) => e.ty
+ };
+
+ // Reconstruct the output assuming it's a reference with the
+ // same region and mutability as the receiver. This holds for
+ // `Deref(Mut)::Deref(_mut)` and `Index(Mut)::index(_mut)`.
+ let (region, mt) = match recv_ty.sty {
+ ty::TyRef(region, mt) => (region, mt),
+ _ => span_bug!(expr.span, "overloaded_lvalue: receiver is not a reference"),
+ };
+ let ref_ty = cx.tcx.mk_ref(region, ty::TypeAndMut {
+ ty: lvalue_ty,
+ mutbl: mt.mutbl,
+ });
// construct the complete expression `foo()` for the overloaded call,
// which will yield the &T type
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(expr.id);
- let ref_kind = overloaded_operator(cx, expr, method_call, pass_args, receiver, args);
+ let temp_lifetime = cx.region_maps.temporary_scope(expr.id);
+ let fun = method_callee(cx, expr, custom_callee);
let ref_expr = Expr {
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
ty: ref_ty,
span: expr.span,
- kind: ref_kind,
+ kind: ExprKind::Call {
+ ty: fun.ty,
+ fun: fun.to_ref(),
+ args,
+ },
};
// construct and return a deref wrapper `*foo()`
closure_expr_id: closure_expr.id,
};
let upvar_capture = cx.tables().upvar_capture(upvar_id).unwrap();
- let (temp_lifetime, was_shrunk) = cx.region_maps.temporary_scope2(closure_expr.id);
+ let temp_lifetime = cx.region_maps.temporary_scope(closure_expr.id);
let var_ty = cx.tables().node_id_to_type(id_var);
let captured_var = Expr {
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
ty: var_ty,
span: closure_expr.span,
kind: convert_var(cx, closure_expr, freevar.def),
};
Expr {
temp_lifetime: temp_lifetime,
- temp_lifetime_was_shrunk: was_shrunk,
ty: freevar_ty,
span: closure_expr.span,
kind: ExprKind::Borrow {
pub struct Cx<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>,
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ pub param_env: ty::ParamEnv<'tcx>,
pub region_maps: Rc<RegionMaps>,
/// This is `Constness::Const` if we are compiling a `static`,
let src_id = src.item_id();
let src_def_id = tcx.hir.local_def_id(src_id);
+ let param_env = tcx.param_env(src_def_id);
let region_maps = tcx.region_maps(src_def_id);
let attrs = tcx.hir.attrs(src_id);
// Constants and const fn's always need overflow checks.
check_overflow |= constness == hir::Constness::Const;
- Cx { tcx, infcx, region_maps, constness, src, check_overflow }
+ Cx { tcx, infcx, param_env, region_maps, constness, src, check_overflow }
}
}
}
pub fn needs_drop(&mut self, ty: Ty<'tcx>) -> bool {
- let ty = self.tcx.lift_to_global(&ty).unwrap_or_else(|| {
- bug!("MIR: Cx::needs_drop({}) got \
+ let (ty, param_env) = self.tcx.lift_to_global(&(ty, self.param_env)).unwrap_or_else(|| {
+ bug!("MIR: Cx::needs_drop({:?}, {:?}) got \
type with inference types/regions",
- ty);
+ ty, self.param_env);
});
- ty.needs_drop(self.tcx.global_tcx(), self.infcx.param_env)
+ ty.needs_drop(self.tcx.global_tcx(), param_env)
}
pub fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
/// temporary; should be None only if in a constant context
pub temp_lifetime: Option<CodeExtent>,
- /// whether this temp lifetime was shrunk by #36082.
- pub temp_lifetime_was_shrunk: bool,
-
/// span of the expression in the source
pub span: Span,
use rustc::mir::*;
use rustc::mir::transform::{MirPass, MirSource};
use rustc::mir::visit::*;
-use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::{Subst,Substs};
}
}
-fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParamEnv<'tcx>,
+fn type_size_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>) -> Option<u64> {
- tcx.infer_ctxt(param_env, traits::Reveal::All).enter(|infcx| {
- ty.layout(&infcx).ok().map(|layout| {
- layout.size(&tcx.data_layout).bytes()
- })
+ ty.layout(tcx, param_env).ok().map(|layout| {
+ layout.size(&tcx.data_layout).bytes()
})
}
// Statics must be Sync.
if mode == Mode::Static {
let ty = mir.return_ty;
- tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
+ let param_env = ty::ParamEnv::empty(Reveal::UserFacing);
let cause = traits::ObligationCause::new(mir.span, id, traits::SharedStatic);
let mut fulfillment_cx = traits::FulfillmentContext::new();
- fulfillment_cx.register_bound(&infcx, ty,
+ fulfillment_cx.register_bound(&infcx,
+ param_env,
+ ty,
tcx.require_lang_item(lang_items::SyncTraitLangItem),
cause);
if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) {
#![allow(unreachable_code)]
use rustc::infer::{self, InferCtxt, InferOk};
-use rustc::traits::{self, Reveal};
+use rustc::traits;
use rustc::ty::fold::TypeFoldable;
use rustc::ty::{self, Ty, TyCtxt, TypeVariants};
use rustc::middle::const_val::ConstVal;
pub struct TypeChecker<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'gcx>,
fulfillment_cx: traits::FulfillmentContext<'tcx>,
last_span: Span,
body_id: ast::NodeId,
}
impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
- fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, body_id: ast::NodeId) -> Self {
+ fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
+ body_id: ast::NodeId,
+ param_env: ty::ParamEnv<'gcx>)
+ -> Self {
TypeChecker {
infcx: infcx,
fulfillment_cx: traits::FulfillmentContext::new(),
last_span: DUMMY_SP,
- body_id: body_id,
+ body_id,
+ param_env,
reported_errors: FxHashSet(),
}
}
infer_ok.value
}
- fn sub_types(&mut self, sup: Ty<'tcx>, sub: Ty<'tcx>)
+ fn sub_types(&mut self, sub: Ty<'tcx>, sup: Ty<'tcx>)
-> infer::UnitResult<'tcx>
{
- self.infcx.sub_types(false, &self.misc(self.last_span), sup, sub)
- .map(|ok| self.register_infer_ok_obligations(ok))
+ self.infcx.at(&self.misc(self.last_span), self.param_env)
+ .sup(sup, sub)
+ .map(|ok| self.register_infer_ok_obligations(ok))
}
fn eq_types(&mut self, span: Span, a: Ty<'tcx>, b: Ty<'tcx>)
-> infer::UnitResult<'tcx>
{
- self.infcx.eq_types(false, &self.misc(span), a, b)
- .map(|ok| self.register_infer_ok_obligations(ok))
+ self.infcx.at(&self.misc(span), self.param_env)
+ .eq(b, a)
+ .map(|ok| self.register_infer_ok_obligations(ok))
}
fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
let span = local_decl.source_info.span;
let ty = local_decl.ty;
- if !ty.is_sized(self.tcx().global_tcx(), self.infcx.param_env(), span) {
+ if !ty.is_sized(self.tcx().global_tcx(), self.param_env, span) {
// in current MIR construction, all non-control-flow rvalue
// expressions evaluate through `as_temp` or `into` a return
// slot or local, so to find all unsized rvalues it is enough
let mut selcx = traits::SelectionContext::new(self.infcx);
let cause = traits::ObligationCause::misc(self.last_span, ast::CRATE_NODE_ID);
let traits::Normalized { value, obligations } =
- traits::normalize(&mut selcx, cause, value);
+ traits::normalize(&mut selcx, self.param_env, cause, value);
debug!("normalize: value={:?} obligations={:?}",
value,
return;
}
let param_env = tcx.param_env(def_id);
- tcx.infer_ctxt(param_env, Reveal::UserFacing).enter(|infcx| {
- let mut checker = TypeChecker::new(&infcx, item_id);
+ tcx.infer_ctxt(()).enter(|infcx| {
+ let mut checker = TypeChecker::new(&infcx, item_id, param_env);
{
let mut verifier = TypeVerifier::new(&mut checker, mir);
verifier.visit_mir(mir);
debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
let tcx = self.tcx();
let iter_ty = if ptr_based {
- tcx.mk_ptr(ty::TypeAndMut { ty: ety, mutbl: hir::Mutability::MutMutable })
+ tcx.mk_mut_ptr(ety)
} else {
tcx.types.usize
};
let mut drop_block_stmts = vec![];
drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.lvalue.clone())));
if ptr_based {
- // cur = &LV[0];
- // end = &LV[len];
- drop_block_stmts.push(self.assign(&cur, Rvalue::Ref(
- tcx.types.re_erased, BorrowKind::Mut,
- self.lvalue.clone().index(zero.clone())
+ let tmp_ty = tcx.mk_mut_ptr(self.lvalue_ty(self.lvalue));
+ let tmp = Lvalue::Local(self.new_temp(tmp_ty));
+ // tmp = &LV;
+ // cur = tmp as *mut T;
+ // end = Offset(cur, len);
+ drop_block_stmts.push(self.assign(&tmp, Rvalue::Ref(
+ tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone()
)));
- drop_block_stmts.push(self.assign(&length_or_end, Rvalue::Ref(
- tcx.types.re_erased, BorrowKind::Mut,
- self.lvalue.clone().index(Operand::Consume(length.clone()))
+ drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
+ CastKind::Misc, Operand::Consume(tmp.clone()), iter_ty
+ )));
+ drop_block_stmts.push(self.assign(&length_or_end,
+ Rvalue::BinaryOp(BinOp::Offset,
+ Operand::Consume(cur.clone()), Operand::Consume(length.clone())
)));
} else {
// index = 0 (length already pushed)
let p = Path::new(file_dir);
file_path.push(p);
};
+ let _ = fs::create_dir_all(&file_path);
let file_name = format!("rustc.node{}{}{}.{}.{}.mir",
source.item_id(), promotion_id, pass_num, pass_name, disambiguator);
file_path.push(&file_name);
self.check_const_eval(&body.value);
}
- let outer_penv = self.tcx.infer_ctxt(body_id, Reveal::UserFacing).enter(|infcx| {
- let param_env = infcx.param_env.clone();
+ let outer_penv = self.tcx.infer_ctxt(body_id).enter(|infcx| {
+ let param_env = self.tcx.param_env(item_def_id);
let outer_penv = mem::replace(&mut self.param_env, param_env);
let region_maps = &self.tcx.region_maps(item_def_id);
- euv::ExprUseVisitor::new(self, region_maps, &infcx).consume_body(body);
+ euv::ExprUseVisitor::new(self, region_maps, &infcx, param_env).consume_body(body);
outer_penv
});
_ => {}
}
- let method_call = ty::MethodCall::expr(e.id);
match e.node {
hir::ExprUnary(..) |
hir::ExprBinary(..) |
- hir::ExprIndex(..) if v.tables.method_map.contains_key(&method_call) => {
+ hir::ExprIndex(..) if v.tables.is_method_call(e) => {
v.promotable = false;
}
hir::ExprBox(_) => {
}
}
hir::ExprMethodCall(..) => {
- let method = v.tables.method_map[&method_call];
- match v.tcx.associated_item(method.def_id).container {
- ty::ImplContainer(_) => v.handle_const_fn_call(method.def_id, node_ty),
+ let def_id = v.tables.type_dependent_defs[&e.id].def_id();
+ match v.tcx.associated_item(def_id).container {
+ ty::ImplContainer(_) => v.handle_const_fn_call(def_id, node_ty),
ty::TraitContainer(_) => v.promotable = false
}
}
fn check_adjustments<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr) {
use rustc::ty::adjustment::*;
- match v.tables.adjustments.get(&e.id).map(|adj| adj.kind) {
- None |
- Some(Adjust::NeverToAny) |
- Some(Adjust::ReifyFnPointer) |
- Some(Adjust::UnsafeFnPointer) |
- Some(Adjust::ClosureFnPointer) |
- Some(Adjust::MutToConstPointer) => {}
-
- Some(Adjust::DerefRef { autoderefs, .. }) => {
- if (0..autoderefs as u32)
- .any(|autoderef| v.tables.is_overloaded_autoderef(e.id, autoderef)) {
- v.promotable = false;
+ for adjustment in v.tables.expr_adjustments(e) {
+ match adjustment.kind {
+ Adjust::NeverToAny |
+ Adjust::ReifyFnPointer |
+ Adjust::UnsafeFnPointer |
+ Adjust::ClosureFnPointer |
+ Adjust::MutToConstPointer |
+ Adjust::Borrow(_) |
+ Adjust::Unsize => {}
+
+ Adjust::Deref(ref overloaded) => {
+ if overloaded.is_some() {
+ v.promotable = false;
+ break;
+ }
}
}
}
in_fn: false,
promotable: false,
mut_rvalue_borrows: NodeSet(),
- param_env: ty::ParamEnv::empty(),
+ param_env: ty::ParamEnv::empty(Reveal::UserFacing),
}.as_deep_visitor());
tcx.sess.abort_if_errors();
}
Categorization::StaticItem => {
break;
}
- Categorization::Deref(ref cmt, ..) |
+ Categorization::Deref(ref cmt, _) |
Categorization::Downcast(ref cmt, _) |
Categorization::Interior(ref cmt, _) => {
cur = cmt;
```
"##,
+E0603: r##"
+A private item was used outside its scope.
+
+Erroneous code example:
+
+```compile_fail,E0603
+mod SomeModule {
+ const PRIVATE: u32 = 0x_a_bad_1dea_u32; // This const is private, so we
+ // can't use it outside of the
+ // `SomeModule` module.
+}
+
+println!("const value: {}", SomeModule::PRIVATE); // error: constant `CONSTANT`
+ // is private
+```
+
+In order to fix this error, you need to make the item public by using the `pub`
+keyword. Example:
+
+```
+mod SomeModule {
+ pub const PRIVATE: u32 = 0x_a_bad_1dea_u32; // We set it public by using the
+ // `pub` keyword.
+}
+
+println!("const value: {}", SomeModule::PRIVATE); // ok!
+```
+"##,
+
}
register_diagnostics! {
pub found_unresolved_macro: bool,
// List of crate local macros that we need to warn about as being unused.
- // Right now this only includes macro_rules! macros.
+ // Right now this only includes macro_rules! macros, and macros 2.0.
unused_macros: FxHashSet<DefId>,
// Maps the `Mark` of an expansion to its containing module or block.
for &PrivacyError(span, name, binding) in &self.privacy_errors {
if !reported_spans.insert(span) { continue }
- self.session.span_err(span, &format!("{} `{}` is private", binding.descr(), name));
+ span_err!(self.session, span, E0603, "{} `{}` is private", binding.descr(), name);
}
}
for did in self.unused_macros.iter() {
let id_span = match *self.macro_map[did] {
SyntaxExtension::NormalTT(_, isp, _) => isp,
+ SyntaxExtension::DeclMacro(.., osp) => osp,
_ => None,
};
if let Some((id, span)) = id_span {
let module = self.current_module;
let def = Def::Macro(def_id, MacroKind::Bang);
let vis = self.resolve_visibility(&item.vis);
+ if vis != ty::Visibility::Public {
+ self.unused_macros.insert(def_id);
+ }
self.define(module, ident, MacroNS, (def, vis, item.span, expansion));
}
}
}
}
ast::ExprKind::MethodCall(..) => {
- let method_call = ty::MethodCall::expr(expr.id);
- let method_id = self.tables.method_map[&method_call].def_id;
+ let method_id = self.tables.type_dependent_defs[&expr.id].def_id();
let (def_id, decl_id) = match self.tcx.associated_item(method_id).container {
ty::ImplContainer(_) => (Some(method_id), None),
ty::TraitContainer(_) => (None, Some(method_id)),
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
+
+[target."cfg(windows)".dependencies]
+gcc = "0.3.50"
use super::linker::Linker;
use super::rpath::RPathConfig;
use super::rpath;
-use super::msvc;
use metadata::METADATA_FILENAME;
use rustc::session::config::{self, NoDebugInfo, OutputFilenames, Input, OutputType};
use rustc::session::filesearch;
return r;
}
-// The third parameter is for an extra path to add to PATH for MSVC
-// cross linkers for host toolchain DLL dependencies
-pub fn get_linker(sess: &Session) -> (String, Command, Option<PathBuf>) {
+// The third parameter is for env vars, used to set up the PATH for MSVC
+// to find its DLLs
+pub fn get_linker(sess: &Session) -> (String, Command, Vec<(OsString, OsString)>) {
if let Some(ref linker) = sess.opts.cg.linker {
- (linker.clone(), Command::new(linker), None)
+ (linker.clone(), Command::new(linker), vec![])
} else if sess.target.target.options.is_like_msvc {
- let (cmd, host) = msvc::link_exe_cmd(sess);
- ("link.exe".to_string(), cmd, host)
+ let (cmd, envs) = msvc_link_exe_cmd(sess);
+ ("link.exe".to_string(), cmd, envs)
} else {
(sess.target.target.options.linker.clone(),
- Command::new(&sess.target.target.options.linker), None)
+ Command::new(&sess.target.target.options.linker), vec![])
}
}
+#[cfg(windows)]
+pub fn msvc_link_exe_cmd(sess: &Session) -> (Command, Vec<(OsString, OsString)>) {
+ use gcc::windows_registry;
+
+ let target = &sess.opts.target_triple;
+ let tool = windows_registry::find_tool(target, "link.exe");
+
+ if let Some(tool) = tool {
+ let envs = tool.env().to_vec();
+ (tool.to_command(), envs)
+ } else {
+ debug!("Failed to locate linker.");
+ (Command::new("link.exe"), vec![])
+ }
+}
+
+#[cfg(not(windows))]
+pub fn msvc_link_exe_cmd(_sess: &Session) -> (Command, Vec<(OsString, OsString)>) {
+ (Command::new("link.exe"), vec![])
+}
+
pub fn get_ar_prog(sess: &Session) -> String {
sess.opts.cg.ar.clone().unwrap_or_else(|| {
sess.target.target.options.ar.clone()
let flavor = sess.linker_flavor();
// The invocations of cc share some flags across platforms
- let (pname, mut cmd, extra) = get_linker(sess);
- cmd.env("PATH", command_path(sess, extra));
+ let (pname, mut cmd, envs) = get_linker(sess);
+ // This will set PATH on MSVC
+ cmd.envs(envs);
let root = sess.target_filesearch(PathKind::Native).get_lib_path();
if let Some(args) = sess.target.target.options.pre_link_args.get(&flavor) {
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![allow(non_camel_case_types, non_snake_case)]
-
-use libc::c_void;
-use std::mem;
-
-type DWORD = u32;
-type WORD = u16;
-type LPVOID = *mut c_void;
-type DWORD_PTR = usize;
-
-const PROCESSOR_ARCHITECTURE_INTEL: WORD = 0;
-const PROCESSOR_ARCHITECTURE_AMD64: WORD = 9;
-
-#[repr(C)]
-struct SYSTEM_INFO {
- wProcessorArchitecture: WORD,
- _wReserved: WORD,
- _dwPageSize: DWORD,
- _lpMinimumApplicationAddress: LPVOID,
- _lpMaximumApplicationAddress: LPVOID,
- _dwActiveProcessorMask: DWORD_PTR,
- _dwNumberOfProcessors: DWORD,
- _dwProcessorType: DWORD,
- _dwAllocationGranularity: DWORD,
- _wProcessorLevel: WORD,
- _wProcessorRevision: WORD,
-}
-
-extern "system" {
- fn GetNativeSystemInfo(lpSystemInfo: *mut SYSTEM_INFO);
-}
-
-pub enum Arch {
- X86,
- Amd64,
-}
-
-pub fn host_arch() -> Option<Arch> {
- let mut info = unsafe { mem::zeroed() };
- unsafe { GetNativeSystemInfo(&mut info) };
- match info.wProcessorArchitecture {
- PROCESSOR_ARCHITECTURE_INTEL => Some(Arch::X86),
- PROCESSOR_ARCHITECTURE_AMD64 => Some(Arch::Amd64),
- _ => None,
- }
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! MSVC-specific logic for linkers and such.
-//!
-//! This module contains a cross-platform interface but has a blank unix
-//! implementation. The Windows implementation builds on top of Windows native
-//! libraries (reading registry keys), so it otherwise wouldn't link on unix.
-//!
-//! Note that we don't have much special logic for finding the system linker on
-//! any other platforms, so it may seem a little odd to single out MSVC to have
-//! a good deal of code just to find the linker. Unlike Unix systems, however,
-//! the MSVC linker is not in the system PATH by default. It also additionally
-//! needs a few environment variables or command line flags to be able to link
-//! against system libraries.
-//!
-//! In order to have a nice smooth experience on Windows, the logic in this file
-//! is here to find the MSVC linker and set it up in the default configuration
-//! one would need to set up anyway. This means that the Rust compiler can be
-//! run not only in the developer shells of MSVC but also the standard cmd.exe
-//! shell or MSYS shells.
-//!
-//! As a high-level note, all logic in this module for looking up various
-//! paths/files is based on Microsoft's logic in their vcvars bat files, but
-//! comments can also be found below leading through the various code paths.
-
-// A simple macro to make this option mess easier to read
-#[cfg(windows)]
-macro_rules! otry {
- ($expr:expr) => (match $expr {
- Some(val) => val,
- None => return None,
- })
-}
-
-#[cfg(windows)]
-mod registry;
-#[cfg(windows)]
-mod arch;
-
-#[cfg(windows)]
-mod platform {
- use std::env;
- use std::ffi::OsString;
- use std::fs;
- use std::path::{Path, PathBuf};
- use std::process::Command;
- use rustc::session::Session;
- use super::arch::{host_arch, Arch};
- use super::registry::LOCAL_MACHINE;
-
- // First we need to figure out whether the environment is already correctly
- // configured by vcvars. We do this by looking at the environment variable
- // `VCINSTALLDIR` which is always set by vcvars, and unlikely to be set
- // otherwise. If it is defined, then we find `link.exe` in `PATH and trust
- // that everything else is configured correctly.
- //
- // If `VCINSTALLDIR` wasn't defined (or we couldn't find the linker where
- // it claimed it should be), then we resort to finding everything
- // ourselves. First we find where the latest version of MSVC is installed
- // and what version it is. Then based on the version we find the
- // appropriate SDKs.
- //
- // If despite our best efforts we are still unable to find MSVC then we
- // just blindly call `link.exe` and hope for the best.
- //
- // This code only supports VC 11 through 15. For versions older than that
- // the user will need to manually execute the appropriate vcvars bat file
- // and it should hopefully work.
- //
- // The second member of the tuple we return is the directory for the host
- // linker toolchain, which is necessary when using the cross linkers.
- pub fn link_exe_cmd(sess: &Session) -> (Command, Option<PathBuf>) {
- let arch = &sess.target.target.arch;
- env::var_os("VCINSTALLDIR").and_then(|_| {
- debug!("Detected that vcvars was already run.");
- let path = otry!(env::var_os("PATH"));
- // Mingw has its own link which is not the link we want so we
- // look for `cl.exe` too as a precaution.
- env::split_paths(&path).find(|path| {
- path.join("cl.exe").is_file()
- && path.join("link.exe").is_file()
- }).map(|path| {
- (Command::new(path.join("link.exe")), None)
- })
- }).or_else(|| {
- None.or_else(|| {
- find_msvc_latest(arch, "15.0")
- }).or_else(|| {
- find_msvc_latest(arch, "14.0")
- }).or_else(|| {
- find_msvc_12(arch)
- }).or_else(|| {
- find_msvc_11(arch)
- }).map(|(cmd, path)| (cmd, Some(path)))
- }).unwrap_or_else(|| {
- debug!("Failed to locate linker.");
- (Command::new("link.exe"), None)
- })
- }
-
- // For MSVC 14 or newer we need to find the Universal CRT as well as either
- // the Windows 10 SDK or Windows 8.1 SDK.
- fn find_msvc_latest(arch: &str, ver: &str) -> Option<(Command, PathBuf)> {
- let vcdir = otry!(get_vc_dir(ver));
- let (mut cmd, host) = otry!(get_linker(&vcdir, arch));
- let sub = otry!(lib_subdir(arch));
- let ucrt = otry!(get_ucrt_dir());
- debug!("Found Universal CRT {:?}", ucrt);
- add_lib(&mut cmd, &ucrt.join("ucrt").join(sub));
- if let Some(dir) = get_sdk10_dir() {
- debug!("Found Win10 SDK {:?}", dir);
- add_lib(&mut cmd, &dir.join("um").join(sub));
- } else if let Some(dir) = get_sdk81_dir() {
- debug!("Found Win8.1 SDK {:?}", dir);
- add_lib(&mut cmd, &dir.join("um").join(sub));
- } else {
- return None
- }
- Some((cmd, host))
- }
-
- // For MSVC 12 we need to find the Windows 8.1 SDK.
- fn find_msvc_12(arch: &str) -> Option<(Command, PathBuf)> {
- let vcdir = otry!(get_vc_dir("12.0"));
- let (mut cmd, host) = otry!(get_linker(&vcdir, arch));
- let sub = otry!(lib_subdir(arch));
- let sdk81 = otry!(get_sdk81_dir());
- debug!("Found Win8.1 SDK {:?}", sdk81);
- add_lib(&mut cmd, &sdk81.join("um").join(sub));
- Some((cmd, host))
- }
-
- // For MSVC 11 we need to find the Windows 8 SDK.
- fn find_msvc_11(arch: &str) -> Option<(Command, PathBuf)> {
- let vcdir = otry!(get_vc_dir("11.0"));
- let (mut cmd, host) = otry!(get_linker(&vcdir, arch));
- let sub = otry!(lib_subdir(arch));
- let sdk8 = otry!(get_sdk8_dir());
- debug!("Found Win8 SDK {:?}", sdk8);
- add_lib(&mut cmd, &sdk8.join("um").join(sub));
- Some((cmd, host))
- }
-
- // A convenience function to append library paths.
- fn add_lib(cmd: &mut Command, lib: &Path) {
- let mut arg: OsString = "/LIBPATH:".into();
- arg.push(lib);
- cmd.arg(arg);
- }
-
- // Given a possible MSVC installation directory, we look for the linker and
- // then add the MSVC library path.
- fn get_linker(path: &Path, arch: &str) -> Option<(Command, PathBuf)> {
- debug!("Looking for linker in {:?}", path);
- bin_subdir(arch).into_iter().map(|(sub, host)| {
- (path.join("bin").join(sub).join("link.exe"),
- path.join("bin").join(host))
- }).filter(|&(ref path, _)| {
- path.is_file()
- }).map(|(path, host)| {
- (Command::new(path), host)
- }).filter_map(|(mut cmd, host)| {
- let sub = otry!(vc_lib_subdir(arch));
- add_lib(&mut cmd, &path.join("lib").join(sub));
- Some((cmd, host))
- }).next()
- }
-
- // To find MSVC we look in a specific registry key for the version we are
- // trying to find.
- fn get_vc_dir(ver: &str) -> Option<PathBuf> {
- let key = otry!(LOCAL_MACHINE
- .open(r"SOFTWARE\Microsoft\VisualStudio\SxS\VC7".as_ref()).ok());
- let path = otry!(key.query_str(ver).ok());
- Some(path.into())
- }
-
- // To find the Universal CRT we look in a specific registry key for where
- // all the Universal CRTs are located and then sort them asciibetically to
- // find the newest version. While this sort of sorting isn't ideal, it is
- // what vcvars does so that's good enough for us.
- fn get_ucrt_dir() -> Option<PathBuf> {
- let key = otry!(LOCAL_MACHINE
- .open(r"SOFTWARE\Microsoft\Windows Kits\Installed Roots".as_ref()).ok());
- let root = otry!(key.query_str("KitsRoot10").ok());
- let readdir = otry!(fs::read_dir(Path::new(&root).join("lib")).ok());
- readdir.filter_map(|dir| {
- dir.ok()
- }).map(|dir| {
- dir.path()
- }).filter(|dir| {
- dir.components().last().and_then(|c| {
- c.as_os_str().to_str()
- }).map(|c| {
- c.starts_with("10.") && dir.join("ucrt").is_dir()
- }).unwrap_or(false)
- }).max()
- }
-
- // Vcvars finds the correct version of the Windows 10 SDK by looking
- // for the include `um\Windows.h` because sometimes a given version will
- // only have UCRT bits without the rest of the SDK. Since we only care about
- // libraries and not includes, we instead look for `um\x64\kernel32.lib`.
- // Since the 32-bit and 64-bit libraries are always installed together we
- // only need to bother checking x64, making this code a tiny bit simpler.
- // Like we do for the Universal CRT, we sort the possibilities
- // asciibetically to find the newest one as that is what vcvars does.
- fn get_sdk10_dir() -> Option<PathBuf> {
- let key = otry!(LOCAL_MACHINE
- .open(r"SOFTWARE\Microsoft\Microsoft SDKs\Windows\v10.0".as_ref()).ok());
- let root = otry!(key.query_str("InstallationFolder").ok());
- let readdir = otry!(fs::read_dir(Path::new(&root).join("lib")).ok());
- let mut dirs: Vec<_> = readdir.filter_map(|dir| dir.ok())
- .map(|dir| dir.path()).collect();
- dirs.sort();
- dirs.into_iter().rev().filter(|dir| {
- dir.join("um").join("x64").join("kernel32.lib").is_file()
- }).next()
- }
-
- // Interestingly there are several subdirectories, `win7` `win8` and
- // `winv6.3`. Vcvars seems to only care about `winv6.3` though, so the same
- // applies to us. Note that if we were targetting kernel mode drivers
- // instead of user mode applications, we would care.
- fn get_sdk81_dir() -> Option<PathBuf> {
- let key = otry!(LOCAL_MACHINE
- .open(r"SOFTWARE\Microsoft\Microsoft SDKs\Windows\v8.1".as_ref()).ok());
- let root = otry!(key.query_str("InstallationFolder").ok());
- Some(Path::new(&root).join("lib").join("winv6.3"))
- }
-
- fn get_sdk8_dir() -> Option<PathBuf> {
- let key = otry!(LOCAL_MACHINE
- .open(r"SOFTWARE\Microsoft\Microsoft SDKs\Windows\v8.0".as_ref()).ok());
- let root = otry!(key.query_str("InstallationFolder").ok());
- Some(Path::new(&root).join("lib").join("win8"))
- }
-
- // When choosing the linker toolchain to use, we have to choose the one
- // which matches the host architecture. Otherwise we end up in situations
- // where someone on 32-bit Windows is trying to cross compile to 64-bit and
- // it tries to invoke the native 64-bit linker which won't work.
- //
- // For the return value of this function, the first member of the tuple is
- // the folder of the linker we will be invoking, while the second member
- // is the folder of the host toolchain for that linker which is essential
- // when using a cross linker. We return a Vec since on x64 there are often
- // two linkers that can target the architecture we desire. The 64-bit host
- // linker is preferred, and hence first, due to 64-bit allowing it more
- // address space to work with and potentially being faster.
- //
- // FIXME - Figure out what happens when the host architecture is arm.
- fn bin_subdir(arch: &str) -> Vec<(&'static str, &'static str)> {
- match (arch, host_arch()) {
- ("x86", Some(Arch::X86)) => vec![("", "")],
- ("x86", Some(Arch::Amd64)) => vec![("amd64_x86", "amd64"), ("", "")],
- ("x86_64", Some(Arch::X86)) => vec![("x86_amd64", "")],
- ("x86_64", Some(Arch::Amd64)) => vec![("amd64", "amd64"), ("x86_amd64", "")],
- ("arm", Some(Arch::X86)) => vec![("x86_arm", "")],
- ("arm", Some(Arch::Amd64)) => vec![("amd64_arm", "amd64"), ("x86_arm", "")],
- _ => vec![],
- }
- }
-
- fn lib_subdir(arch: &str) -> Option<&'static str> {
- match arch {
- "x86" => Some("x86"),
- "x86_64" => Some("x64"),
- "arm" => Some("arm"),
- _ => None,
- }
- }
-
- // MSVC's x86 libraries are not in a subfolder
- fn vc_lib_subdir(arch: &str) -> Option<&'static str> {
- match arch {
- "x86" => Some(""),
- "x86_64" => Some("amd64"),
- "arm" => Some("arm"),
- _ => None,
- }
- }
-}
-
-// If we're not on Windows, then there's no registry to search through and MSVC
-// wouldn't be able to run, so we just call `link.exe` and hope for the best.
-#[cfg(not(windows))]
-mod platform {
- use std::path::PathBuf;
- use std::process::Command;
- use rustc::session::Session;
- pub fn link_exe_cmd(_sess: &Session) -> (Command, Option<PathBuf>) {
- (Command::new("link.exe"), None)
- }
-}
-
-pub use self::platform::*;
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::io;
-use std::ffi::{OsString, OsStr};
-use std::os::windows::prelude::*;
-use std::ptr;
-use libc::c_long;
-
-pub type DWORD = u32;
-type LPCWSTR = *const u16;
-type LONG = c_long;
-type LPDWORD = *mut DWORD;
-type LPBYTE = *mut u8;
-
-
-const HKEY_LOCAL_MACHINE: HKEY = 0x80000002 as HKEY;
-const KEY_WOW64_32KEY: REGSAM = 0x0200;
-const KEY_READ: REGSAM = (STANDARD_RIGTS_READ | KEY_QUERY_VALUE |
- KEY_ENUMERATE_SUB_KEYS | KEY_NOTIFY) & !SYNCHRONIZE;
-const STANDARD_RIGTS_READ: REGSAM = READ_CONTROL;
-const READ_CONTROL: REGSAM = 0x00020000;
-const KEY_QUERY_VALUE: REGSAM = 0x0001;
-const KEY_ENUMERATE_SUB_KEYS: REGSAM = 0x0008;
-const KEY_NOTIFY: REGSAM = 0x0010;
-const SYNCHRONIZE: REGSAM = 0x00100000;
-const REG_SZ: DWORD = 1;
-const ERROR_SUCCESS: i32 = 0;
-
-pub enum __HKEY__ {}
-pub type HKEY = *mut __HKEY__;
-pub type PHKEY = *mut HKEY;
-pub type REGSAM = DWORD;
-
-#[link(name = "advapi32")]
-extern "system" {
- fn RegOpenKeyExW(hKey: HKEY,
- lpSubKey: LPCWSTR,
- ulOptions: DWORD,
- samDesired: REGSAM,
- phkResult: PHKEY) -> LONG;
- fn RegQueryValueExW(hKey: HKEY,
- lpValueName: LPCWSTR,
- lpReserved: LPDWORD,
- lpType: LPDWORD,
- lpData: LPBYTE,
- lpcbData: LPDWORD) -> LONG;
- fn RegCloseKey(hKey: HKEY) -> LONG;
-}
-
-pub struct RegistryKey(Repr);
-
-struct OwnedKey(HKEY);
-
-enum Repr {
- Const(HKEY),
- Owned(OwnedKey),
-}
-
-unsafe impl Sync for RegistryKey {}
-unsafe impl Send for RegistryKey {}
-
-pub static LOCAL_MACHINE: RegistryKey = RegistryKey(Repr::Const(HKEY_LOCAL_MACHINE));
-
-impl RegistryKey {
- fn raw(&self) -> HKEY {
- match self.0 {
- Repr::Const(val) => val,
- Repr::Owned(ref val) => val.0,
- }
- }
-
- pub fn open(&self, key: &OsStr) -> io::Result<RegistryKey> {
- let key = key.encode_wide().chain(Some(0)).collect::<Vec<_>>();
- let mut ret = ptr::null_mut();
- let err = unsafe {
- RegOpenKeyExW(self.raw(), key.as_ptr(), 0,
- KEY_READ | KEY_WOW64_32KEY, &mut ret)
- };
- if err == ERROR_SUCCESS {
- Ok(RegistryKey(Repr::Owned(OwnedKey(ret))))
- } else {
- Err(io::Error::from_raw_os_error(err as i32))
- }
- }
-
- pub fn query_str(&self, name: &str) -> io::Result<OsString> {
- let name: &OsStr = name.as_ref();
- let name = name.encode_wide().chain(Some(0)).collect::<Vec<_>>();
- let mut len = 0;
- let mut kind = 0;
- unsafe {
- let err = RegQueryValueExW(self.raw(), name.as_ptr(), ptr::null_mut(),
- &mut kind, ptr::null_mut(), &mut len);
- if err != ERROR_SUCCESS {
- return Err(io::Error::from_raw_os_error(err as i32))
- }
- if kind != REG_SZ {
- return Err(io::Error::new(io::ErrorKind::Other,
- "registry key wasn't a string"))
- }
-
- // The length here is the length in bytes, but we're using wide
- // characters so we need to be sure to halve it for the capacity
- // passed in.
- let mut v = Vec::with_capacity(len as usize / 2);
- let err = RegQueryValueExW(self.raw(), name.as_ptr(), ptr::null_mut(),
- ptr::null_mut(), v.as_mut_ptr() as *mut _,
- &mut len);
- if err != ERROR_SUCCESS {
- return Err(io::Error::from_raw_os_error(err as i32))
- }
- v.set_len(len as usize / 2);
-
- // Some registry keys may have a terminating nul character, but
- // we're not interested in that, so chop it off if it's there.
- if v[v.len() - 1] == 0 {
- v.pop();
- }
- Ok(OsString::from_wide(&v))
- }
- }
-}
-
-impl Drop for OwnedKey {
- fn drop(&mut self) {
- unsafe { RegCloseKey(self.0); }
- }
-}
use rustc::hir::map as hir_map;
use rustc::util::common::time;
use rustc::session::config::{self, NoDebugInfo};
-use rustc::session::{self, DataTypeKind, Session};
+use rustc::session::Session;
use rustc_incremental::IncrementalHashesMap;
use abi;
use mir::lvalue::LvalueRef;
use syntax_pos::Span;
use syntax::attr;
use rustc::hir;
-use rustc::ty::layout::{self, Layout};
use syntax::ast;
use mir::lvalue::Alignment;
&exported_symbols);
});
- if tcx.sess.opts.debugging_opts.print_type_sizes {
- gather_type_sizes(tcx);
- }
-
if sess.target.target.options.is_like_msvc &&
sess.crate_types.borrow().iter().any(|ct| *ct == config::CrateTypeRlib) {
create_imps(sess, &llvm_modules);
}
}
-fn gather_type_sizes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- let layout_cache = tcx.layout_cache.borrow();
- for (ty, layout) in layout_cache.iter() {
-
- // (delay format until we actually need it)
- let record = |kind, opt_discr_size, variants| {
- let type_desc = format!("{:?}", ty);
- let overall_size = layout.size(tcx);
- let align = layout.align(tcx);
- tcx.sess.code_stats.borrow_mut().record_type_size(kind,
- type_desc,
- align,
- overall_size,
- opt_discr_size,
- variants);
- };
-
- let (adt_def, substs) = match ty.sty {
- ty::TyAdt(ref adt_def, substs) => {
- debug!("print-type-size t: `{:?}` process adt", ty);
- (adt_def, substs)
- }
-
- ty::TyClosure(..) => {
- debug!("print-type-size t: `{:?}` record closure", ty);
- record(DataTypeKind::Closure, None, vec![]);
- continue;
- }
-
- _ => {
- debug!("print-type-size t: `{:?}` skip non-nominal", ty);
- continue;
- }
- };
-
- let adt_kind = adt_def.adt_kind();
-
- let build_field_info = |(field_name, field_ty): (ast::Name, Ty), offset: &layout::Size| {
- match layout_cache.get(&field_ty) {
- None => bug!("no layout found for field {} type: `{:?}`", field_name, field_ty),
- Some(field_layout) => {
- session::FieldInfo {
- name: field_name.to_string(),
- offset: offset.bytes(),
- size: field_layout.size(tcx).bytes(),
- align: field_layout.align(tcx).abi(),
- }
- }
- }
- };
-
- let build_primitive_info = |name: ast::Name, value: &layout::Primitive| {
- session::VariantInfo {
- name: Some(name.to_string()),
- kind: session::SizeKind::Exact,
- align: value.align(tcx).abi(),
- size: value.size(tcx).bytes(),
- fields: vec![],
- }
- };
-
- enum Fields<'a> {
- WithDiscrim(&'a layout::Struct),
- NoDiscrim(&'a layout::Struct),
- }
-
- let build_variant_info = |n: Option<ast::Name>, flds: &[(ast::Name, Ty)], layout: Fields| {
- let (s, field_offsets) = match layout {
- Fields::WithDiscrim(s) => (s, &s.offsets[1..]),
- Fields::NoDiscrim(s) => (s, &s.offsets[0..]),
- };
- let field_info: Vec<_> = flds.iter()
- .zip(field_offsets.iter())
- .map(|(&field_name_ty, offset)| build_field_info(field_name_ty, offset))
- .collect();
-
- session::VariantInfo {
- name: n.map(|n|n.to_string()),
- kind: if s.sized {
- session::SizeKind::Exact
- } else {
- session::SizeKind::Min
- },
- align: s.align.abi(),
- size: s.min_size.bytes(),
- fields: field_info,
- }
- };
-
- match **layout {
- Layout::StructWrappedNullablePointer { nonnull: ref variant_layout,
- nndiscr,
- discrfield: _,
- discrfield_source: _ } => {
- debug!("print-type-size t: `{:?}` adt struct-wrapped nullable nndiscr {} is {:?}",
- ty, nndiscr, variant_layout);
- let variant_def = &adt_def.variants[nndiscr as usize];
- let fields: Vec<_> = variant_def.fields.iter()
- .map(|field_def| (field_def.name, field_def.ty(tcx, substs)))
- .collect();
- record(adt_kind.into(),
- None,
- vec![build_variant_info(Some(variant_def.name),
- &fields,
- Fields::NoDiscrim(variant_layout))]);
- }
- Layout::RawNullablePointer { nndiscr, value } => {
- debug!("print-type-size t: `{:?}` adt raw nullable nndiscr {} is {:?}",
- ty, nndiscr, value);
- let variant_def = &adt_def.variants[nndiscr as usize];
- record(adt_kind.into(), None,
- vec![build_primitive_info(variant_def.name, &value)]);
- }
- Layout::Univariant { variant: ref variant_layout, non_zero: _ } => {
- let variant_names = || {
- adt_def.variants.iter().map(|v|format!("{}", v.name)).collect::<Vec<_>>()
- };
- debug!("print-type-size t: `{:?}` adt univariant {:?} variants: {:?}",
- ty, variant_layout, variant_names());
- assert!(adt_def.variants.len() <= 1,
- "univariant with variants {:?}", variant_names());
- if adt_def.variants.len() == 1 {
- let variant_def = &adt_def.variants[0];
- let fields: Vec<_> = variant_def.fields.iter()
- .map(|field_def| (field_def.name, field_def.ty(tcx, substs)))
- .collect();
- record(adt_kind.into(),
- None,
- vec![build_variant_info(Some(variant_def.name),
- &fields,
- Fields::NoDiscrim(variant_layout))]);
- } else {
- // (This case arises for *empty* enums; so give it
- // zero variants.)
- record(adt_kind.into(), None, vec![]);
- }
- }
-
- Layout::General { ref variants, discr, .. } => {
- debug!("print-type-size t: `{:?}` adt general variants def {} layouts {} {:?}",
- ty, adt_def.variants.len(), variants.len(), variants);
- let variant_infos: Vec<_> = adt_def.variants.iter()
- .zip(variants.iter())
- .map(|(variant_def, variant_layout)| {
- let fields: Vec<_> = variant_def.fields.iter()
- .map(|field_def| (field_def.name, field_def.ty(tcx, substs)))
- .collect();
- build_variant_info(Some(variant_def.name),
- &fields,
- Fields::WithDiscrim(variant_layout))
- })
- .collect();
- record(adt_kind.into(), Some(discr.size()), variant_infos);
- }
-
- Layout::UntaggedUnion { ref variants } => {
- debug!("print-type-size t: `{:?}` adt union variants {:?}",
- ty, variants);
- // layout does not currently store info about each
- // variant...
- record(adt_kind.into(), None, Vec::new());
- }
-
- Layout::CEnum { discr, .. } => {
- debug!("print-type-size t: `{:?}` adt c-like enum", ty);
- let variant_infos: Vec<_> = adt_def.variants.iter()
- .map(|variant_def| {
- build_primitive_info(variant_def.name,
- &layout::Primitive::Int(discr))
- })
- .collect();
- record(adt_kind.into(), Some(discr.size()), variant_infos);
- }
-
- // other cases provide little interesting (i.e. adjustable
- // via representation tweaks) size info beyond total size.
- Layout::Scalar { .. } |
- Layout::Vector { .. } |
- Layout::Array { .. } |
- Layout::FatPointer { .. } => {
- debug!("print-type-size t: `{:?}` adt other", ty);
- record(adt_kind.into(), None, Vec::new())
- }
- }
- }
-}
-
#[inline(never)] // give this a place in the profiler
fn assert_symbols_are_distinct<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trans_items: I)
where I: Iterator<Item=&'a TransItem<'tcx>>
use partitioning::CodegenUnit;
use type_::Type;
use rustc_data_structures::base_n;
-use rustc::ty::subst::Substs;
-use rustc::ty::{self, Ty, TyCtxt};
-use rustc::ty::layout::{LayoutTyper, TyLayout};
use rustc::session::config::{self, NoDebugInfo};
use rustc::session::Session;
+use rustc::ty::subst::Substs;
+use rustc::ty::{self, Ty, TyCtxt};
+use rustc::ty::layout::{LayoutCx, LayoutError, LayoutTyper, TyLayout};
use rustc::util::nodemap::{NodeSet, DefIdMap, FxHashMap};
use std::ffi::{CStr, CString};
}
pub fn type_needs_drop(&self, ty: Ty<'tcx>) -> bool {
- ty.needs_drop(self.tcx, ty::ParamEnv::empty())
+ ty.needs_drop(self.tcx, ty::ParamEnv::empty(traits::Reveal::All))
}
pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
- ty.is_sized(self.tcx, ty::ParamEnv::empty(), DUMMY_SP)
+ ty.is_sized(self.tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
}
pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
- ty.is_freeze(self.tcx, ty::ParamEnv::empty(), DUMMY_SP)
+ ty.is_freeze(self.tcx, ty::ParamEnv::empty(traits::Reveal::All), DUMMY_SP)
}
pub fn exported_symbols<'a>(&'a self) -> &'a NodeSet {
}
}
-impl<'a, 'tcx> ty::layout::HasTyCtxt<'tcx> for &'a SharedCrateContext<'a, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
- self.tcx
- }
-}
-
impl<'a, 'tcx> ty::layout::HasDataLayout for &'a CrateContext<'a, 'tcx> {
fn data_layout(&self) -> &ty::layout::TargetDataLayout {
&self.shared.tcx.data_layout
}
}
-impl<'a, 'tcx> ty::layout::HasTyCtxt<'tcx> for &'a CrateContext<'a, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
- self.shared.tcx
- }
-}
-
impl<'a, 'tcx> LayoutTyper<'tcx> for &'a SharedCrateContext<'a, 'tcx> {
type TyLayout = TyLayout<'tcx>;
- fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
- if let Some(&layout) = self.tcx().layout_cache.borrow().get(&ty) {
- return TyLayout { ty: ty, layout: layout, variant_index: None };
- }
+ fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
+ self.tcx
+ }
- self.tcx().infer_ctxt((), traits::Reveal::All).enter(|infcx| {
- infcx.layout_of(ty).unwrap_or_else(|e| {
- match e {
- ty::layout::LayoutError::SizeOverflow(_) =>
- self.sess().fatal(&e.to_string()),
- _ => bug!("failed to get layout for `{}`: {}", ty, e)
- }
+ fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+ let param_env = ty::ParamEnv::empty(traits::Reveal::All);
+ LayoutCx::new(self.tcx, param_env)
+ .layout_of(ty)
+ .unwrap_or_else(|e| match e {
+ LayoutError::SizeOverflow(_) => self.sess().fatal(&e.to_string()),
+ _ => bug!("failed to get layout for `{}`: {}", ty, e)
})
- })
}
fn normalize_projections(self, ty: Ty<'tcx>) -> Ty<'tcx> {
impl<'a, 'tcx> LayoutTyper<'tcx> for &'a CrateContext<'a, 'tcx> {
type TyLayout = TyLayout<'tcx>;
+ fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
+ self.shared.tcx
+ }
+
fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
self.shared.layout_of(ty)
}
ifn!("llvm.x86.seh.recoverfp", fn(i8p, i8p) -> i8p);
ifn!("llvm.assume", fn(i1) -> void);
+ ifn!("llvm.prefetch", fn(i8p, t_i32, t_i32, t_i32) -> void);
if ccx.sess().opts.debuginfo != NoDebugInfo {
ifn!("llvm.dbg.declare", fn(Type::metadata(ccx), Type::metadata(ccx)) -> void);
ty::TyAdt(def, _) if def.is_box() => {
let typ = t.boxed_ty();
if !scx.type_needs_drop(typ) && scx.type_is_sized(typ) {
- scx.tcx().infer_ctxt((), traits::Reveal::All).enter(|infcx| {
- let layout = t.layout(&infcx).unwrap();
- if layout.size(scx).bytes() == 0 {
- // `Box<ZeroSizeType>` does not allocate.
- false
- } else {
- true
- }
- })
+ let layout = t.layout(scx.tcx(), ty::ParamEnv::empty(traits::Reveal::All)).unwrap();
+ if layout.size(scx).bytes() == 0 {
+ // `Box<ZeroSizeType>` does not allocate.
+ false
+ } else {
+ true
+ }
} else {
true
}
}
C_nil(ccx)
},
-
+ "prefetch_read_data" | "prefetch_write_data" |
+ "prefetch_read_instruction" | "prefetch_write_instruction" => {
+ let expect = ccx.get_intrinsic(&("llvm.prefetch"));
+ let (rw, cache_type) = match name {
+ "prefetch_read_data" => (0, 1),
+ "prefetch_write_data" => (1, 1),
+ "prefetch_read_instruction" => (0, 0),
+ "prefetch_write_instruction" => (1, 0),
+ _ => bug!()
+ };
+ bcx.call(expect, &[llargs[0], C_i32(ccx, rw), llargs[1], C_i32(ccx, cache_type)], None)
+ },
"ctlz" | "cttz" | "ctpop" | "bswap" |
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" |
"overflowing_add" | "overflowing_sub" | "overflowing_mul" |
#![feature(slice_patterns)]
#![feature(unicode)]
#![feature(conservative_impl_trait)]
+#![feature(command_envs)]
#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![cfg_attr(stage0, feature(rustc_private))]
extern crate syntax_pos;
extern crate rustc_errors as errors;
extern crate serialize;
+#[cfg(windows)]
+extern crate gcc; // Used to locate MSVC, not gcc :)
pub use base::trans_crate;
pub use back::symbol_names::provide;
pub(crate) mod symbol_export;
pub(crate) mod symbol_names;
pub mod write;
- mod msvc;
- mod rpath;
+ pub mod rpath;
}
mod diagnostics;
// Type check the path.
let pat_ty = self.instantiate_value_path(segments, opt_ty, def, pat.span, pat.id);
// Replace constructor type with constructed type for tuple struct patterns.
- let pat_ty = tcx.no_late_bound_regions(&pat_ty.fn_ret()).expect("expected fn type");
+ let pat_ty = pat_ty.fn_sig().output();
+ let pat_ty = tcx.no_late_bound_regions(&pat_ty).expect("expected fn type");
self.demand_eqtype(pat.span, expected, pat_ty);
// Type check subpatterns.
use astconv::AstConv;
use super::{FnCtxt, LvalueOp};
+use super::method::MethodCallee;
-use check::coercion::AsCoercionSite;
use rustc::infer::InferOk;
use rustc::traits;
use rustc::ty::{self, Ty, TraitRef};
use rustc::ty::{ToPredicate, TypeFoldable};
-use rustc::ty::{MethodCall, MethodCallee};
use rustc::ty::{LvaluePreference, NoPreference};
-use rustc::hir;
+use rustc::ty::adjustment::{Adjustment, Adjust, OverloadedDeref};
use syntax_pos::Span;
use syntax::symbol::Symbol;
+use std::iter;
+
#[derive(Copy, Clone, Debug)]
enum AutoderefKind {
Builtin,
let cause = traits::ObligationCause::misc(self.span, self.fcx.body_id);
let mut selcx = traits::SelectionContext::new(self.fcx);
- let obligation = traits::Obligation::new(cause.clone(), trait_ref.to_predicate());
+ let obligation = traits::Obligation::new(cause.clone(),
+ self.fcx.param_env,
+ trait_ref.to_predicate());
if !selcx.evaluate_obligation(&obligation) {
debug!("overloaded_deref_ty: cannot match obligation");
return None;
}
let normalized = traits::normalize_projection_type(&mut selcx,
+ self.fcx.param_env,
ty::ProjectionTy::from_ref_and_name(
tcx,
trait_ref,
self.fcx.resolve_type_vars_if_possible(&self.cur_ty)
}
- pub fn finalize(self, pref: LvaluePreference, expr: &hir::Expr) {
- let fcx = self.fcx;
- fcx.register_infer_ok_obligations(self.finalize_as_infer_ok(pref, &[expr]));
+ pub fn step_count(&self) -> usize {
+ self.steps.len()
+ }
+
+ /// Returns the adjustment steps.
+ pub fn adjust_steps(&self, pref: LvaluePreference)
+ -> Vec<Adjustment<'tcx>> {
+ self.fcx.register_infer_ok_obligations(self.adjust_steps_as_infer_ok(pref))
}
- pub fn finalize_as_infer_ok<E>(self, pref: LvaluePreference, exprs: &[E])
- -> InferOk<'tcx, ()>
- where E: AsCoercionSite
- {
- let Autoderef { fcx, span, mut obligations, steps, .. } = self;
- let methods: Vec<_> = steps
- .iter()
- .map(|&(ty, kind)| {
- if let AutoderefKind::Overloaded = kind {
- fcx.try_overloaded_deref(span, None, ty, pref)
- .map(|InferOk { value, obligations: o }| {
- obligations.extend(o);
- value
- })
- } else {
- None
- }
- })
- .collect();
-
- debug!("finalize({:?}) - {:?},{:?}",
- pref,
- methods,
- obligations);
-
- for expr in exprs {
- let expr = expr.as_coercion_site();
- debug!("finalize - finalizing #{} - {:?}", expr.id, expr);
- for (n, method) in methods.iter().enumerate() {
- if let &Some(method) = method {
- let method_call = MethodCall::autoderef(expr.id, n as u32);
- fcx.tables.borrow_mut().method_map.insert(method_call, method);
- }
+ pub fn adjust_steps_as_infer_ok(&self, pref: LvaluePreference)
+ -> InferOk<'tcx, Vec<Adjustment<'tcx>>> {
+ let mut obligations = vec![];
+ let targets = self.steps.iter().skip(1).map(|&(ty, _)| ty)
+ .chain(iter::once(self.cur_ty));
+ let steps: Vec<_> = self.steps.iter().map(|&(source, kind)| {
+ if let AutoderefKind::Overloaded = kind {
+ self.fcx.try_overloaded_deref(self.span, source, pref)
+ .and_then(|InferOk { value: method, obligations: o }| {
+ obligations.extend(o);
+ if let ty::TyRef(region, mt) = method.sig.output().sty {
+ Some(OverloadedDeref {
+ region,
+ mutbl: mt.mutbl,
+ })
+ } else {
+ None
+ }
+ })
+ } else {
+ None
}
- }
+ }).zip(targets).map(|(autoderef, target)| {
+ Adjustment {
+ kind: Adjust::Deref(autoderef),
+ target
+ }
+ }).collect();
InferOk {
- value: (),
- obligations
+ obligations,
+ value: steps
}
}
+
+ pub fn finalize(self) {
+ let fcx = self.fcx;
+ fcx.register_predicates(self.into_obligations());
+ }
+
+ pub fn into_obligations(self) -> Vec<traits::PredicateObligation<'tcx>> {
+ self.obligations
+ }
}
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
pub fn try_overloaded_deref(&self,
span: Span,
- base_expr: Option<&hir::Expr>,
base_ty: Ty<'tcx>,
pref: LvaluePreference)
-> Option<InferOk<'tcx, MethodCallee<'tcx>>> {
- let rcvr = base_expr.map(|base_expr| super::AdjustedRcvr {
- rcvr_expr: base_expr, autoderefs: 0, unsize: false
- });
-
- self.try_overloaded_lvalue_op(span, rcvr, base_ty, &[], pref, LvalueOp::Deref)
+ self.try_overloaded_lvalue_op(span, base_ty, &[], pref, LvalueOp::Deref)
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use super::{DeferredCallResolution, Expectation, FnCtxt, TupleArgumentsFlag};
+use super::{Expectation, FnCtxt, TupleArgumentsFlag};
+use super::autoderef::Autoderef;
+use super::method::MethodCallee;
use hir::def::Def;
use hir::def_id::{DefId, LOCAL_CRATE};
use rustc::{infer, traits};
use rustc::ty::{self, TyCtxt, LvaluePreference, Ty};
use rustc::ty::subst::Subst;
+use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
use syntax::abi;
use syntax::symbol::Symbol;
use syntax_pos::Span;
}
enum CallStep<'tcx> {
- Builtin,
+ Builtin(Ty<'tcx>),
DeferredClosure(ty::FnSig<'tcx>),
- Overloaded(ty::MethodCallee<'tcx>),
+ Overloaded(MethodCallee<'tcx>),
}
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
let expr_ty = self.structurally_resolved_type(call_expr.span, original_callee_ty);
let mut autoderef = self.autoderef(callee_expr.span, expr_ty);
- let result = autoderef.by_ref()
- .flat_map(|(adj_ty, idx)| {
- self.try_overloaded_call_step(call_expr, callee_expr, adj_ty, idx)
- })
- .next();
- let callee_ty = autoderef.unambiguous_final_ty();
- autoderef.finalize(LvaluePreference::NoPreference, callee_expr);
+ let mut result = None;
+ while result.is_none() && autoderef.next().is_some() {
+ result = self.try_overloaded_call_step(call_expr, callee_expr, &autoderef);
+ }
+ autoderef.finalize();
let output = match result {
None => {
self.confirm_builtin_call(call_expr, original_callee_ty, arg_exprs, expected)
}
- Some(CallStep::Builtin) => {
+ Some(CallStep::Builtin(callee_ty)) => {
self.confirm_builtin_call(call_expr, callee_ty, arg_exprs, expected)
}
}
Some(CallStep::Overloaded(method_callee)) => {
- self.confirm_overloaded_call(call_expr,
- callee_expr,
- arg_exprs,
- expected,
- method_callee)
+ self.confirm_overloaded_call(call_expr, arg_exprs, expected, method_callee)
}
};
fn try_overloaded_call_step(&self,
call_expr: &'gcx hir::Expr,
callee_expr: &'gcx hir::Expr,
- adjusted_ty: Ty<'tcx>,
- autoderefs: usize)
+ autoderef: &Autoderef<'a, 'gcx, 'tcx>)
-> Option<CallStep<'tcx>> {
- debug!("try_overloaded_call_step(call_expr={:?}, adjusted_ty={:?}, autoderefs={})",
+ let adjusted_ty = autoderef.unambiguous_final_ty();
+ debug!("try_overloaded_call_step(call_expr={:?}, adjusted_ty={:?})",
call_expr,
- adjusted_ty,
- autoderefs);
+ adjusted_ty);
// If the callee is a bare function or a closure, then we're all set.
- match self.structurally_resolved_type(callee_expr.span, adjusted_ty).sty {
+ match adjusted_ty.sty {
ty::TyFnDef(..) | ty::TyFnPtr(_) => {
- self.apply_autoderef_adjustment(callee_expr.id, autoderefs, adjusted_ty);
- return Some(CallStep::Builtin);
+ let adjustments = autoderef.adjust_steps(LvaluePreference::NoPreference);
+ self.apply_adjustments(callee_expr, adjustments);
+ return Some(CallStep::Builtin(adjusted_ty));
}
ty::TyClosure(def_id, substs) => {
infer::FnCall,
&closure_ty)
.0;
- self.record_deferred_call_resolution(def_id,
- Box::new(CallResolution {
- call_expr: call_expr,
- callee_expr: callee_expr,
- adjusted_ty: adjusted_ty,
- autoderefs: autoderefs,
- fn_sig: fn_sig.clone(),
- closure_def_id: def_id,
- }));
+ let adjustments = autoderef.adjust_steps(LvaluePreference::NoPreference);
+ self.record_deferred_call_resolution(def_id, DeferredCallResolution {
+ call_expr,
+ callee_expr,
+ adjusted_ty,
+ adjustments,
+ fn_sig,
+ closure_def_id: def_id,
+ });
return Some(CallStep::DeferredClosure(fn_sig));
}
}
// over the top. The simplest fix by far is to just ignore
// this case and deref again, so we wind up with
// `FnMut::call_mut(&mut *x, ())`.
- ty::TyRef(..) if autoderefs == 0 => {
+ ty::TyRef(..) if autoderef.step_count() == 0 => {
return None;
}
_ => {}
}
- self.try_overloaded_call_traits(call_expr, callee_expr, adjusted_ty, autoderefs)
- .map(|method_callee| CallStep::Overloaded(method_callee))
+ self.try_overloaded_call_traits(call_expr, adjusted_ty).map(|(autoref, method)| {
+ let mut adjustments = autoderef.adjust_steps(LvaluePreference::NoPreference);
+ adjustments.extend(autoref);
+ self.apply_adjustments(callee_expr, adjustments);
+ CallStep::Overloaded(method)
+ })
}
fn try_overloaded_call_traits(&self,
call_expr: &hir::Expr,
- callee_expr: &hir::Expr,
- adjusted_ty: Ty<'tcx>,
- autoderefs: usize)
- -> Option<ty::MethodCallee<'tcx>> {
+ adjusted_ty: Ty<'tcx>)
+ -> Option<(Option<Adjustment<'tcx>>,
+ MethodCallee<'tcx>)> {
// Try the options that are least restrictive on the caller first.
- for &(opt_trait_def_id, method_name) in
- &[(self.tcx.lang_items.fn_trait(), Symbol::intern("call")),
- (self.tcx.lang_items.fn_mut_trait(), Symbol::intern("call_mut")),
- (self.tcx.lang_items.fn_once_trait(), Symbol::intern("call_once"))] {
+ for &(opt_trait_def_id, method_name, borrow) in
+ &[(self.tcx.lang_items.fn_trait(), Symbol::intern("call"), true),
+ (self.tcx.lang_items.fn_mut_trait(), Symbol::intern("call_mut"), true),
+ (self.tcx.lang_items.fn_once_trait(), Symbol::intern("call_once"), false)] {
let trait_def_id = match opt_trait_def_id {
Some(def_id) => def_id,
None => continue,
};
- match self.lookup_method_in_trait_adjusted(call_expr.span,
- Some(super::AdjustedRcvr {
- rcvr_expr: callee_expr,
- autoderefs,
- unsize: false
- }),
- method_name,
- trait_def_id,
- adjusted_ty,
- None) {
+ match self.lookup_method_in_trait(call_expr.span,
+ method_name,
+ trait_def_id,
+ adjusted_ty,
+ None) {
None => continue,
Some(ok) => {
- let method_callee = self.register_infer_ok_obligations(ok);
- return Some(method_callee);
+ let method = self.register_infer_ok_obligations(ok);
+ let mut autoref = None;
+ if borrow {
+ if let ty::TyRef(region, mt) = method.sig.inputs()[0].sty {
+ autoref = Some(Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(region, mt.mutbl)),
+ target: method.sig.inputs()[0]
+ });
+ }
+ }
+ return Some((autoref, method));
}
}
}
fn confirm_overloaded_call(&self,
call_expr: &hir::Expr,
- callee_expr: &'gcx hir::Expr,
arg_exprs: &'gcx [hir::Expr],
expected: Expectation<'tcx>,
- method_callee: ty::MethodCallee<'tcx>)
+ method_callee: MethodCallee<'tcx>)
-> Ty<'tcx> {
let output_type = self.check_method_argument_types(call_expr.span,
- method_callee.ty,
- callee_expr,
+ Ok(method_callee),
arg_exprs,
TupleArgumentsFlag::TupleArguments,
expected);
- self.write_overloaded_call_method_map(call_expr, method_callee);
+ self.write_method_call(call_expr.id, method_callee);
output_type
}
-
- fn write_overloaded_call_method_map(&self,
- call_expr: &hir::Expr,
- method_callee: ty::MethodCallee<'tcx>) {
- let method_call = ty::MethodCall::expr(call_expr.id);
- self.tables.borrow_mut().method_map.insert(method_call, method_callee);
- }
}
#[derive(Debug)]
-struct CallResolution<'gcx: 'tcx, 'tcx> {
+pub struct DeferredCallResolution<'gcx: 'tcx, 'tcx> {
call_expr: &'gcx hir::Expr,
callee_expr: &'gcx hir::Expr,
adjusted_ty: Ty<'tcx>,
- autoderefs: usize,
+ adjustments: Vec<Adjustment<'tcx>>,
fn_sig: ty::FnSig<'tcx>,
closure_def_id: DefId,
}
-impl<'gcx, 'tcx> DeferredCallResolution<'gcx, 'tcx> for CallResolution<'gcx, 'tcx> {
- fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
+impl<'a, 'gcx, 'tcx> DeferredCallResolution<'gcx, 'tcx> {
+ pub fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
debug!("DeferredCallResolution::resolve() {:?}", self);
// we should not be invoked until the closure kind has been
// We may now know enough to figure out fn vs fnmut etc.
match fcx.try_overloaded_call_traits(self.call_expr,
- self.callee_expr,
- self.adjusted_ty,
- self.autoderefs) {
- Some(method_callee) => {
+ self.adjusted_ty) {
+ Some((autoref, method_callee)) => {
// One problem is that when we get here, we are going
// to have a newly instantiated function signature
// from the call trait. This has to be reconciled with
// can't because of the annoying need for a TypeTrace.
// (This always bites me, should find a way to
// refactor it.)
- let method_sig = fcx.tcx
- .no_late_bound_regions(&method_callee.ty.fn_sig())
- .unwrap();
+ let method_sig = method_callee.sig;
debug!("attempt_resolution: method_callee={:?}", method_callee);
fcx.demand_eqtype(self.call_expr.span, method_sig.output(), self.fn_sig.output());
- fcx.write_overloaded_call_method_map(self.call_expr, method_callee);
+ let mut adjustments = self.adjustments;
+ adjustments.extend(autoref);
+ fcx.apply_adjustments(self.callee_expr, adjustments);
+
+ fcx.write_method_call(self.call_expr.id, method_callee);
}
None => {
span_bug!(self.call_expr.span,
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
fn type_is_known_to_be_sized(&self, ty: Ty<'tcx>, span: Span) -> bool {
let lang_item = self.tcx.require_lang_item(lang_items::SizedTraitLangItem);
- traits::type_known_to_meet_bound(self, ty, lang_item, span)
+ traits::type_known_to_meet_bound(self, self.param_env, ty, lang_item, span)
}
}
let fn_sig = self.liberate_late_bound_regions(expr_def_id, &sig);
let fn_sig = self.inh.normalize_associated_types_in(body.value.span,
- body.value.id, &fn_sig);
+ body.value.id,
+ self.param_env,
+ &fn_sig);
- check_fn(self, fn_sig, decl, expr.id, body);
+ check_fn(self, self.param_env, fn_sig, decl, expr.id, body);
// Tuple up the arguments and insert the resulting function type into
// the `closures` table.
use rustc::hir;
use rustc::hir::def_id::DefId;
-use rustc::infer::{Coercion, InferResult, InferOk, TypeTrace};
+use rustc::infer::{Coercion, InferResult, InferOk};
use rustc::infer::type_variable::TypeVariableOrigin;
use rustc::traits::{self, ObligationCause, ObligationCauseCode};
use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
}
}
-type CoerceResult<'tcx> = InferResult<'tcx, Adjustment<'tcx>>;
+type CoerceResult<'tcx> = InferResult<'tcx, (Vec<Adjustment<'tcx>>, Ty<'tcx>)>;
fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability,
to_mutbl: hir::Mutability)
}
}
-fn identity<'tcx>() -> Adjust<'tcx> {
- Adjust::DerefRef {
- autoderefs: 0,
- autoref: None,
- unsize: false,
- }
+fn identity(_: Ty) -> Vec<Adjustment> { vec![] }
+
+fn simple<'tcx>(kind: Adjust<'tcx>) -> impl FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>> {
+ move |target| vec![Adjustment { kind, target }]
}
-fn success<'tcx>(kind: Adjust<'tcx>,
+fn success<'tcx>(adj: Vec<Adjustment<'tcx>>,
target: Ty<'tcx>,
obligations: traits::PredicateObligations<'tcx>)
-> CoerceResult<'tcx> {
Ok(InferOk {
- value: Adjustment {
- kind,
- target
- },
+ value: (adj, target),
obligations
})
}
fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
self.commit_if_ok(|_| {
- let trace = TypeTrace::types(&self.cause, false, a, b);
if self.use_lub {
- self.lub(false, trace, &a, &b)
+ self.at(&self.cause, self.fcx.param_env)
+ .lub(b, a)
} else {
- self.sub(false, trace, &a, &b)
+ self.at(&self.cause, self.fcx.param_env)
+ .sup(b, a)
+ .map(|InferOk { value: (), obligations }| InferOk { value: a, obligations })
}
})
}
/// Unify two types (using sub or lub) and produce a specific coercion.
- fn unify_and(&self, a: Ty<'tcx>, b: Ty<'tcx>, kind: Adjust<'tcx>)
- -> CoerceResult<'tcx> {
+ fn unify_and<F>(&self, a: Ty<'tcx>, b: Ty<'tcx>, f: F)
+ -> CoerceResult<'tcx>
+ where F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>
+ {
self.unify(&a, &b).and_then(|InferOk { value: ty, obligations }| {
- success(kind, ty, obligations)
+ success(f(ty), ty, obligations)
})
}
- fn coerce<E>(&self,
- exprs: &[E],
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> CoerceResult<'tcx>
- where E: AsCoercionSite
- {
+ fn coerce(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
let a = self.shallow_resolve(a);
debug!("Coerce.tys({:?} => {:?})", a, b);
// Just ignore error types.
if a.references_error() || b.references_error() {
- return success(identity(), b, vec![]);
+ return success(vec![], b, vec![]);
}
if a.is_never() {
// already resolved in some way.
let diverging_ty = self.next_diverging_ty_var(
TypeVariableOrigin::AdjustmentType(self.cause.span));
- self.unify_and(&b, &diverging_ty, Adjust::NeverToAny)
+ self.unify_and(&b, &diverging_ty, simple(Adjust::NeverToAny))
} else {
- success(Adjust::NeverToAny, b, vec![])
+ success(simple(Adjust::NeverToAny)(b), b, vec![])
};
}
}
ty::TyRef(r_b, mt_b) => {
- return self.coerce_borrowed_pointer(exprs, a, b, r_b, mt_b);
+ return self.coerce_borrowed_pointer(a, b, r_b, mt_b);
}
_ => {}
}
_ => {
// Otherwise, just use unification rules.
- self.unify_and(a, b, identity())
+ self.unify_and(a, b, identity)
}
}
}
/// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
/// To match `A` with `B`, autoderef will be performed,
/// calling `deref`/`deref_mut` where necessary.
- fn coerce_borrowed_pointer<E>(&self,
- exprs: &[E],
- a: Ty<'tcx>,
- b: Ty<'tcx>,
- r_b: ty::Region<'tcx>,
- mt_b: TypeAndMut<'tcx>)
- -> CoerceResult<'tcx>
- where E: AsCoercionSite
- {
+ fn coerce_borrowed_pointer(&self,
+ a: Ty<'tcx>,
+ b: Ty<'tcx>,
+ r_b: ty::Region<'tcx>,
+ mt_b: TypeAndMut<'tcx>)
+ -> CoerceResult<'tcx> {
debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b);
coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?;
(r_a, mt_a)
}
- _ => return self.unify_and(a, b, identity()),
+ _ => return self.unify_and(a, b, identity),
};
let span = self.cause.span;
});
match self.unify(derefd_ty_a, b) {
Ok(ok) => {
- found = Some((ok, autoderefs));
+ found = Some(ok);
break;
}
Err(err) => {
// (e.g., in example above, the failure from relating `Vec<T>`
// to the target type), since that should be the least
// confusing.
- let (InferOk { value: ty, mut obligations }, autoderefs) = match found {
+ let InferOk { value: ty, mut obligations } = match found {
Some(d) => d,
None => {
let err = first_error.expect("coerce_borrowed_pointer had no error");
}
};
- if ty == a && mt_a.mutbl == hir::MutImmutable && autoderefs == 1 {
+ if ty == a && mt_a.mutbl == hir::MutImmutable && autoderef.step_count() == 1 {
// As a special case, if we would produce `&'a *x`, that's
// a total no-op. We end up with the type `&'a T` just as
// we started with. In that case, just skip it
// `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
// which is a borrow.
assert_eq!(mt_b.mutbl, hir::MutImmutable); // can only coerce &T -> &U
- return success(identity(), ty, obligations);
+ return success(vec![], ty, obligations);
}
+ let pref = LvaluePreference::from_mutbl(mt_b.mutbl);
+ let InferOk { value: mut adjustments, obligations: o }
+ = autoderef.adjust_steps_as_infer_ok(pref);
+ obligations.extend(o);
+ obligations.extend(autoderef.into_obligations());
+
// Now apply the autoref. We have to extract the region out of
// the final ref type we got.
let r_borrow = match ty.sty {
ty::TyRef(r_borrow, _) => r_borrow,
_ => span_bug!(span, "expected a ref type, got {:?}", ty),
};
- let autoref = Some(AutoBorrow::Ref(r_borrow, mt_b.mutbl));
- debug!("coerce_borrowed_pointer: succeeded ty={:?} autoderefs={:?} autoref={:?}",
- ty,
- autoderefs,
- autoref);
+ adjustments.push(Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mt_b.mutbl)),
+ target: ty
+ });
- let pref = LvaluePreference::from_mutbl(mt_b.mutbl);
- obligations.extend(autoderef.finalize_as_infer_ok(pref, exprs).obligations);
+ debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}",
+ ty,
+ adjustments);
- success(Adjust::DerefRef {
- autoderefs: autoderefs,
- autoref: autoref,
- unsize: false,
- }, ty, obligations)
+ success(adjustments, ty, obligations)
}
// that, at which point we will need extra checks on the target here.
// Handle reborrows before selecting `Source: CoerceUnsized<Target>`.
- let (source, reborrow) = match (&source.sty, &target.sty) {
+ let reborrow = match (&source.sty, &target.sty) {
(&ty::TyRef(_, mt_a), &ty::TyRef(_, mt_b)) => {
coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?;
let coercion = Coercion(self.cause.span);
let r_borrow = self.next_region_var(coercion);
- (mt_a.ty, Some(AutoBorrow::Ref(r_borrow, mt_b.mutbl)))
+ Some((Adjustment {
+ kind: Adjust::Deref(None),
+ target: mt_a.ty
+ }, Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mt_b.mutbl)),
+ target: self.tcx.mk_ref(r_borrow, ty::TypeAndMut {
+ mutbl: mt_b.mutbl,
+ ty: mt_a.ty
+ })
+ }))
}
(&ty::TyRef(_, mt_a), &ty::TyRawPtr(mt_b)) => {
coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?;
- (mt_a.ty, Some(AutoBorrow::RawPtr(mt_b.mutbl)))
- }
- _ => (source, None),
- };
- let coerce_source = source.adjust_for_autoref(self.tcx, reborrow);
- let adjust = Adjust::DerefRef {
- autoderefs: if reborrow.is_some() { 1 } else { 0 },
- autoref: reborrow,
- unsize: true,
+ Some((Adjustment {
+ kind: Adjust::Deref(None),
+ target: mt_a.ty
+ }, Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b.mutbl)),
+ target: self.tcx.mk_ptr(ty::TypeAndMut {
+ mutbl: mt_b.mutbl,
+ ty: mt_a.ty
+ })
+ }))
+ }
+ _ => None,
};
+ let coerce_source = reborrow.as_ref().map_or(source, |&(_, ref r)| r.target);
// Setup either a subtyping or a LUB relationship between
// the `CoerceUnsized` target type and the expected type.
// for the former and let type inference do the rest.
let origin = TypeVariableOrigin::MiscVariable(self.cause.span);
let coerce_target = self.next_ty_var(origin);
- let mut coercion = self.unify_and(coerce_target, target, adjust)?;
+ let mut coercion = self.unify_and(coerce_target, target, |target| {
+ let unsize = Adjustment {
+ kind: Adjust::Unsize,
+ target
+ };
+ match reborrow {
+ None => vec![unsize],
+ Some((ref deref, ref autoref)) => {
+ vec![deref.clone(), autoref.clone(), unsize]
+ }
+ }
+ })?;
let mut selcx = traits::SelectionContext::new(self);
// Create an obligation for `Source: CoerceUnsized<Target>`.
let cause = ObligationCause::misc(self.cause.span, self.body_id);
- queue.push_back(self.tcx
- .predicate_for_trait_def(cause, coerce_unsized_did, 0,
- coerce_source, &[coerce_target]));
+ queue.push_back(self.tcx.predicate_for_trait_def(self.fcx.param_env,
+ cause,
+ coerce_unsized_did,
+ 0,
+ coerce_source,
+ &[coerce_target]));
// Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid
// emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where
Ok(coercion)
}
- fn coerce_from_safe_fn(&self,
- a: Ty<'tcx>,
- fn_ty_a: ty::PolyFnSig<'tcx>,
- b: Ty<'tcx>,
- to_unsafe: Adjust<'tcx>,
- normal: Adjust<'tcx>)
- -> CoerceResult<'tcx> {
+ fn coerce_from_safe_fn<F, G>(&self,
+ a: Ty<'tcx>,
+ fn_ty_a: ty::PolyFnSig<'tcx>,
+ b: Ty<'tcx>,
+ to_unsafe: F,
+ normal: G)
+ -> CoerceResult<'tcx>
+ where F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>,
+ G: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>
+ {
if let ty::TyFnPtr(fn_ty_b) = b.sty {
match (fn_ty_a.unsafety(), fn_ty_b.unsafety()) {
(hir::Unsafety::Normal, hir::Unsafety::Unsafe) => {
debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b);
self.coerce_from_safe_fn(a, fn_ty_a, b,
- Adjust::UnsafeFnPointer, identity())
+ simple(Adjust::UnsafeFnPointer), identity)
}
fn coerce_from_fn_item(&self,
ty::TyFnPtr(_) => {
let a_fn_pointer = self.tcx.mk_fn_ptr(fn_ty_a);
self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b,
- Adjust::ReifyFnPointer, Adjust::ReifyFnPointer)
+ simple(Adjust::ReifyFnPointer), simple(Adjust::ReifyFnPointer))
}
- _ => self.unify_and(a, b, identity()),
+ _ => self.unify_and(a, b, identity),
}
}
let pointer_ty = self.tcx.mk_fn_ptr(converted_sig);
debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})",
a, b, pointer_ty);
- self.unify_and(pointer_ty, b, Adjust::ClosureFnPointer)
+ self.unify_and(pointer_ty, b, simple(Adjust::ClosureFnPointer))
}
- _ => self.unify_and(a, b, identity()),
+ _ => self.unify_and(a, b, identity),
}
}
ty::TyRef(_, mt) => (true, mt),
ty::TyRawPtr(mt) => (false, mt),
_ => {
- return self.unify_and(a, b, identity());
+ return self.unify_and(a, b, identity);
}
};
// Although references and unsafe ptrs have the same
// representation, we still register an Adjust::DerefRef so that
// regionck knows that the region for `a` must be valid here.
- self.unify_and(a_unsafe, b, if is_ref {
- Adjust::DerefRef {
- autoderefs: 1,
- autoref: Some(AutoBorrow::RawPtr(mutbl_b)),
- unsize: false,
- }
+ if is_ref {
+ self.unify_and(a_unsafe, b, |target| {
+ vec![Adjustment {
+ kind: Adjust::Deref(None),
+ target: mt_a.ty
+ }, Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)),
+ target
+ }]
+ })
} else if mt_a.mutbl != mutbl_b {
- Adjust::MutToConstPointer
+ self.unify_and(a_unsafe, b, simple(Adjust::MutToConstPointer))
} else {
- identity()
- })
+ self.unify_and(a_unsafe, b, identity)
+ }
}
}
let cause = self.cause(expr.span, ObligationCauseCode::ExprAssignable);
let coerce = Coerce::new(self, cause);
- let ok = self.commit_if_ok(|_| coerce.coerce(&[expr], source, target))?;
-
- let adjustment = self.register_infer_ok_obligations(ok);
- self.apply_adjustment(expr.id, adjustment);
+ let ok = self.commit_if_ok(|_| coerce.coerce(source, target))?;
- // We should now have added sufficient adjustments etc to
- // ensure that the type of expression, post-adjustment, is
- // a subtype of target.
+ let (adjustments, _) = self.register_infer_ok_obligations(ok);
+ self.apply_adjustments(expr, adjustments);
Ok(target)
}
let cause = self.cause(syntax_pos::DUMMY_SP, ObligationCauseCode::ExprAssignable);
let coerce = Coerce::new(self, cause);
- self.probe(|_| coerce.coerce::<hir::Expr>(&[], source, target)).is_ok()
+ self.probe(|_| coerce.coerce(source, target)).is_ok()
}
/// Given some expressions, their known unified type and another expression,
return Ok(prev_ty);
}
- let trace = TypeTrace::types(cause, true, prev_ty, new_ty);
-
// Special-case that coercion alone cannot handle:
// Two function item types of differing IDs or Substs.
match (&prev_ty.sty, &new_ty.sty) {
(&ty::TyFnDef(a_def_id, a_substs, a_fty), &ty::TyFnDef(b_def_id, b_substs, b_fty)) => {
// The signature must always match.
- let fty = self.lub(true, trace.clone(), &a_fty, &b_fty)
+ let fty = self.at(cause, self.param_env)
+ .trace(prev_ty, new_ty)
+ .lub(&a_fty, &b_fty)
.map(|ok| self.register_infer_ok_obligations(ok))?;
if a_def_id == b_def_id {
// Same function, maybe the parameters match.
let substs = self.commit_if_ok(|_| {
- self.lub(true, trace.clone(), &a_substs, &b_substs)
+ self.at(cause, self.param_env)
+ .trace(prev_ty, new_ty)
+ .lub(&a_substs, &b_substs)
.map(|ok| self.register_infer_ok_obligations(ok))
});
for expr in exprs.iter().map(|e| e.as_coercion_site()).chain(Some(new)) {
// The only adjustment that can produce an fn item is
// `NeverToAny`, so this should always be valid.
- self.apply_adjustment(expr.id, Adjustment {
+ self.apply_adjustments(expr, vec![Adjustment {
kind: Adjust::ReifyFnPointer,
target: fn_ptr
- });
+ }]);
}
return Ok(fn_ptr);
}
// but only if the new expression has no coercion already applied to it.
let mut first_error = None;
if !self.tables.borrow().adjustments.contains_key(&new.id) {
- let result = self.commit_if_ok(|_| coerce.coerce(&[new], new_ty, prev_ty));
+ let result = self.commit_if_ok(|_| coerce.coerce(new_ty, prev_ty));
match result {
Ok(ok) => {
- let adjustment = self.register_infer_ok_obligations(ok);
- self.apply_adjustment(new.id, adjustment);
- return Ok(adjustment.target);
+ let (adjustments, target) = self.register_infer_ok_obligations(ok);
+ self.apply_adjustments(new, adjustments);
+ return Ok(target);
}
Err(e) => first_error = Some(e),
}
// previous expressions, other than noop reborrows (ignoring lifetimes).
for expr in exprs {
let expr = expr.as_coercion_site();
- let noop = match self.tables.borrow().adjustments.get(&expr.id).map(|adj| adj.kind) {
- Some(Adjust::DerefRef {
- autoderefs: 1,
- autoref: Some(AutoBorrow::Ref(_, mutbl_adj)),
- unsize: false
- }) => {
+ let noop = match self.tables.borrow().expr_adjustments(expr) {
+ &[
+ Adjustment { kind: Adjust::Deref(_), .. },
+ Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(_, mutbl_adj)), .. }
+ ] => {
match self.node_ty(expr.id).sty {
ty::TyRef(_, mt_orig) => {
// Reborrow that we can safely ignore, because
- // the next adjustment can only be a DerefRef
+ // the next adjustment can only be a Deref
// which will be merged into it.
mutbl_adj == mt_orig.mutbl
}
_ => false,
}
}
- Some(Adjust::NeverToAny) => true,
- Some(_) => false,
- None => true,
+ &[Adjustment { kind: Adjust::NeverToAny, .. }] | &[] => true,
+ _ => false,
};
if !noop {
return self.commit_if_ok(|_| {
- self.lub(true, trace.clone(), &prev_ty, &new_ty)
+ self.at(cause, self.param_env)
+ .lub(prev_ty, new_ty)
.map(|ok| self.register_infer_ok_obligations(ok))
});
}
}
- match self.commit_if_ok(|_| coerce.coerce(&exprs, prev_ty, new_ty)) {
+ match self.commit_if_ok(|_| coerce.coerce(prev_ty, new_ty)) {
Err(_) => {
// Avoid giving strange errors on failed attempts.
if let Some(e) = first_error {
Err(e)
} else {
self.commit_if_ok(|_| {
- self.lub(true, trace, &prev_ty, &new_ty)
+ self.at(cause, self.param_env)
+ .lub(prev_ty, new_ty)
.map(|ok| self.register_infer_ok_obligations(ok))
})
}
}
Ok(ok) => {
- let adjustment = self.register_infer_ok_obligations(ok);
+ let (adjustments, target) = self.register_infer_ok_obligations(ok);
for expr in exprs {
let expr = expr.as_coercion_site();
- self.apply_adjustment(expr.id, adjustment);
+ self.apply_adjustments(expr, adjustments.clone());
}
- Ok(adjustment.target)
+ Ok(target)
}
}
}
// Another example is `break` with no argument expression.
assert!(expression_ty.is_nil());
assert!(expression_ty.is_nil(), "if let hack without unit type");
- fcx.eq_types(label_expression_as_expected, cause, expression_ty, self.merged_ty())
+ fcx.at(cause, fcx.param_env)
+ .eq_exp(label_expression_as_expected, expression_ty, self.merged_ty())
.map(|infer_ok| {
fcx.register_infer_ok_obligations(infer_ok);
expression_ty
// The key step here is to update the caller_bounds's predicates to be
// the new hybrid bounds we computed.
let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_node_id);
- let param_env = ty::ParamEnv::new(tcx.intern_predicates(&hybrid_preds.predicates));
+ let param_env = ty::ParamEnv::new(tcx.intern_predicates(&hybrid_preds.predicates),
+ Reveal::UserFacing);
let param_env = traits::normalize_param_env_or_error(tcx,
impl_m.def_id,
param_env,
normalize_cause.clone());
- tcx.infer_ctxt(param_env, Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
let inh = Inherited::new(infcx, impl_m.def_id);
let infcx = &inh.infcx;
debug!("compare_impl_method: caller_bounds={:?}",
- infcx.param_env.caller_bounds);
+ param_env.caller_bounds);
let mut selcx = traits::SelectionContext::new(&infcx);
&ty::Binder(impl_m_own_bounds.predicates));
for predicate in impl_m_own_bounds {
let traits::Normalized { value: predicate, obligations } =
- traits::normalize(&mut selcx, normalize_cause.clone(), &predicate);
+ traits::normalize(&mut selcx, param_env, normalize_cause.clone(), &predicate);
inh.register_predicates(obligations);
- inh.register_predicate(traits::Obligation::new(cause.clone(), predicate));
+ inh.register_predicate(traits::Obligation::new(cause.clone(), param_env, predicate));
}
// We now need to check that the signature of the impl method is
let impl_sig =
inh.normalize_associated_types_in(impl_m_span,
impl_m_node_id,
+ param_env,
&impl_sig);
let impl_fty = tcx.mk_fn_ptr(ty::Binder(impl_sig));
debug!("compare_impl_method: impl_fty={:?}", impl_fty);
let trait_sig =
inh.normalize_associated_types_in(impl_m_span,
impl_m_node_id,
+ param_env,
&trait_sig);
let trait_fty = tcx.mk_fn_ptr(ty::Binder(trait_sig));
debug!("compare_impl_method: trait_fty={:?}", trait_fty);
- let sub_result = infcx.sub_types(false, &cause, impl_fty, trait_fty)
+ let sub_result = infcx.at(&cause, param_env)
+ .sup(trait_fty, impl_fty)
.map(|InferOk { obligations, .. }| {
inh.register_predicates(obligations);
});
trait_fty);
let (impl_err_span, trait_err_span) = extract_spans_for_error_reporting(&infcx,
+ param_env,
&terr,
&cause,
impl_m,
// pass around temporarily.
let region_maps = RegionMaps::new();
let mut free_regions = FreeRegionMap::new();
- free_regions.relate_free_regions_from_predicates(
- &infcx.param_env.caller_bounds);
+ free_regions.relate_free_regions_from_predicates(¶m_env.caller_bounds);
infcx.resolve_regions_and_report_errors(impl_m.def_id, ®ion_maps, &free_regions);
} else {
- let fcx = FnCtxt::new(&inh, impl_m_node_id);
+ let fcx = FnCtxt::new(&inh, param_env, impl_m_node_id);
fcx.regionck_item(impl_m_node_id, impl_m_span, &[]);
}
}
fn extract_spans_for_error_reporting<'a, 'gcx, 'tcx>(infcx: &infer::InferCtxt<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
terr: &TypeError,
cause: &ObligationCause<'tcx>,
impl_m: &ty::AssociatedItem,
impl_iter.zip(trait_iter)
.zip(impl_m_iter)
.zip(trait_m_iter)
- .filter_map(|(((impl_arg_ty, trait_arg_ty), impl_arg), trait_arg)| {
- match infcx.sub_types(true, &cause, trait_arg_ty, impl_arg_ty) {
+ .filter_map(|(((&impl_arg_ty, &trait_arg_ty), impl_arg), trait_arg)| {
+ match infcx.at(&cause, param_env).sub(trait_arg_ty, impl_arg_ty) {
Ok(_) => None,
Err(_) => Some((impl_arg.span, Some(trait_arg.span))),
}
})
.next()
.unwrap_or_else(|| {
- if infcx.sub_types(false, &cause, impl_sig.output(),
- trait_sig.output())
- .is_err() {
- (impl_m_output.span(), Some(trait_m_output.span()))
- } else {
- (cause.span, tcx.hir.span_if_local(trait_m.def_id))
- }
+ if
+ infcx.at(&cause, param_env)
+ .sup(trait_sig.output(), impl_sig.output())
+ .is_err()
+ {
+ (impl_m_output.span(), Some(trait_m_output.span()))
+ } else {
+ (cause.span, tcx.hir.span_if_local(trait_m.def_id))
+ }
})
} else {
(cause.span, tcx.hir.span_if_local(trait_m.def_id))
format!("expected `{}` in impl", self_descr));
if let Some(span) = tcx.hir.span_if_local(trait_m.def_id) {
err.span_label(span, format!("`{}` used in trait", self_descr));
+ } else {
+ err.note_trait_signature(trait_m.name.to_string(),
+ trait_m.signature(&tcx));
}
err.emit();
return Err(ErrorReported);
} else {
format!("{} parameter", trait_number_args)
}));
+ } else {
+ err.note_trait_signature(trait_m.name.to_string(),
+ trait_m.signature(&tcx));
}
err.span_label(impl_span,
format!("expected {}, found {}",
impl_trait_ref: ty::TraitRef<'tcx>) {
debug!("compare_const_impl(impl_trait_ref={:?})", impl_trait_ref);
- tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
+ let param_env = ty::ParamEnv::empty(Reveal::UserFacing);
let inh = Inherited::new(infcx, impl_c.def_id);
let infcx = &inh.infcx;
// There is no "body" here, so just pass dummy id.
let impl_ty = inh.normalize_associated_types_in(impl_c_span,
impl_c_node_id,
+ param_env,
&impl_ty);
debug!("compare_const_impl: impl_ty={:?}", impl_ty);
let trait_ty = inh.normalize_associated_types_in(impl_c_span,
impl_c_node_id,
+ param_env,
&trait_ty);
debug!("compare_const_impl: trait_ty={:?}", trait_ty);
- let err = infcx.sub_types(false, &cause, impl_ty, trait_ty)
- .map(|ok| inh.register_infer_ok_obligations(ok));
+ let err = infcx.at(&cause, param_env)
+ .sup(trait_ty, impl_ty)
+ .map(|ok| inh.register_infer_ok_obligations(ok));
if let Err(terr) = err {
debug!("checking associated const for compatibility: impl ty {:?}, trait ty {:?}",
return;
}
- let fcx = FnCtxt::new(&inh, impl_c_node_id);
+ let fcx = FnCtxt::new(&inh, param_env, impl_c_node_id);
fcx.regionck_item(impl_c_node_id, impl_c_span, &[]);
});
}
use rustc::hir;
use rustc::hir::def::Def;
use rustc::ty::{self, Ty, AssociatedItem};
-use errors::DiagnosticBuilder;
+use errors::{DiagnosticBuilder, CodeMapper};
use super::method::probe;
// Requires that the two types unify, and prints an error message if
// they don't.
pub fn demand_suptype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) {
- let cause = self.misc(sp);
- match self.sub_types(false, &cause, actual, expected) {
+ let cause = &self.misc(sp);
+ match self.at(cause, self.param_env).sup(expected, actual) {
Ok(InferOk { obligations, value: () }) => {
self.register_predicates(obligations);
},
cause: &ObligationCause<'tcx>,
expected: Ty<'tcx>,
actual: Ty<'tcx>) -> Option<DiagnosticBuilder<'tcx>> {
- match self.eq_types(false, cause, actual, expected) {
+ match self.at(cause, self.param_env).eq(expected, actual) {
Ok(InferOk { obligations, value: () }) => {
self.register_predicates(obligations);
None
checked_ty),
};
if self.can_coerce(ref_ty, expected) {
- if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
+ // Use the callsite's span if this is a macro call. #41858
+ let sp = self.sess().codemap().call_span_if_macro(expr.span);
+ if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(sp) {
return Some(format!("try with `{}{}`",
match mutability.mutbl {
hir::Mutability::MutMutable => "&mut ",
use rustc::middle::region::{self, RegionMaps};
use rustc::ty::subst::{Subst, Substs};
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::traits::{self, ObligationCause, Reveal};
+use rustc::traits::{self, ObligationCause};
use util::common::ErrorReported;
use util::nodemap::FxHashSet;
// check that the impl type can be made to match the trait type.
- let impl_param_env = tcx.param_env(self_type_did);
- tcx.infer_ctxt(impl_param_env, Reveal::UserFacing).enter(|ref infcx| {
+ tcx.infer_ctxt(()).enter(|ref infcx| {
+ let impl_param_env = tcx.param_env(self_type_did);
let tcx = infcx.tcx;
let mut fulfillment_cx = traits::FulfillmentContext::new();
let fresh_impl_self_ty = drop_impl_ty.subst(tcx, fresh_impl_substs);
let cause = &ObligationCause::misc(drop_impl_span, drop_impl_node_id);
- match infcx.eq_types(true, cause, named_type, fresh_impl_self_ty) {
+ match infcx.at(cause, impl_param_env).eq(named_type, fresh_impl_self_ty) {
Ok(InferOk { obligations, .. }) => {
fulfillment_cx.register_predicate_obligations(infcx, obligations);
}
],
tcx.mk_nil())
}
+ "prefetch_read_data" | "prefetch_write_data" |
+ "prefetch_read_instruction" | "prefetch_write_instruction" => {
+ (1, vec![tcx.mk_ptr(ty::TypeAndMut {
+ ty: param(0),
+ mutbl: hir::MutImmutable
+ }), tcx.types.i32],
+ tcx.mk_nil())
+ }
"drop_in_place" => {
(1, vec![tcx.mk_mut_ptr(param(0))], tcx.mk_nil())
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use super::probe;
+use super::{probe, MethodCallee};
use check::{FnCtxt, LvalueOp, callee};
use hir::def_id::DefId;
use rustc::ty::subst::Substs;
use rustc::traits;
use rustc::ty::{self, LvaluePreference, NoPreference, PreferMutLvalue, Ty};
-use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
+use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow, OverloadedDeref};
use rustc::ty::fold::TypeFoldable;
use rustc::infer::{self, InferOk};
use syntax_pos::Span;
unadjusted_self_ty: Ty<'tcx>,
pick: probe::Pick<'tcx>,
supplied_method_types: Vec<Ty<'tcx>>)
- -> ty::MethodCallee<'tcx> {
+ -> MethodCallee<'tcx> {
debug!("confirm(unadjusted_self_ty={:?}, pick={:?}, supplied_method_types={:?})",
unadjusted_self_ty,
pick,
unadjusted_self_ty: Ty<'tcx>,
pick: probe::Pick<'tcx>,
supplied_method_types: Vec<Ty<'tcx>>)
- -> ty::MethodCallee<'tcx> {
+ -> MethodCallee<'tcx> {
// Adjust the self expression the user provided and obtain the adjusted type.
let self_ty = self.adjust_self_ty(unadjusted_self_ty, &pick);
debug!("all_substs={:?}", all_substs);
// Create the final signature for the method, replacing late-bound regions.
- let (method_ty, method_predicates) = self.instantiate_method_sig(&pick, all_substs);
+ let (method_sig, method_predicates) = self.instantiate_method_sig(&pick, all_substs);
// Unify the (adjusted) self type with what the method expects.
- self.unify_receivers(self_ty, method_ty.fn_sig().input(0).skip_binder());
+ self.unify_receivers(self_ty, method_sig.inputs()[0]);
// Add any trait/regions obligations specified on the method's type parameters.
+ let method_ty = self.tcx.mk_fn_ptr(ty::Binder(method_sig));
self.add_obligations(method_ty, all_substs, &method_predicates);
// Create the final `MethodCallee`.
- let callee = ty::MethodCallee {
+ let callee = MethodCallee {
def_id: pick.item.def_id,
- ty: method_ty,
substs: all_substs,
+ sig: method_sig,
};
if let Some(hir::MutMutable) = pick.autoref {
unadjusted_self_ty: Ty<'tcx>,
pick: &probe::Pick<'tcx>)
-> Ty<'tcx> {
- let autoref = if let Some(mutbl) = pick.autoref {
+ // Commit the autoderefs by calling `autoderef` again, but this
+ // time writing the results into the various tables.
+ let mut autoderef = self.autoderef(self.span, unadjusted_self_ty);
+ let (_, n) = autoderef.nth(pick.autoderefs).unwrap();
+ assert_eq!(n, pick.autoderefs);
+
+ let mut adjustments = autoderef.adjust_steps(LvaluePreference::NoPreference);
+
+ let mut target = autoderef.unambiguous_final_ty();
+
+ if let Some(mutbl) = pick.autoref {
let region = self.next_region_var(infer::Autoref(self.span));
- Some(AutoBorrow::Ref(region, mutbl))
+ target = self.tcx.mk_ref(region, ty::TypeAndMut {
+ mutbl,
+ ty: target
+ });
+ adjustments.push(Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)),
+ target
+ });
+
+ if let Some(unsize_target) = pick.unsize {
+ target = self.tcx.mk_ref(region, ty::TypeAndMut {
+ mutbl,
+ ty: unsize_target
+ });
+ adjustments.push(Adjustment {
+ kind: Adjust::Unsize,
+ target
+ });
+ }
} else {
// No unsizing should be performed without autoref (at
// least during method dispach). This is because we
// currently only unsize `[T;N]` to `[T]`, and naturally
// that must occur being a reference.
assert!(pick.unsize.is_none());
- None
- };
-
-
- // Commit the autoderefs by calling `autoderef` again, but this
- // time writing the results into the various tables.
- let mut autoderef = self.autoderef(self.span, unadjusted_self_ty);
- let (autoderefd_ty, n) = autoderef.nth(pick.autoderefs).unwrap();
- assert_eq!(n, pick.autoderefs);
-
- autoderef.unambiguous_final_ty();
- autoderef.finalize(LvaluePreference::NoPreference, self.self_expr);
+ }
- let target = pick.unsize.unwrap_or(autoderefd_ty);
- let target = target.adjust_for_autoref(self.tcx, autoref);
+ autoderef.finalize();
- // Write out the final adjustment.
- self.apply_adjustment(self.self_expr.id, Adjustment {
- kind: Adjust::DerefRef {
- autoderefs: pick.autoderefs,
- autoref: autoref,
- unsize: pick.unsize.is_some(),
- },
- target: target
- });
+ // Write out the final adjustments.
+ self.apply_adjustments(self.self_expr, adjustments);
target
}
}
fn unify_receivers(&mut self, self_ty: Ty<'tcx>, method_self_ty: Ty<'tcx>) {
- match self.sub_types(false, &self.misc(self.span), self_ty, method_self_ty) {
+ match self.at(&self.misc(self.span), self.param_env).sup(method_self_ty, self_ty) {
Ok(InferOk { obligations, value: () }) => {
self.register_predicates(obligations);
}
fn instantiate_method_sig(&mut self,
pick: &probe::Pick<'tcx>,
all_substs: &'tcx Substs<'tcx>)
- -> (Ty<'tcx>, ty::InstantiatedPredicates<'tcx>) {
+ -> (ty::FnSig<'tcx>, ty::InstantiatedPredicates<'tcx>) {
debug!("instantiate_method_sig(pick={:?}, all_substs={:?})",
pick,
all_substs);
let method_sig = self.instantiate_type_scheme(self.span, all_substs, &method_sig);
debug!("type scheme substituted, method_sig={:?}", method_sig);
- (self.tcx.mk_fn_def(def_id, all_substs, ty::Binder(method_sig)),
- method_predicates)
+ (method_sig, method_predicates)
}
fn add_obligations(&mut self,
// Fix up the autoderefs. Autorefs can only occur immediately preceding
// overloaded lvalue ops, and will be fixed by them in order to get
// the correct region.
- let autoderefs = match self.tables.borrow().adjustments.get(&expr.id) {
- Some(&Adjustment { kind: Adjust::DerefRef { autoderefs, .. }, .. }) => autoderefs,
- Some(_) | None => 0
- };
-
- if autoderefs > 0 {
- let mut autoderef = self.autoderef(expr.span, self.node_ty(expr.id));
- autoderef.nth(autoderefs).unwrap_or_else(|| {
- span_bug!(expr.span,
- "expr was deref-able {} times but now isn't?",
- autoderefs);
- });
- autoderef.finalize(PreferMutLvalue, expr);
+ let mut source = self.node_ty(expr.id);
+ if let Some(adjustments) = self.tables.borrow_mut().adjustments.get_mut(&expr.id) {
+ let pref = LvaluePreference::PreferMutLvalue;
+ for adjustment in adjustments {
+ if let Adjust::Deref(Some(ref mut deref)) = adjustment.kind {
+ if let Some(ok) = self.try_overloaded_deref(expr.span, source, pref) {
+ let method = self.register_infer_ok_obligations(ok);
+ if let ty::TyRef(region, mt) = method.sig.output().sty {
+ *deref = OverloadedDeref {
+ region,
+ mutbl: mt.mutbl
+ };
+ }
+ }
+ }
+ source = adjustment.target;
+ }
}
match expr.node {
{
debug!("convert_lvalue_op_to_mutable({:?}, {:?}, {:?}, {:?})",
op, expr, base_expr, arg_tys);
- let method_call = ty::MethodCall::expr(expr.id);
- if !self.tables.borrow().method_map.contains_key(&method_call) {
+ if !self.tables.borrow().is_method_call(expr) {
debug!("convert_lvalue_op_to_mutable - builtin, nothing to do");
return
}
- let base_ty = self.tables.borrow().adjustments.get(&base_expr.id)
+ let base_ty = self.tables.borrow().expr_adjustments(base_expr).last()
.map_or_else(|| self.node_ty(expr.id), |adj| adj.target);
let base_ty = self.resolve_type_vars_if_possible(&base_ty);
.ty;
let method = self.try_overloaded_lvalue_op(
- expr.span, None, base_ty, arg_tys, PreferMutLvalue, op);
- let ok = match method {
- Some(method) => method,
+ expr.span, base_ty, arg_tys, PreferMutLvalue, op);
+ let method = match method {
+ Some(ok) => self.register_infer_ok_obligations(ok),
None => return self.tcx.sess.delay_span_bug(expr.span, "re-trying op failed")
};
- let method = self.register_infer_ok_obligations(ok);
debug!("convert_lvalue_op_to_mutable: method={:?}", method);
- self.tables.borrow_mut().method_map.insert(method_call, method);
+ self.write_method_call(expr.id, method);
+
+ let (region, mutbl) = if let ty::TyRef(r, mt) = method.sig.inputs()[0].sty {
+ (r, mt.mutbl)
+ } else {
+ span_bug!(expr.span, "input to lvalue op is not a ref?");
+ };
// Convert the autoref in the base expr to mutable with the correct
// region and mutability.
- if let Some(&mut Adjustment {
- ref mut target, kind: Adjust::DerefRef {
- autoref: Some(AutoBorrow::Ref(ref mut r, ref mut mutbl)), ..
+ let base_expr_ty = self.node_ty(base_expr.id);
+ if let Some(adjustments) = self.tables.borrow_mut().adjustments.get_mut(&base_expr.id) {
+ let mut source = base_expr_ty;
+ for adjustment in &mut adjustments[..] {
+ if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind {
+ debug!("convert_lvalue_op_to_mutable: converting autoref {:?}", adjustment);
+ adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(region, mutbl));
+ adjustment.target = self.tcx.mk_ref(region, ty::TypeAndMut {
+ ty: source,
+ mutbl
+ });
+ }
+ source = adjustment.target;
}
- }) = self.tables.borrow_mut().adjustments.get_mut(&base_expr.id) {
- debug!("convert_lvalue_op_to_mutable: converting autoref of {:?}", target);
-
- // extract method return type, which will be &mut T;
- // all LB regions should have been instantiated during method lookup
- let method_sig = self.tcx.no_late_bound_regions(&method.ty.fn_sig()).unwrap();
- *target = method_sig.inputs()[0];
- if let ty::TyRef(r_, mt) = target.sty {
- *r = r_;
- *mutbl = mt.mutbl;
- } else {
- span_bug!(expr.span, "input to lvalue op is not a ref?");
+ // If we have an autoref followed by unsizing at the end, fix the unsize target.
+ match adjustments[..] {
+ [.., Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. },
+ Adjustment { kind: Adjust::Unsize, ref mut target }] => {
+ *target = method.sig.inputs()[0];
+ }
+ _ => {}
}
}
}
//! Method lookup: the secret sauce of Rust. See `README.md`.
-use check::{FnCtxt, AdjustedRcvr};
+use check::FnCtxt;
use hir::def::Def;
use hir::def_id::DefId;
use rustc::ty::subst::Substs;
use rustc::traits;
use rustc::ty::{self, ToPredicate, ToPolyTraitRef, TraitRef, TypeFoldable};
-use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
use rustc::ty::subst::Subst;
use rustc::infer::{self, InferOk};
use self::probe::IsSuggestion;
+#[derive(Clone, Copy, Debug)]
+pub struct MethodCallee<'tcx> {
+ /// Impl method ID, for inherent methods, or trait method ID, otherwise.
+ pub def_id: DefId,
+ pub substs: &'tcx Substs<'tcx>,
+
+ /// Instantiated method signature, i.e. it has been
+ /// substituted, normalized, and has had late-bound
+ /// lifetimes replaced with inference variables.
+ pub sig: ty::FnSig<'tcx>,
+}
+
pub enum MethodError<'tcx> {
// Did not find an applicable method, but we did find various near-misses that may work.
NoMatch(NoMatchData<'tcx>),
supplied_method_types: Vec<ty::Ty<'tcx>>,
call_expr: &'gcx hir::Expr,
self_expr: &'gcx hir::Expr)
- -> Result<ty::MethodCallee<'tcx>, MethodError<'tcx>> {
+ -> Result<MethodCallee<'tcx>, MethodError<'tcx>> {
debug!("lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})",
method_name,
self_ty,
supplied_method_types))
}
- /// `lookup_in_trait_adjusted` is used for overloaded operators.
+ /// `lookup_method_in_trait` is used for overloaded operators.
/// It does a very narrow slice of what the normal probe/confirm path does.
/// In particular, it doesn't really do any probing: it simply constructs
/// an obligation for aparticular trait with the given self-type and checks
/// whether that trait is implemented.
///
/// FIXME(#18741) -- It seems likely that we can consolidate some of this
- /// code with the other method-lookup code. In particular, autoderef on
- /// index is basically identical to autoderef with normal probes, except
- /// that the test also looks for built-in indexing. Also, the second half of
- /// this method is basically the same as confirmation.
- pub fn lookup_method_in_trait_adjusted(&self,
- span: Span,
- self_info: Option<AdjustedRcvr>,
- m_name: ast::Name,
- trait_def_id: DefId,
- self_ty: ty::Ty<'tcx>,
- opt_input_types: Option<Vec<ty::Ty<'tcx>>>)
- -> Option<InferOk<'tcx, ty::MethodCallee<'tcx>>> {
- debug!("lookup_in_trait_adjusted(self_ty={:?}, self_info={:?}, \
+ /// code with the other method-lookup code. In particular, the second half
+ /// of this method is basically the same as confirmation.
+ pub fn lookup_method_in_trait(&self,
+ span: Span,
+ m_name: ast::Name,
+ trait_def_id: DefId,
+ self_ty: ty::Ty<'tcx>,
+ opt_input_types: Option<&[ty::Ty<'tcx>]>)
+ -> Option<InferOk<'tcx, MethodCallee<'tcx>>> {
+ debug!("lookup_in_trait_adjusted(self_ty={:?}, \
m_name={}, trait_def_id={:?})",
self_ty,
- self_info,
m_name,
trait_def_id);
// Construct an obligation
let poly_trait_ref = trait_ref.to_poly_trait_ref();
let obligation =
- traits::Obligation::misc(span, self.body_id, poly_trait_ref.to_predicate());
+ traits::Obligation::misc(span,
+ self.body_id,
+ self.param_env,
+ poly_trait_ref.to_predicate());
// Now we want to know if this can be matched
let mut selcx = traits::SelectionContext::new(self);
// NB: Instantiate late-bound regions first so that
// `instantiate_type_scheme` can normalize associated types that
// may reference those regions.
- let original_method_ty = tcx.type_of(def_id);
- let fn_sig = original_method_ty.fn_sig();
+ let fn_sig = tcx.type_of(def_id).fn_sig();
let fn_sig = self.replace_late_bound_regions_with_fresh_var(span,
infer::FnCall,
&fn_sig).0;
value
}
};
- let transformed_self_ty = fn_sig.inputs()[0];
- let method_ty = tcx.mk_fn_def(def_id, substs, ty::Binder(fn_sig));
-
- debug!("lookup_in_trait_adjusted: matched method method_ty={:?} obligation={:?}",
- method_ty,
- obligation);
// Register obligations for the parameters. This will include the
// `Self` parameter, which in turn has a bound of the main trait,
assert!(!bounds.has_escaping_regions());
let cause = traits::ObligationCause::misc(span, self.body_id);
- obligations.extend(traits::predicates_for_generics(cause.clone(), &bounds));
+ obligations.extend(traits::predicates_for_generics(cause.clone(),
+ self.param_env,
+ &bounds));
// Also add an obligation for the method type being well-formed.
- obligations.push(traits::Obligation::new(cause, ty::Predicate::WellFormed(method_ty)));
-
- // Insert any adjustments needed (always an autoref of some mutability).
- if let Some(AdjustedRcvr { rcvr_expr, autoderefs, unsize }) = self_info {
- debug!("lookup_in_trait_adjusted: inserting adjustment if needed \
- (self-id={}, autoderefs={}, unsize={}, fty={:?})",
- rcvr_expr.id, autoderefs, unsize, original_method_ty);
-
- let original_sig = original_method_ty.fn_sig();
- let autoref = match (&original_sig.input(0).skip_binder().sty,
- &transformed_self_ty.sty) {
- (&ty::TyRef(..), &ty::TyRef(region, ty::TypeAndMut { mutbl, ty: _ })) => {
- // Trait method is fn(&self) or fn(&mut self), need an
- // autoref. Pull the region etc out of the type of first argument.
- Some(AutoBorrow::Ref(region, mutbl))
- }
- _ => {
- // Trait method is fn(self), no transformation needed.
- assert!(!unsize);
- None
- }
- };
-
- self.apply_adjustment(rcvr_expr.id, Adjustment {
- kind: Adjust::DerefRef {
- autoderefs: autoderefs,
- autoref: autoref,
- unsize: unsize
- },
- target: transformed_self_ty
- });
- }
+ let method_ty = tcx.mk_fn_ptr(ty::Binder(fn_sig));
+ debug!("lookup_in_trait_adjusted: matched method method_ty={:?} obligation={:?}",
+ method_ty,
+ obligation);
+ obligations.push(traits::Obligation::new(cause,
+ self.param_env,
+ ty::Predicate::WellFormed(method_ty)));
- let callee = ty::MethodCallee {
+ let callee = MethodCallee {
def_id: def_id,
- ty: method_ty,
substs: trait_ref.substs,
+ sig: fn_sig,
};
debug!("callee = {:?}", callee);
let cause = traits::ObligationCause::misc(self.span, self.body_id);
let mut selcx = &mut traits::SelectionContext::new(self.fcx);
let traits::Normalized { value: xform_self_ty, obligations } =
- traits::normalize(selcx, cause, &xform_self_ty);
+ traits::normalize(selcx, self.param_env, cause, &xform_self_ty);
debug!("assemble_inherent_impl_probe: xform_self_ty = {:?}",
xform_self_ty);
let output = fty.output().subst(self.tcx, substs);
let (output, _) = self.replace_late_bound_regions_with_fresh_var(
self.span, infer::FnCall, &output);
- self.can_sub_types(output, expected).is_ok()
+ self.can_sub(self.param_env, output, expected).is_ok()
})
}
_ => false,
let cause = traits::ObligationCause::misc(self.span, self.body_id);
let mut selcx = &mut traits::SelectionContext::new(self.fcx);
let traits::Normalized { value: xform_self_ty, obligations } =
- traits::normalize(selcx, cause, &xform_self_ty);
+ traits::normalize(selcx, self.param_env, cause, &xform_self_ty);
debug!("xform_self_ty={:?}", xform_self_ty);
substs,
bound);
- if self.can_equate(&step.self_ty, &bound.self_ty()).is_ok() {
+ if self.can_eq(self.param_env, step.self_ty, bound.self_ty()).is_ok() {
let xform_self_ty = self.xform_self_ty(&item, bound.self_ty(), bound.substs);
debug!("assemble_projection_candidates: bound={:?} xform_self_ty={:?}",
self.probe(|_| {
// First check that the self type can be related.
- let sub_obligations = match self.sub_types(false,
- &ObligationCause::dummy(),
- self_ty,
- probe.xform_self_ty) {
+ let sub_obligations = match self.at(&ObligationCause::dummy(), self.param_env)
+ .sup(probe.xform_self_ty, self_ty) {
Ok(InferOk { obligations, value: () }) => obligations,
Err(_) => {
debug!("--> cannot relate self-types");
let impl_bounds = self.tcx.predicates_of(impl_def_id);
let impl_bounds = impl_bounds.instantiate(self.tcx, substs);
let traits::Normalized { value: impl_bounds, obligations: norm_obligations } =
- traits::normalize(selcx, cause.clone(), &impl_bounds);
+ traits::normalize(selcx, self.param_env, cause.clone(), &impl_bounds);
// Convert the bounds into obligations.
- let obligations = traits::predicates_for_generics(cause.clone(), &impl_bounds);
+ let obligations = traits::predicates_for_generics(cause.clone(),
+ self.param_env,
+ &impl_bounds);
debug!("impl_obligations={:?}", obligations);
// Evaluate those obligations to see if they might possibly hold.
let trait_ref = ty::TraitRef::new(fn_once, fn_once_substs);
let poly_trait_ref = trait_ref.to_poly_trait_ref();
let obligation =
- Obligation::misc(span, self.body_id, poly_trait_ref.to_predicate());
+ Obligation::misc(span,
+ self.body_id,
+ self.param_env,
+ poly_trait_ref.to_predicate());
SelectionContext::new(self).evaluate_obligation(&obligation)
})
})
if mode == Mode::MethodCall {
"method"
} else {
- "associated item"
+ match item_name.as_str().chars().next() {
+ Some(name) => {
+ if name.is_lowercase() {
+ "function or associated item"
+ } else {
+ "associated item"
+ }
+ },
+ None => {
+ ""
+ },
+ }
},
item_name,
self.ty_to_string(actual))
macro_rules! report_function {
($span:expr, $name:expr) => {
err.note(&format!("{} is a function, perhaps you wish to call it",
- $name));
+ $name));
}
}
let mut candidates = valid_out_of_scope_traits;
candidates.sort();
candidates.dedup();
- let msg = format!("items from traits can only be used if the trait is in scope; the \
- following {traits_are} implemented but not in scope, perhaps add \
- a `use` for {one_of_them}:",
+ err.help("items from traits can only be used if the trait is in scope");
+ let mut msg = format!("the following {traits_are} implemented but not in scope, \
+ perhaps add a `use` for {one_of_them}:",
traits_are = if candidates.len() == 1 {
"trait is"
} else {
"one of them"
});
- err.help(&msg[..]);
-
let limit = if candidates.len() == 5 { 5 } else { 4 };
for (i, trait_did) in candidates.iter().take(limit).enumerate() {
- err.help(&format!("candidate #{}: `use {};`",
- i + 1,
- self.tcx.item_path_str(*trait_did)));
+ msg.push_str(&format!("\ncandidate #{}: `use {};`",
+ i + 1,
+ self.tcx.item_path_str(*trait_did)));
}
if candidates.len() > limit {
- err.note(&format!("and {} others", candidates.len() - limit));
+ msg.push_str(&format!("\nand {} others", candidates.len() - limit));
}
+ err.note(&msg[..]);
+
return;
}
// FIXME #21673 this help message could be tuned to the case
// of a type parameter: suggest adding a trait bound rather
// than implementing.
- let msg = format!("items from traits can only be used if the trait is implemented \
- and in scope; the following {traits_define} an item `{name}`, \
- perhaps you need to implement {one_of_them}:",
- traits_define = if candidates.len() == 1 {
- "trait defines"
- } else {
- "traits define"
- },
- one_of_them = if candidates.len() == 1 {
- "it"
- } else {
- "one of them"
- },
- name = item_name);
-
- err.help(&msg[..]);
+ err.help("items from traits can only be used if the trait is implemented and in scope");
+ let mut msg = format!("the following {traits_define} an item `{name}`, \
+ perhaps you need to implement {one_of_them}:",
+ traits_define = if candidates.len() == 1 {
+ "trait defines"
+ } else {
+ "traits define"
+ },
+ one_of_them = if candidates.len() == 1 {
+ "it"
+ } else {
+ "one of them"
+ },
+ name = item_name);
for (i, trait_info) in candidates.iter().enumerate() {
- err.help(&format!("candidate #{}: `{}`",
- i + 1,
- self.tcx.item_path_str(trait_info.def_id)));
+ msg.push_str(&format!("\ncandidate #{}: `{}`",
+ i + 1,
+ self.tcx.item_path_str(trait_info.def_id)));
}
+ err.note(&msg[..]);
}
}
While type checking a function, the intermediate types for the
expressions, blocks, and so forth contained within the function are
-stored in `fcx.node_types` and `fcx.item_substs`. These types
+stored in `fcx.node_types` and `fcx.node_substs`. These types
may contain unresolved type variables. After type checking is
complete, the functions in the writeback module are used to take the
types from this table, resolve them, and then write them into their
*/
pub use self::Expectation::*;
+use self::autoderef::Autoderef;
+use self::callee::DeferredCallResolution;
use self::coercion::{CoerceMany, DynamicCoerceMany};
pub use self::compare_method::{compare_impl_method, compare_const_impl};
+use self::method::MethodCallee;
use self::TupleArgumentsFlag::*;
use astconv::AstConv;
use rustc::infer::type_variable::{TypeVariableOrigin};
use rustc::middle::region::CodeExtent;
use rustc::ty::subst::{Kind, Subst, Substs};
-use rustc::traits::{self, FulfillmentContext, ObligationCause, ObligationCauseCode, Reveal};
+use rustc::traits::{self, FulfillmentContext, ObligationCause, ObligationCauseCode};
use rustc::ty::{ParamTy, LvaluePreference, NoPreference, PreferMutLvalue};
use rustc::ty::{self, Ty, TyCtxt, Visibility};
-use rustc::ty::{MethodCall, MethodCallee};
use rustc::ty::adjustment::{Adjust, Adjustment, AutoBorrow};
use rustc::ty::fold::{BottomUpFolder, TypeFoldable};
use rustc::ty::maps::Providers;
// decision. We keep these deferred resolutions grouped by the
// def-id of the closure, so that once we decide, we can easily go
// back and process them.
- deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>>>,
+ deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolution<'gcx, 'tcx>>>>,
deferred_cast_checks: RefCell<Vec<cast::CastCheck<'tcx>>>,
}
}
-trait DeferredCallResolution<'gcx, 'tcx> {
- fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>);
-}
-
-type DeferredCallResolutionHandler<'gcx, 'tcx> = Box<DeferredCallResolution<'gcx, 'tcx>+'tcx>;
-
/// When type-checking an expression, we propagate downward
/// whatever type hint we are able in the form of an `Expectation`.
#[derive(Copy, Clone, Debug)]
Index
}
-#[derive(Copy, Clone, Debug)]
-pub struct AdjustedRcvr<'a> {
- pub rcvr_expr: &'a hir::Expr,
- pub autoderefs: usize,
- pub unsize: bool
-}
-
/// Tracks whether executing a node may exit normally (versus
/// return/break/panic, which "diverge", leaving dead code in their
/// wake). Tracked semi-automatically (through type variables marked
pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
body_id: ast::NodeId,
+ /// The parameter environment used for proving trait obligations
+ /// in this function. This can change when we descend into
+ /// closures (as they bring new things into scope), hence it is
+ /// not part of `Inherited` (as of the time of this writing,
+ /// closures do not yet change the environment, but they will
+ /// eventually).
+ param_env: ty::ParamEnv<'tcx>,
+
// Number of errors that had been reported when we started
// checking this function. On exit, if we find that *more* errors
// have been reported, we will skip regionck and other work that
pub fn build(tcx: TyCtxt<'a, 'gcx, 'gcx>, def_id: DefId)
-> InheritedBuilder<'a, 'gcx, 'tcx> {
let tables = ty::TypeckTables::empty();
- let param_env = tcx.param_env(def_id);
InheritedBuilder {
- infcx: tcx.infer_ctxt((tables, param_env), Reveal::UserFacing),
+ infcx: tcx.infer_ctxt(tables),
def_id,
}
}
fn normalize_associated_types_in<T>(&self,
span: Span,
body_id: ast::NodeId,
+ param_env: ty::ParamEnv<'tcx>,
value: &T) -> T
where T : TypeFoldable<'tcx>
{
- let ok = self.normalize_associated_types_in_as_infer_ok(span, body_id, value);
+ let ok = self.normalize_associated_types_in_as_infer_ok(span, body_id, param_env, value);
self.register_infer_ok_obligations(ok)
}
fn normalize_associated_types_in_as_infer_ok<T>(&self,
span: Span,
body_id: ast::NodeId,
+ param_env: ty::ParamEnv<'tcx>,
value: &T)
-> InferOk<'tcx, T>
where T : TypeFoldable<'tcx>
let mut selcx = traits::SelectionContext::new(self);
let cause = ObligationCause::misc(span, body_id);
let traits::Normalized { value, obligations } =
- traits::normalize(&mut selcx, cause, value);
+ traits::normalize(&mut selcx, param_env, cause, value);
debug!("normalize_associated_types_in: result={:?} predicates={:?}",
value,
obligations);
let body = tcx.hir.body(body_id);
Inherited::build(tcx, def_id).enter(|inh| {
+ let param_env = tcx.param_env(def_id);
let fcx = if let Some(decl) = fn_decl {
let fn_sig = tcx.type_of(def_id).fn_sig();
let fn_sig =
inh.liberate_late_bound_regions(def_id, &fn_sig);
let fn_sig =
- inh.normalize_associated_types_in(body.value.span, body_id.node_id, &fn_sig);
+ inh.normalize_associated_types_in(body.value.span,
+ body_id.node_id,
+ param_env,
+ &fn_sig);
- check_fn(&inh, fn_sig, decl, id, body)
+ check_fn(&inh, param_env, fn_sig, decl, id, body)
} else {
- let fcx = FnCtxt::new(&inh, body.value.id);
+ let fcx = FnCtxt::new(&inh, param_env, body.value.id);
let expected_type = tcx.type_of(def_id);
let expected_type = fcx.normalize_associated_types_in(body.value.span, &expected_type);
fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized);
/// * ...
/// * inherited: other fields inherited from the enclosing fn (if any)
fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
fn_sig: ty::FnSig<'tcx>,
decl: &'gcx hir::FnDecl,
fn_id: ast::NodeId,
{
let mut fn_sig = fn_sig.clone();
- debug!("check_fn(sig={:?}, fn_id={})", fn_sig, fn_id);
+ debug!("check_fn(sig={:?}, fn_id={}, param_env={:?})", fn_sig, fn_id, param_env);
// Create the function context. This is either derived from scratch or,
// in the case of function expressions, based on the outer context.
- let mut fcx = FnCtxt::new(inherited, body.value.id);
+ let mut fcx = FnCtxt::new(inherited, param_env, body.value.id);
*fcx.ps.borrow_mut() = UnsafetyState::function(fn_sig.unsafety, fn_id);
let ret_ty = fn_sig.output();
}
}
- let signature = |item: &ty::AssociatedItem| {
- match item.kind {
- ty::AssociatedKind::Method => {
- format!("{}", tcx.type_of(item.def_id).fn_sig().0)
- }
- ty::AssociatedKind::Type => format!("type {};", item.name.to_string()),
- ty::AssociatedKind::Const => {
- format!("const {}: {:?};", item.name.to_string(), tcx.type_of(item.def_id))
- }
- }
- };
-
if !missing_items.is_empty() {
let mut err = struct_span_err!(tcx.sess, impl_span, E0046,
"not all trait items implemented, missing: `{}`",
if let Some(span) = tcx.hir.span_if_local(trait_item.def_id) {
err.span_label(span, format!("`{}` from trait", trait_item.name));
} else {
- err.note(&format!("`{}` from trait: `{}`",
- trait_item.name,
- signature(&trait_item)));
+ err.note_trait_signature(trait_item.name.to_string(),
+ trait_item.signature(&tcx));
}
}
err.emit();
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
body_id: ast::NodeId)
-> FnCtxt<'a, 'gcx, 'tcx> {
FnCtxt {
body_id: body_id,
+ param_env,
err_count_on_creation: inh.tcx.sess.err_count(),
ret_coercion: None,
ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal,
fn record_deferred_call_resolution(&self,
closure_def_id: DefId,
- r: DeferredCallResolutionHandler<'gcx, 'tcx>) {
+ r: DeferredCallResolution<'gcx, 'tcx>) {
let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r);
}
fn remove_deferred_call_resolutions(&self,
closure_def_id: DefId)
- -> Vec<DeferredCallResolutionHandler<'gcx, 'tcx>>
+ -> Vec<DeferredCallResolution<'gcx, 'tcx>>
{
let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
- deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new())
+ deferred_call_resolutions.remove(&closure_def_id).unwrap_or(vec![])
}
pub fn tag(&self) -> String {
}
}
- pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) {
- if !substs.substs.is_noop() {
+ pub fn write_method_call(&self, node_id: ast::NodeId, method: MethodCallee<'tcx>) {
+ self.tables.borrow_mut().type_dependent_defs.insert(node_id, Def::Method(method.def_id));
+ self.write_substs(node_id, method.substs);
+ }
+
+ pub fn write_substs(&self, node_id: ast::NodeId, substs: &'tcx Substs<'tcx>) {
+ if !substs.is_noop() {
debug!("write_substs({}, {:?}) in fcx {}",
node_id,
substs,
self.tag());
- self.tables.borrow_mut().item_substs.insert(node_id, substs);
+ self.tables.borrow_mut().node_substs.insert(node_id, substs);
}
}
- pub fn apply_autoderef_adjustment(&self,
- node_id: ast::NodeId,
- derefs: usize,
- adjusted_ty: Ty<'tcx>) {
- self.apply_adjustment(node_id, Adjustment {
- kind: Adjust::DerefRef {
- autoderefs: derefs,
- autoref: None,
- unsize: false
- },
- target: adjusted_ty
- });
- }
+ pub fn apply_adjustments(&self, expr: &hir::Expr, adj: Vec<Adjustment<'tcx>>) {
+ debug!("apply_adjustments(expr={:?}, adj={:?})", expr, adj);
- pub fn apply_adjustment(&self, node_id: ast::NodeId, adj: Adjustment<'tcx>) {
- debug!("apply_adjustment(node_id={}, adj={:?})", node_id, adj);
-
- if adj.is_identity() {
+ if adj.is_empty() {
return;
}
- match self.tables.borrow_mut().adjustments.entry(node_id) {
+ match self.tables.borrow_mut().adjustments.entry(expr.id) {
Entry::Vacant(entry) => { entry.insert(adj); },
Entry::Occupied(mut entry) => {
debug!(" - composing on top of {:?}", entry.get());
- let composed_kind = match (entry.get().kind, adj.kind) {
+ match (&entry.get()[..], &adj[..]) {
// Applying any adjustment on top of a NeverToAny
// is a valid NeverToAny adjustment, because it can't
// be reached.
- (Adjust::NeverToAny, _) => Adjust::NeverToAny,
- (Adjust::DerefRef {
- autoderefs: 1,
- autoref: Some(AutoBorrow::Ref(..)),
- unsize: false
- }, Adjust::DerefRef { autoderefs, .. }) if autoderefs > 0 => {
+ (&[Adjustment { kind: Adjust::NeverToAny, .. }], _) => return,
+ (&[
+ Adjustment { kind: Adjust::Deref(_), .. },
+ Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. },
+ ], &[
+ Adjustment { kind: Adjust::Deref(_), .. },
+ .. // Any following adjustments are allowed.
+ ]) => {
// A reborrow has no effect before a dereference.
- adj.kind
}
// FIXME: currently we never try to compose autoderefs
// and ReifyFnPointer/UnsafeFnPointer, but we could.
_ =>
- bug!("while adjusting {}, can't compose {:?} and {:?}",
- node_id, entry.get(), adj)
- };
- *entry.get_mut() = Adjustment {
- kind: composed_kind,
- target: adj.target
+ bug!("while adjusting {:?}, can't compose {:?} and {:?}",
+ expr, entry.get(), adj)
};
+ *entry.get_mut() = adj;
}
}
}
// Require that the predicate holds for the concrete type.
let cause = traits::ObligationCause::new(span, self.body_id,
traits::ReturnType);
- self.register_predicate(traits::Obligation::new(cause, predicate));
+ self.register_predicate(traits::Obligation::new(cause,
+ self.param_env,
+ predicate));
}
ty_var
fn normalize_associated_types_in<T>(&self, span: Span, value: &T) -> T
where T : TypeFoldable<'tcx>
{
- let ok = self.normalize_associated_types_in_as_infer_ok(span, value);
- self.register_infer_ok_obligations(ok)
+ self.inh.normalize_associated_types_in(span, self.body_id, self.param_env, value)
}
fn normalize_associated_types_in_as_infer_ok<T>(&self, span: Span, value: &T)
-> InferOk<'tcx, T>
where T : TypeFoldable<'tcx>
{
- self.inh.normalize_associated_types_in_as_infer_ok(span, self.body_id, value)
+ self.inh.normalize_associated_types_in_as_infer_ok(span,
+ self.body_id,
+ self.param_env,
+ value)
}
pub fn write_nil(&self, node_id: ast::NodeId) {
cause: traits::ObligationCause<'tcx>)
{
self.fulfillment_cx.borrow_mut()
- .register_bound(self, ty, def_id, cause);
+ .register_bound(self, self.param_env, ty, def_id, cause);
}
pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> {
}
}
- pub fn opt_node_ty_substs<F>(&self,
- id: ast::NodeId,
- f: F) where
- F: FnOnce(&ty::ItemSubsts<'tcx>),
- {
- if let Some(s) = self.tables.borrow().item_substs.get(&id) {
- f(s);
- }
- }
-
/// Registers an obligation for checking later, during regionck, that the type `ty` must
/// outlive the region `r`.
pub fn register_region_obligation(&self,
{
// WF obligations never themselves fail, so no real need to give a detailed cause:
let cause = traits::ObligationCause::new(span, self.body_id, code);
- self.register_predicate(traits::Obligation::new(cause, ty::Predicate::WellFormed(ty)));
+ self.register_predicate(traits::Obligation::new(cause,
+ self.param_env,
+ ty::Predicate::WellFormed(ty)));
}
pub fn register_old_wf_obligation(&self,
debug!("add_obligations_for_parameters(predicates={:?})",
predicates);
- for obligation in traits::predicates_for_generics(cause, predicates) {
+ for obligation in traits::predicates_for_generics(cause, self.param_env, predicates) {
self.register_predicate(obligation);
}
}
{
// extract method return type, which will be &T;
// all LB regions should have been instantiated during method lookup
- let ret_ty = method.ty.fn_ret();
- let ret_ty = self.tcx.no_late_bound_regions(&ret_ty).unwrap();
+ let ret_ty = method.sig.output();
// method returns &T, but the type as visible to user is T, so deref
ret_ty.builtin_deref(true, NoPreference).unwrap()
// consolidated.
let mut autoderef = self.autoderef(base_expr.span, base_ty);
-
- while let Some((adj_ty, autoderefs)) = autoderef.next() {
- if let Some(final_mt) = self.try_index_step(
- MethodCall::expr(expr.id), expr, Some(AdjustedRcvr {
- rcvr_expr: base_expr,
- autoderefs,
- unsize: false
- }), base_expr.span, adj_ty, lvalue_pref, idx_ty)
- {
- autoderef.finalize(lvalue_pref, base_expr);
- return Some(final_mt);
- }
-
- if let ty::TyArray(element_ty, _) = adj_ty.sty {
- autoderef.finalize(lvalue_pref, base_expr);
- let adj_ty = self.tcx.mk_slice(element_ty);
- return self.try_index_step(
- MethodCall::expr(expr.id), expr, Some(AdjustedRcvr {
- rcvr_expr: base_expr,
- autoderefs,
- unsize: true
- }), base_expr.span, adj_ty, lvalue_pref, idx_ty)
- }
+ let mut result = None;
+ while result.is_none() && autoderef.next().is_some() {
+ result = self.try_index_step(expr, base_expr, &autoderef, lvalue_pref, idx_ty);
}
- autoderef.unambiguous_final_ty();
- None
+ autoderef.finalize();
+ result
}
/// To type-check `base_expr[index_expr]`, we progressively autoderef
/// This loop implements one step in that search; the autoderef loop
/// is implemented by `lookup_indexing`.
fn try_index_step(&self,
- method_call: MethodCall,
expr: &hir::Expr,
- base_expr: Option<AdjustedRcvr>,
- base_span: Span,
- adjusted_ty: Ty<'tcx>,
+ base_expr: &hir::Expr,
+ autoderef: &Autoderef<'a, 'gcx, 'tcx>,
lvalue_pref: LvaluePreference,
index_ty: Ty<'tcx>)
-> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)>
{
- let tcx = self.tcx;
+ let adjusted_ty = autoderef.unambiguous_final_ty();
debug!("try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \
index_ty={:?})",
expr,
adjusted_ty,
index_ty);
- let input_ty = self.next_ty_var(TypeVariableOrigin::AutoDeref(base_span));
// First, try built-in indexing.
match (adjusted_ty.builtin_index(), &index_ty.sty) {
(Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => {
debug!("try_index_step: success, using built-in indexing");
- // If we had `[T; N]`, we should've caught it before unsizing to `[T]`.
- if let Some(base_expr) = base_expr {
- assert!(!base_expr.unsize);
- self.apply_autoderef_adjustment(
- base_expr.rcvr_expr.id, base_expr.autoderefs, adjusted_ty);
- }
- return Some((tcx.types.usize, ty));
+ let adjustments = autoderef.adjust_steps(lvalue_pref);
+ self.apply_adjustments(base_expr, adjustments);
+ return Some((self.tcx.types.usize, ty));
}
_ => {}
}
- // If some lookup succeeds, write callee into table and extract index/element
- // type from the method signature.
- // If some lookup succeeded, install method in table
- let method = self.try_overloaded_lvalue_op(
- expr.span, base_expr, adjusted_ty, &[input_ty], lvalue_pref, LvalueOp::Index);
-
- method.map(|ok| {
- debug!("try_index_step: success, using overloaded indexing");
- let method = self.register_infer_ok_obligations(ok);
- self.tables.borrow_mut().method_map.insert(method_call, method);
- (input_ty, self.make_overloaded_lvalue_return_type(method).ty)
- })
+ for &unsize in &[false, true] {
+ let mut self_ty = adjusted_ty;
+ if unsize {
+ // We only unsize arrays here.
+ if let ty::TyArray(element_ty, _) = adjusted_ty.sty {
+ self_ty = self.tcx.mk_slice(element_ty);
+ } else {
+ continue;
+ }
+ }
+
+ // If some lookup succeeds, write callee into table and extract index/element
+ // type from the method signature.
+ // If some lookup succeeded, install method in table
+ let input_ty = self.next_ty_var(TypeVariableOrigin::AutoDeref(base_expr.span));
+ let method = self.try_overloaded_lvalue_op(
+ expr.span, self_ty, &[input_ty], lvalue_pref, LvalueOp::Index);
+
+ let result = method.map(|ok| {
+ debug!("try_index_step: success, using overloaded indexing");
+ let method = self.register_infer_ok_obligations(ok);
+
+ let mut adjustments = autoderef.adjust_steps(lvalue_pref);
+ if let ty::TyRef(region, mt) = method.sig.inputs()[0].sty {
+ adjustments.push(Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(region, mt.mutbl)),
+ target: self.tcx.mk_ref(region, ty::TypeAndMut {
+ mutbl: mt.mutbl,
+ ty: adjusted_ty
+ })
+ });
+ }
+ if unsize {
+ adjustments.push(Adjustment {
+ kind: Adjust::Unsize,
+ target: method.sig.inputs()[0]
+ });
+ }
+ self.apply_adjustments(base_expr, adjustments);
+
+ self.write_method_call(expr.id, method);
+ (input_ty, self.make_overloaded_lvalue_return_type(method).ty)
+ });
+ if result.is_some() {
+ return result;
+ }
+ }
+
+ None
}
fn resolve_lvalue_op(&self, op: LvalueOp, is_mut: bool) -> (Option<DefId>, Symbol) {
fn try_overloaded_lvalue_op(&self,
span: Span,
- base_expr: Option<AdjustedRcvr>,
base_ty: Ty<'tcx>,
arg_tys: &[Ty<'tcx>],
lvalue_pref: LvaluePreference,
op: LvalueOp)
-> Option<InferOk<'tcx, MethodCallee<'tcx>>>
{
- debug!("try_overloaded_lvalue_op({:?},{:?},{:?},{:?},{:?})",
+ debug!("try_overloaded_lvalue_op({:?},{:?},{:?},{:?})",
span,
- base_expr,
base_ty,
lvalue_pref,
op);
let (mut_tr, mut_op) = self.resolve_lvalue_op(op, true);
let method = match (lvalue_pref, mut_tr) {
(PreferMutLvalue, Some(trait_did)) => {
- self.lookup_method_in_trait_adjusted(span,
- base_expr,
- mut_op,
- trait_did,
- base_ty,
- Some(arg_tys.to_owned()))
+ self.lookup_method_in_trait(span, mut_op, trait_did, base_ty, Some(arg_tys))
}
_ => None,
};
let (imm_tr, imm_op) = self.resolve_lvalue_op(op, false);
let method = match (method, imm_tr) {
(None, Some(trait_did)) => {
- self.lookup_method_in_trait_adjusted(span,
- base_expr,
- imm_op,
- trait_did,
- base_ty,
- Some(arg_tys.to_owned()))
+ self.lookup_method_in_trait(span, imm_op, trait_did, base_ty, Some(arg_tys))
}
(method, _) => method,
};
fn check_method_argument_types(&self,
sp: Span,
- method_fn_ty: Ty<'tcx>,
- callee_expr: &'gcx hir::Expr,
+ method: Result<MethodCallee<'tcx>, ()>,
args_no_rcvr: &'gcx [hir::Expr],
tuple_arguments: TupleArgumentsFlag,
expected: Expectation<'tcx>)
-> Ty<'tcx> {
- if method_fn_ty.references_error() {
+ let has_error = match method {
+ Ok(method) => {
+ method.substs.references_error() || method.sig.references_error()
+ }
+ Err(_) => true
+ };
+ if has_error {
let err_inputs = self.err_args(args_no_rcvr.len());
let err_inputs = match tuple_arguments {
self.check_argument_types(sp, &err_inputs[..], &[], args_no_rcvr,
false, tuple_arguments, None);
- self.tcx.types.err
- } else {
- match method_fn_ty.sty {
- ty::TyFnDef(def_id, .., ref fty) => {
- // HACK(eddyb) ignore self in the definition (see above).
- let expected_arg_tys = self.expected_inputs_for_expected_output(
- sp,
- expected,
- fty.0.output(),
- &fty.0.inputs()[1..]
- );
- self.check_argument_types(sp, &fty.0.inputs()[1..], &expected_arg_tys[..],
- args_no_rcvr, fty.0.variadic, tuple_arguments,
- self.tcx.hir.span_if_local(def_id));
- fty.0.output()
- }
- _ => {
- span_bug!(callee_expr.span, "method without bare fn type");
- }
- }
+ return self.tcx.types.err;
}
+
+ let method = method.unwrap();
+ // HACK(eddyb) ignore self in the definition (see above).
+ let expected_arg_tys = self.expected_inputs_for_expected_output(
+ sp,
+ expected,
+ method.sig.output(),
+ &method.sig.inputs()[1..]
+ );
+ self.check_argument_types(sp, &method.sig.inputs()[1..], &expected_arg_tys[..],
+ args_no_rcvr, method.sig.variadic, tuple_arguments,
+ self.tcx.hir.span_if_local(method.def_id));
+ method.sig.output()
}
/// Generic function that factors out common logic from function calls,
"expression with never type wound up being adjusted");
let adj_ty = self.next_diverging_ty_var(
TypeVariableOrigin::AdjustmentType(expr.span));
- self.apply_adjustment(expr.id, Adjustment {
+ self.apply_adjustments(expr, vec![Adjustment {
kind: Adjust::NeverToAny,
target: adj_ty
- });
+ }]);
ty = adj_ty;
}
// is polymorphic) and the expected return type.
// No argument expectations are produced if unification fails.
let origin = self.misc(call_span);
- let ures = self.sub_types(false, &origin, formal_ret, ret_ty);
+ let ures = self.at(&origin, self.param_env).sup(ret_ty, formal_ret);
// FIXME(#15760) can't use try! here, FromError doesn't default
// to identity so the resulting type is not constrained.
let expr_t = self.structurally_resolved_type(expr.span, rcvr_t);
let tps = tps.iter().map(|ast_ty| self.to_ty(&ast_ty)).collect::<Vec<_>>();
- let fn_ty = match self.lookup_method(method_name.span,
- method_name.node,
- expr_t,
- tps,
- expr,
- rcvr) {
+ let method = match self.lookup_method(method_name.span,
+ method_name.node,
+ expr_t,
+ tps,
+ expr,
+ rcvr) {
Ok(method) => {
- let method_ty = method.ty;
- let method_call = MethodCall::expr(expr.id);
- self.tables.borrow_mut().method_map.insert(method_call, method);
- method_ty
+ self.write_method_call(expr.id, method);
+ Ok(method)
}
Err(error) => {
if method_name.node != keywords::Invalid.name() {
error,
Some(args));
}
- self.write_error(expr.id);
- self.tcx.types.err
+ Err(())
}
};
// Call the generic checker.
- let ret_ty = self.check_method_argument_types(method_name.span, fn_ty,
- expr, &args[1..],
- DontTupleArguments,
- expected);
-
- ret_ty
+ self.check_method_argument_types(method_name.span, method,
+ &args[1..],
+ DontTupleArguments,
+ expected)
}
fn check_return_expr(&self, return_expr: &'gcx hir::Expr) {
expr_t);
let mut private_candidate = None;
let mut autoderef = self.autoderef(expr.span, expr_t);
- while let Some((base_t, autoderefs)) = autoderef.next() {
+ while let Some((base_t, _)) = autoderef.next() {
match base_t.sty {
ty::TyAdt(base_def, substs) if !base_def.is_enum() => {
debug!("struct named {:?}", base_t);
if let Some(field) = fields.iter().find(|f| f.name.to_ident() == ident) {
let field_ty = self.field_ty(expr.span, field, substs);
if field.vis.is_accessible_from(def_scope, self.tcx) {
- autoderef.finalize(lvalue_pref, base);
- self.apply_autoderef_adjustment(base.id, autoderefs, base_t);
+ let adjustments = autoderef.adjust_steps(lvalue_pref);
+ self.apply_adjustments(base, adjustments);
+ autoderef.finalize();
self.tcx.check_stability(field.did, expr.id, expr.span);
let mut private_candidate = None;
let mut tuple_like = false;
let mut autoderef = self.autoderef(expr.span, expr_t);
- while let Some((base_t, autoderefs)) = autoderef.next() {
+ while let Some((base_t, _)) = autoderef.next() {
let field = match base_t.sty {
ty::TyAdt(base_def, substs) if base_def.is_struct() => {
tuple_like = base_def.struct_variant().ctor_kind == CtorKind::Fn;
};
if let Some(field_ty) = field {
- autoderef.finalize(lvalue_pref, base);
- self.apply_autoderef_adjustment(base.id, autoderefs, base_t);
+ let adjustments = autoderef.adjust_steps(lvalue_pref);
+ self.apply_adjustments(base, adjustments);
+ autoderef.finalize();
return field_ty;
}
}
lvalue_pref);
if !oprnd_t.references_error() {
+ oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
match unop {
hir::UnDeref => {
- oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t);
-
if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) {
oprnd_t = mt.ty;
} else if let Some(ok) = self.try_overloaded_deref(
- expr.span, Some(&oprnd), oprnd_t, lvalue_pref) {
+ expr.span, oprnd_t, lvalue_pref) {
let method = self.register_infer_ok_obligations(ok);
+ if let ty::TyRef(region, mt) = method.sig.inputs()[0].sty {
+ self.apply_adjustments(oprnd, vec![Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(region, mt.mutbl)),
+ target: method.sig.inputs()[0]
+ }]);
+ }
oprnd_t = self.make_overloaded_lvalue_return_type(method).ty;
- self.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id),
- method);
+ self.write_method_call(expr.id, method);
} else {
self.type_error_message(expr.span, |actual| {
format!("type `{}` cannot be \
}
}
hir::UnNot => {
- oprnd_t = self.structurally_resolved_type(oprnd.span,
- oprnd_t);
- let result = self.check_user_unop("!", "not",
- tcx.lang_items.not_trait(),
- expr, &oprnd, oprnd_t, unop);
+ let result = self.check_user_unop(expr, oprnd_t, unop);
// If it's builtin, we can reuse the type, this helps inference.
if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) {
oprnd_t = result;
}
}
hir::UnNeg => {
- oprnd_t = self.structurally_resolved_type(oprnd.span,
- oprnd_t);
- let result = self.check_user_unop("-", "neg",
- tcx.lang_items.neg_trait(),
- expr, &oprnd, oprnd_t, unop);
+ let result = self.check_user_unop(expr, oprnd_t, unop);
// If it's builtin, we can reuse the type, this helps inference.
if !(oprnd_t.is_integral() || oprnd_t.is_fp()) {
oprnd_t = result;
// We always require that the type provided as the value for
// a type parameter outlives the moment of instantiation.
- self.opt_node_ty_substs(expr.id, |item_substs| {
- self.add_wf_bounds(&item_substs.substs, expr);
- });
+ let substs = self.tables.borrow().node_substs(expr.id);
+ self.add_wf_bounds(substs, expr);
ty
}
}
// Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary.
- // The newly resolved definition is written into `type_relative_path_defs`.
+ // The newly resolved definition is written into `type_dependent_defs`.
fn finish_resolving_struct_path(&self,
qpath: &hir::QPath,
path_span: Span,
ty, def, segment);
// Write back the new resolution.
- self.tables.borrow_mut().type_relative_path_defs.insert(node_id, def);
+ self.tables.borrow_mut().type_dependent_defs.insert(node_id, def);
(def, ty)
}
}
// Resolve associated value path into a base type and associated constant or method definition.
- // The newly resolved definition is written into `type_relative_path_defs`.
+ // The newly resolved definition is written into `type_dependent_defs`.
pub fn resolve_ty_and_def_ufcs<'b>(&self,
qpath: &'b hir::QPath,
node_id: ast::NodeId,
};
// Write back the new resolution.
- self.tables.borrow_mut().type_relative_path_defs.insert(node_id, def);
+ self.tables.borrow_mut().type_dependent_defs.insert(node_id, def);
(def, Some(ty), slice::ref_slice(&**item_segment))
}
_ => return,
};
let last_expr_ty = self.expr_ty(last_expr);
- if self.can_sub_types(last_expr_ty, expected_ty).is_err() {
+ if self.can_sub(self.param_env, last_expr_ty, expected_ty).is_err() {
return;
}
let original_span = original_sp(last_stmt.span, blk.span);
let ty = self.local_ty(span, nid);
let ty = self.normalize_associated_types_in(span, &ty);
self.write_ty(node_id, ty);
- self.write_substs(node_id, ty::ItemSubsts {
- substs: self.tcx.intern_substs(&[])
- });
return ty;
}
_ => {}
let ty = self.tcx.type_of(impl_def_id);
let impl_ty = self.instantiate_type_scheme(span, &substs, &ty);
- match self.sub_types(false, &self.misc(span), self_ty, impl_ty) {
+ match self.at(&self.misc(span), self.param_env).sup(impl_ty, self_ty) {
Ok(ok) => self.register_infer_ok_obligations(ok),
Err(_) => {
span_bug!(span,
debug!("instantiate_value_path: type of {:?} is {:?}",
node_id,
ty_substituted);
- self.write_substs(node_id, ty::ItemSubsts {
- substs: substs
- });
+ self.write_substs(node_id, substs);
ty_substituted
}
//! Code related to processing overloaded binary and unary operators.
use super::FnCtxt;
-use hir::def_id::DefId;
-use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue, TypeVariants};
+use super::method::MethodCallee;
+use rustc::ty::{self, Ty, TypeFoldable, PreferMutLvalue, TypeVariants};
use rustc::ty::TypeVariants::{TyStr, TyRef};
+use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
use rustc::infer::type_variable::TypeVariableOrigin;
use errors;
-use syntax::ast;
+use syntax_pos::Span;
use syntax::symbol::Symbol;
use rustc::hir;
lhs_ty,
is_assign);
- let (name, trait_def_id) = self.name_and_trait_def_id(op, is_assign);
-
// NB: As we have not yet type-checked the RHS, we don't have the
// type at hand. Make a variable to represent it. The whole reason
// for this indirection is so that, below, we can check the expr
// particularly for things like `String + &String`.
let rhs_ty_var = self.next_ty_var(TypeVariableOrigin::MiscVariable(rhs_expr.span));
- let return_ty = self.lookup_op_method(expr, lhs_ty, vec![rhs_ty_var],
- Symbol::intern(name), trait_def_id,
- lhs_expr);
+ let result = self.lookup_op_method(lhs_ty, &[rhs_ty_var], Op::Binary(op, is_assign));
// see `NB` above
let rhs_ty = self.check_expr_coercable_to_type(rhs_expr, rhs_ty_var);
- let return_ty = match return_ty {
- Ok(return_ty) => return_ty,
+ let return_ty = match result {
+ Ok(method) => {
+ let by_ref_binop = !op.node.is_by_value();
+ if is_assign == IsAssign::Yes || by_ref_binop {
+ if let ty::TyRef(region, mt) = method.sig.inputs()[0].sty {
+ let autoref = Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(region, mt.mutbl)),
+ target: method.sig.inputs()[0]
+ };
+ self.apply_adjustments(lhs_expr, vec![autoref]);
+ }
+ }
+ if by_ref_binop {
+ if let ty::TyRef(region, mt) = method.sig.inputs()[1].sty {
+ let autoref = Adjustment {
+ kind: Adjust::Borrow(AutoBorrow::Ref(region, mt.mutbl)),
+ target: method.sig.inputs()[1]
+ };
+ // HACK(eddyb) Bypass checks due to reborrows being in
+ // some cases applied on the RHS, on top of which we need
+ // to autoref, which is not allowed by apply_adjustments.
+ // self.apply_adjustments(rhs_expr, vec![autoref]);
+ self.tables.borrow_mut().adjustments.entry(rhs_expr.id)
+ .or_insert(vec![]).push(autoref);
+ }
+ }
+ self.write_method_call(expr.id, method);
+
+ method.sig.output()
+ }
Err(()) => {
// error types are considered "builtin"
if !lhs_ty.references_error() {
lhs_ty);
if let TypeVariants::TyRef(_, ref ty_mut) = lhs_ty.sty {
- if !self.infcx.type_moves_by_default(ty_mut.ty, lhs_expr.span) &&
- self.lookup_op_method(expr, ty_mut.ty, vec![rhs_ty],
- Symbol::intern(name), trait_def_id,
- lhs_expr).is_ok() {
+ if {
+ !self.infcx.type_moves_by_default(self.param_env,
+ ty_mut.ty,
+ lhs_expr.span) &&
+ self.lookup_op_method(ty_mut.ty,
+ &[rhs_ty],
+ Op::Binary(op, is_assign))
+ .is_ok()
+ } {
err.note(
&format!(
"this is a reference to a type that `{}` can be applied \
}
pub fn check_user_unop(&self,
- op_str: &str,
- mname: &str,
- trait_did: Option<DefId>,
ex: &'gcx hir::Expr,
- operand_expr: &'gcx hir::Expr,
operand_ty: Ty<'tcx>,
op: hir::UnOp)
-> Ty<'tcx>
{
assert!(op.is_by_value());
- let mname = Symbol::intern(mname);
- match self.lookup_op_method(ex, operand_ty, vec![], mname, trait_did, operand_expr) {
- Ok(t) => t,
+ match self.lookup_op_method(operand_ty, &[], Op::Unary(op, ex.span)) {
+ Ok(method) => {
+ self.write_method_call(ex.id, method);
+ method.sig.output()
+ }
Err(()) => {
let actual = self.resolve_type_vars_if_possible(&operand_ty);
if !actual.references_error() {
struct_span_err!(self.tcx.sess, ex.span, E0600,
"cannot apply unary operator `{}` to type `{}`",
- op_str, actual).emit();
+ op.as_str(), actual).emit();
}
self.tcx.types.err
}
}
}
- fn name_and_trait_def_id(&self,
- op: hir::BinOp,
- is_assign: IsAssign)
- -> (&'static str, Option<DefId>) {
+ fn lookup_op_method(&self, lhs_ty: Ty<'tcx>, other_tys: &[Ty<'tcx>], op: Op)
+ -> Result<MethodCallee<'tcx>, ()>
+ {
let lang = &self.tcx.lang_items;
- if let IsAssign::Yes = is_assign {
+ let span = match op {
+ Op::Binary(op, _) => op.span,
+ Op::Unary(_, span) => span
+ };
+ let (opname, trait_did) = if let Op::Binary(op, IsAssign::Yes) = op {
match op.node {
hir::BiAdd => ("add_assign", lang.add_assign_trait()),
hir::BiSub => ("sub_assign", lang.sub_assign_trait()),
hir::BiGe | hir::BiGt |
hir::BiEq | hir::BiNe |
hir::BiAnd | hir::BiOr => {
- span_bug!(op.span,
+ span_bug!(span,
"impossible assignment operation: {}=",
op.node.as_str())
}
}
- } else {
+ } else if let Op::Binary(op, IsAssign::No) = op {
match op.node {
hir::BiAdd => ("add", lang.add_trait()),
hir::BiSub => ("sub", lang.sub_trait()),
hir::BiEq => ("eq", lang.eq_trait()),
hir::BiNe => ("ne", lang.eq_trait()),
hir::BiAnd | hir::BiOr => {
- span_bug!(op.span, "&& and || are not overloadable")
+ span_bug!(span, "&& and || are not overloadable")
}
}
- }
- }
+ } else if let Op::Unary(hir::UnNot, _) = op {
+ ("not", lang.not_trait())
+ } else if let Op::Unary(hir::UnNeg, _) = op {
+ ("neg", lang.neg_trait())
+ } else {
+ bug!("lookup_op_method: op not supported: {:?}", op)
+ };
- fn lookup_op_method(&self,
- expr: &'gcx hir::Expr,
- lhs_ty: Ty<'tcx>,
- other_tys: Vec<Ty<'tcx>>,
- opname: ast::Name,
- trait_did: Option<DefId>,
- lhs_expr: &'a hir::Expr)
- -> Result<Ty<'tcx>,()>
- {
- debug!("lookup_op_method(expr={:?}, lhs_ty={:?}, opname={:?}, \
- trait_did={:?}, lhs_expr={:?})",
- expr,
+ debug!("lookup_op_method(lhs_ty={:?}, op={:?}, opname={:?}, trait_did={:?})",
lhs_ty,
+ op,
opname,
- trait_did,
- lhs_expr);
-
- let method = match trait_did {
- Some(trait_did) => {
- let lhs_expr = Some(super::AdjustedRcvr {
- rcvr_expr: lhs_expr, autoderefs: 0, unsize: false
- });
- self.lookup_method_in_trait_adjusted(expr.span,
- lhs_expr,
- opname,
- trait_did,
- lhs_ty,
- Some(other_tys))
- }
- None => None
- };
+ trait_did);
+
+ let method = trait_did.and_then(|trait_did| {
+ let opname = Symbol::intern(opname);
+ self.lookup_method_in_trait(span, opname, trait_did, lhs_ty, Some(other_tys))
+ });
match method {
Some(ok) => {
let method = self.register_infer_ok_obligations(ok);
self.select_obligations_where_possible();
- let method_ty = method.ty;
-
- // HACK(eddyb) Fully qualified path to work around a resolve bug.
- let method_call = ::rustc::ty::MethodCall::expr(expr.id);
- self.tables.borrow_mut().method_map.insert(method_call, method);
-
- // extract return type for method; all late bound regions
- // should have been instantiated by now
- let ret_ty = method_ty.fn_ret();
- Ok(self.tcx.no_late_bound_regions(&ret_ty).unwrap())
+ Ok(method)
}
None => {
Err(())
}
/// Whether the binary operation is an assignment (`a += b`), or not (`a + b`)
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, Copy, Debug, PartialEq)]
enum IsAssign {
No,
Yes,
}
+#[derive(Clone, Copy, Debug)]
+enum Op {
+ Binary(hir::BinOp, IsAssign),
+ Unary(hir::UnOp, Span),
+}
+
/// Returns true if this is a built-in arithmetic operation (e.g. u32
/// + u32, i16x4 == i16x4) and false if these types would have to be
/// overloaded to be legal. There are two reasons that we distinguish
use rustc::hir::def_id::DefId;
use rustc::ty::subst::Substs;
use rustc::traits;
-use rustc::ty::{self, Ty, MethodCall, TypeFoldable};
+use rustc::ty::{self, Ty, TypeFoldable};
use rustc::infer::{self, GenericKind, SubregionOrigin, VerifyBound};
use rustc::ty::adjustment;
use rustc::ty::wf::ImpliedBound;
for &ty in fn_sig_tys {
let ty = self.resolve_type(ty);
debug!("relate_free_regions(t={:?})", ty);
- let implied_bounds = ty::wf::implied_bounds(self, body_id, ty, span);
+ let implied_bounds =
+ ty::wf::implied_bounds(self, self.fcx.param_env, body_id, ty, span);
// Record any relations between free regions that we observe into the free-region-map.
self.free_region_map.relate_free_regions_from_implied_bounds(&implied_bounds);
self.type_must_outlive(infer::ExprTypeIsNotInScope(expr_ty, expr.span),
expr_ty, expr_region);
- let method_call = MethodCall::expr(expr.id);
- let opt_method_callee = self.tables.borrow().method_map.get(&method_call).cloned();
- let has_method_map = opt_method_callee.is_some();
+ let is_method_call = self.tables.borrow().is_method_call(expr);
// If we are calling a method (either explicitly or via an
// overloaded operator), check that all of the types provided as
// arguments for its type parameters are well-formed, and all the regions
// provided as arguments outlive the call.
- if let Some(callee) = opt_method_callee {
+ if is_method_call {
let origin = match expr.node {
hir::ExprMethodCall(..) =>
infer::ParameterOrigin::MethodCall,
infer::ParameterOrigin::OverloadedOperator
};
- self.substs_wf_in_scope(origin, &callee.substs, expr.span, expr_region);
- self.type_must_outlive(infer::ExprTypeIsNotInScope(callee.ty, expr.span),
- callee.ty, expr_region);
+ let substs = self.tables.borrow().node_substs(expr.id);
+ self.substs_wf_in_scope(origin, substs, expr.span, expr_region);
+ // Arguments (sub-expressions) are checked via `constrain_call`, below.
}
// Check any autoderefs or autorefs that appear.
- let adjustment = self.tables.borrow().adjustments.get(&expr.id).map(|a| a.clone());
- if let Some(adjustment) = adjustment {
- debug!("adjustment={:?}", adjustment);
- match adjustment.kind {
- adjustment::Adjust::DerefRef { autoderefs, ref autoref, .. } => {
- let expr_ty = self.resolve_node_type(expr.id);
- self.constrain_autoderefs(expr, autoderefs, expr_ty);
- if let Some(ref autoref) = *autoref {
- self.link_autoref(expr, autoderefs, autoref);
-
- // Require that the resulting region encompasses
- // the current node.
- //
- // FIXME(#6268) remove to support nested method calls
- self.type_of_node_must_outlive(infer::AutoBorrow(expr.span),
- expr.id, expr_region);
- }
- }
- /*
- adjustment::AutoObject(_, ref bounds, ..) => {
- // Determine if we are casting `expr` to a trait
- // instance. If so, we have to be sure that the type
- // of the source obeys the new region bound.
- let source_ty = self.resolve_node_type(expr.id);
- self.type_must_outlive(infer::RelateObjectBound(expr.span),
- source_ty, bounds.region_bound);
- }
- */
- _ => {}
- }
-
- // If necessary, constrain destructors in the unadjusted form of this
- // expression.
- let cmt_result = {
- let mc = mc::MemCategorizationContext::new(self, &self.region_maps);
- mc.cat_expr_unadjusted(expr)
- };
- match cmt_result {
- Ok(head_cmt) => {
- self.check_safety_of_rvalue_destructor_if_necessary(head_cmt,
- expr.span);
- }
- Err(..) => {
- self.tcx.sess.delay_span_bug(expr.span, "cat_expr_unadjusted Errd");
- }
- }
- }
+ let cmt_result = self.constrain_adjustments(expr);
// If necessary, constrain destructors in this expression. This will be
// the adjusted form if there is an adjustment.
- let cmt_result = {
- let mc = mc::MemCategorizationContext::new(self, &self.region_maps);
- mc.cat_expr(expr)
- };
match cmt_result {
Ok(head_cmt) => {
self.check_safety_of_rvalue_destructor_if_necessary(head_cmt, expr.span);
expr, self.repeating_scope);
match expr.node {
hir::ExprPath(_) => {
- self.fcx.opt_node_ty_substs(expr.id, |item_substs| {
- let origin = infer::ParameterOrigin::Path;
- self.substs_wf_in_scope(origin, &item_substs.substs, expr.span, expr_region);
- });
+ let substs = self.tables.borrow().node_substs(expr.id);
+ let origin = infer::ParameterOrigin::Path;
+ self.substs_wf_in_scope(origin, substs, expr.span, expr_region);
}
hir::ExprCall(ref callee, ref args) => {
- if has_method_map {
- self.constrain_call(expr, Some(&callee),
- args.iter().map(|e| &*e), false);
+ if is_method_call {
+ self.constrain_call(expr, Some(&callee), args.iter().map(|e| &*e));
} else {
self.constrain_callee(callee.id, expr, &callee);
- self.constrain_call(expr, None,
- args.iter().map(|e| &*e), false);
+ self.constrain_call(expr, None, args.iter().map(|e| &*e));
}
intravisit::walk_expr(self, expr);
}
hir::ExprMethodCall(.., ref args) => {
- self.constrain_call(expr, Some(&args[0]),
- args[1..].iter().map(|e| &*e), false);
+ self.constrain_call(expr, Some(&args[0]), args[1..].iter().map(|e| &*e));
intravisit::walk_expr(self, expr);
}
hir::ExprAssignOp(_, ref lhs, ref rhs) => {
- if has_method_map {
- self.constrain_call(expr, Some(&lhs),
- Some(&**rhs).into_iter(), false);
+ if is_method_call {
+ self.constrain_call(expr, Some(&lhs), Some(&**rhs).into_iter());
}
intravisit::walk_expr(self, expr);
}
- hir::ExprIndex(ref lhs, ref rhs) if has_method_map => {
- self.constrain_call(expr, Some(&lhs),
- Some(&**rhs).into_iter(), true);
+ hir::ExprIndex(ref lhs, ref rhs) if is_method_call => {
+ self.constrain_call(expr, Some(&lhs), Some(&**rhs).into_iter());
intravisit::walk_expr(self, expr);
},
- hir::ExprBinary(op, ref lhs, ref rhs) if has_method_map => {
- let implicitly_ref_args = !op.node.is_by_value();
-
- // As `expr_method_call`, but the call is via an
- // overloaded op. Note that we (sadly) currently use an
- // implicit "by ref" sort of passing style here. This
- // should be converted to an adjustment!
- self.constrain_call(expr, Some(&lhs),
- Some(&**rhs).into_iter(), implicitly_ref_args);
+ hir::ExprBinary(_, ref lhs, ref rhs) if is_method_call => {
+ // As `ExprMethodCall`, but the call is via an overloaded op.
+ self.constrain_call(expr, Some(&lhs), Some(&**rhs).into_iter());
intravisit::walk_expr(self, expr);
}
intravisit::walk_expr(self, expr);
}
- hir::ExprUnary(op, ref lhs) if has_method_map => {
- let implicitly_ref_args = !op.is_by_value();
-
- // As above.
- self.constrain_call(expr, Some(&lhs),
- None::<hir::Expr>.iter(), implicitly_ref_args);
-
- intravisit::walk_expr(self, expr);
- }
-
hir::ExprUnary(hir::UnDeref, ref base) => {
// For *a, the lifetime of a must enclose the deref
- let method_call = MethodCall::expr(expr.id);
- let base_ty = match self.tables.borrow().method_map.get(&method_call) {
- Some(method) => {
- self.constrain_call(expr, Some(&base),
- None::<hir::Expr>.iter(), true);
- // late-bound regions in overloaded method calls are instantiated
- let fn_ret = self.tcx.no_late_bound_regions(&method.ty.fn_ret());
- fn_ret.unwrap()
- }
- None => self.resolve_node_type(base.id)
- };
+ if is_method_call {
+ self.constrain_call(expr, Some(base), None::<hir::Expr>.iter());
+ }
+ // For overloaded derefs, base_ty is the input to `Deref::deref`,
+ // but it's a reference type using the same region as the output.
+ let base_ty = self.resolve_expr_type_adjusted(base);
if let ty::TyRef(r_ptr, _) = base_ty.sty {
self.mk_subregion_due_to_dereference(expr.span, expr_region, r_ptr);
}
intravisit::walk_expr(self, expr);
}
+ hir::ExprUnary(_, ref lhs) if is_method_call => {
+ // As above.
+ self.constrain_call(expr, Some(&lhs), None::<hir::Expr>.iter());
+
+ intravisit::walk_expr(self, expr);
+ }
+
hir::ExprIndex(ref vec_expr, _) => {
// For a[b], the lifetime of a must enclose the deref
let vec_type = self.resolve_expr_type_adjusted(&vec_expr);
fn constrain_call<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self,
call_expr: &hir::Expr,
receiver: Option<&hir::Expr>,
- arg_exprs: I,
- implicitly_ref_args: bool) {
+ arg_exprs: I) {
//! Invoked on every call site (i.e., normal calls, method calls,
//! and overloaded operators). Constrains the regions which appear
//! in the type of the function. Also constrains the regions that
//! appear in the arguments appropriately.
- debug!("constrain_call(call_expr={:?}, \
- receiver={:?}, \
- implicitly_ref_args={})",
+ debug!("constrain_call(call_expr={:?}, receiver={:?})",
call_expr,
- receiver,
- implicitly_ref_args);
+ receiver);
// `callee_region` is the scope representing the time in which the
// call occurs.
// valid for at least the lifetime of the function:
self.type_of_node_must_outlive(infer::CallArg(arg_expr.span),
arg_expr.id, callee_region);
-
- // unfortunately, there are two means of taking implicit
- // references, and we need to propagate constraints as a
- // result. modes are going away and the "DerefArgs" code
- // should be ported to use adjustments
- if implicitly_ref_args {
- self.link_by_ref(arg_expr, callee_scope);
- }
}
// as loop above, but for receiver
debug!("receiver: {:?}", r);
self.type_of_node_must_outlive(infer::CallRcvr(r.span),
r.id, callee_region);
- if implicitly_ref_args {
- self.link_by_ref(&r, callee_scope);
- }
}
}
- /// Invoked on any auto-dereference that occurs. Checks that if this is a region pointer being
+ /// Invoked on any adjustments that occur. Checks that if this is a region pointer being
/// dereferenced, the lifetime of the pointer includes the deref expr.
- fn constrain_autoderefs(&mut self,
- deref_expr: &hir::Expr,
- derefs: usize,
- mut derefd_ty: Ty<'tcx>)
- {
- debug!("constrain_autoderefs(deref_expr={:?}, derefs={}, derefd_ty={:?})",
- deref_expr,
- derefs,
- derefd_ty);
-
- let r_deref_expr = self.tcx.node_scope_region(deref_expr.id);
- for i in 0..derefs {
- let method_call = MethodCall::autoderef(deref_expr.id, i as u32);
- debug!("constrain_autoderefs: method_call={:?} (of {:?} total)", method_call, derefs);
-
- let method = self.tables.borrow().method_map.get(&method_call).map(|m| m.clone());
-
- derefd_ty = match method {
- Some(method) => {
- debug!("constrain_autoderefs: #{} is overloaded, method={:?}",
- i, method);
-
- let origin = infer::ParameterOrigin::OverloadedDeref;
- self.substs_wf_in_scope(origin, method.substs, deref_expr.span, r_deref_expr);
-
- // Treat overloaded autoderefs as if an AutoBorrow adjustment
- // was applied on the base type, as that is always the case.
- let fn_sig = method.ty.fn_sig();
- let fn_sig = // late-bound regions should have been instantiated
- self.tcx.no_late_bound_regions(&fn_sig).unwrap();
- let self_ty = fn_sig.inputs()[0];
- let (m, r) = match self_ty.sty {
- ty::TyRef(r, ref m) => (m.mutbl, r),
- _ => {
- span_bug!(
- deref_expr.span,
- "bad overloaded deref type {:?}",
- method.ty)
- }
- };
-
- debug!("constrain_autoderefs: receiver r={:?} m={:?}",
- r, m);
-
- {
- let mc = mc::MemCategorizationContext::new(self, &self.region_maps);
- let self_cmt = ignore_err!(mc.cat_expr_autoderefd(deref_expr, i));
- debug!("constrain_autoderefs: self_cmt={:?}",
- self_cmt);
- self.link_region(deref_expr.span, r,
- ty::BorrowKind::from_mutbl(m), self_cmt);
- }
+ fn constrain_adjustments(&mut self, expr: &hir::Expr) -> mc::McResult<mc::cmt<'tcx>> {
+ debug!("constrain_adjustments(expr={:?})", expr);
- // Specialized version of constrain_call.
- self.type_must_outlive(infer::CallRcvr(deref_expr.span),
- self_ty, r_deref_expr);
- self.type_must_outlive(infer::CallReturn(deref_expr.span),
- fn_sig.output(), r_deref_expr);
- fn_sig.output()
- }
- None => derefd_ty
- };
+ let mut cmt = {
+ let mc = mc::MemCategorizationContext::new(self, &self.region_maps);
+ mc.cat_expr_unadjusted(expr)?
+ };
+
+ //NOTE(@jroesch): mixed RefCell borrow causes crash
+ let adjustments = self.tables.borrow().expr_adjustments(&expr).to_vec();
+ if adjustments.is_empty() {
+ return Ok(cmt);
+ }
- if let ty::TyRef(r_ptr, _) = derefd_ty.sty {
- self.mk_subregion_due_to_dereference(deref_expr.span,
- r_deref_expr, r_ptr);
+ debug!("constrain_adjustments: adjustments={:?}", adjustments);
+
+ // If necessary, constrain destructors in the unadjusted form of this
+ // expression.
+ self.check_safety_of_rvalue_destructor_if_necessary(cmt.clone(), expr.span);
+
+ let expr_region = self.tcx.node_scope_region(expr.id);
+ for adjustment in adjustments {
+ debug!("constrain_adjustments: adjustment={:?}, cmt={:?}",
+ adjustment, cmt);
+
+ if let adjustment::Adjust::Deref(Some(deref)) = adjustment.kind {
+ debug!("constrain_adjustments: overloaded deref: {:?}", deref);
+
+ // Treat overloaded autoderefs as if an AutoBorrow adjustment
+ // was applied on the base type, as that is always the case.
+ let input = self.tcx.mk_ref(deref.region, ty::TypeAndMut {
+ ty: cmt.ty,
+ mutbl: deref.mutbl,
+ });
+ let output = self.tcx.mk_ref(deref.region, ty::TypeAndMut {
+ ty: adjustment.target,
+ mutbl: deref.mutbl,
+ });
+
+ self.link_region(expr.span, deref.region,
+ ty::BorrowKind::from_mutbl(deref.mutbl), cmt.clone());
+
+ // Specialized version of constrain_call.
+ self.type_must_outlive(infer::CallRcvr(expr.span),
+ input, expr_region);
+ self.type_must_outlive(infer::CallReturn(expr.span),
+ output, expr_region);
+ }
+
+ if let adjustment::Adjust::Borrow(ref autoref) = adjustment.kind {
+ self.link_autoref(expr, cmt.clone(), autoref);
+
+ // Require that the resulting region encompasses
+ // the current node.
+ //
+ // FIXME(#6268) remove to support nested method calls
+ self.type_of_node_must_outlive(infer::AutoBorrow(expr.span),
+ expr.id, expr_region);
+ }
+
+ {
+ let mc = mc::MemCategorizationContext::new(self, &self.region_maps);
+ cmt = mc.cat_expr_adjusted(expr, cmt, &adjustment)?;
}
- match derefd_ty.builtin_deref(true, ty::NoPreference) {
- Some(mt) => derefd_ty = mt.ty,
- /* if this type can't be dereferenced, then there's already an error
- in the session saying so. Just bail out for now */
- None => break
+ if let Categorization::Deref(_, mc::BorrowedPtr(_, r_ptr)) = cmt.cat {
+ self.mk_subregion_due_to_dereference(expr.span,
+ expr_region, r_ptr);
}
}
+
+ Ok(cmt)
}
pub fn mk_subregion_due_to_dereference(&mut self,
cmt: mc::cmt<'tcx>,
span: Span) {
match cmt.cat {
- Categorization::Rvalue(region, _) => {
+ Categorization::Rvalue(region) => {
match *region {
ty::ReScope(rvalue_scope) => {
let typ = self.resolve_type(cmt.ty);
// is going to fail anyway, so just stop here and let typeck
// report errors later on in the writeback phase.
let ty0 = self.resolve_node_type(id);
- let ty = self.tables.borrow().adjustments.get(&id).map_or(ty0, |adj| adj.target);
+ let ty = self.tables.borrow().adjustments.get(&id)
+ .and_then(|adj| adj.last())
+ .map_or(ty0, |adj| adj.target);
let ty = self.resolve_type(ty);
debug!("constrain_regions_in_type_of_node(\
ty={}, ty0={}, id={}, minimum_lifetime={:?})",
let arg_ty = self.node_ty(arg.id);
let re_scope = self.tcx.mk_region(ty::ReScope(body_scope));
let arg_cmt = mc.cat_rvalue(
- arg.id, arg.pat.span, re_scope, re_scope, arg_ty);
+ arg.id, arg.pat.span, re_scope, arg_ty);
debug!("arg_ty={:?} arg_cmt={:?} arg={:?}",
arg_ty,
arg_cmt,
/// autoref'd.
fn link_autoref(&self,
expr: &hir::Expr,
- autoderefs: usize,
+ expr_cmt: mc::cmt<'tcx>,
autoref: &adjustment::AutoBorrow<'tcx>)
{
- debug!("link_autoref(autoderefs={}, autoref={:?})", autoderefs, autoref);
- let mc = mc::MemCategorizationContext::new(self, &self.region_maps);
- let expr_cmt = ignore_err!(mc.cat_expr_autoderefd(expr, autoderefs));
- debug!("expr_cmt={:?}", expr_cmt);
+ debug!("link_autoref(autoref={:?}, expr_cmt={:?})", autoref, expr_cmt);
match *autoref {
adjustment::AutoBorrow::Ref(r, m) => {
}
}
- /// Computes the guarantor for cases where the `expr` is being passed by implicit reference and
- /// must outlive `callee_scope`.
- fn link_by_ref(&self,
- expr: &hir::Expr,
- callee_scope: CodeExtent) {
- debug!("link_by_ref(expr={:?}, callee_scope={:?})",
- expr, callee_scope);
- let mc = mc::MemCategorizationContext::new(self, &self.region_maps);
- let expr_cmt = ignore_err!(mc.cat_expr(expr));
- let borrow_region = self.tcx.mk_region(ty::ReScope(callee_scope));
- self.link_region(expr.span, borrow_region, ty::ImmBorrow, expr_cmt);
- }
-
/// Like `link_region()`, except that the region is extracted from the type of `id`,
/// which must be some reference (`&T`, `&str`, etc).
fn link_region_from_node_type(&self,
borrow_kind,
borrow_cmt);
match borrow_cmt.cat.clone() {
- Categorization::Deref(ref_cmt, _,
- mc::Implicit(ref_kind, ref_region)) |
- Categorization::Deref(ref_cmt, _,
- mc::BorrowedPtr(ref_kind, ref_region)) => {
+ Categorization::Deref(ref_cmt, mc::Implicit(ref_kind, ref_region)) |
+ Categorization::Deref(ref_cmt, mc::BorrowedPtr(ref_kind, ref_region)) => {
match self.link_reborrowed_region(span,
borrow_region, borrow_kind,
ref_cmt, ref_region, ref_kind,
}
Categorization::Downcast(cmt_base, _) |
- Categorization::Deref(cmt_base, _, mc::Unique) |
+ Categorization::Deref(cmt_base, mc::Unique) |
Categorization::Interior(cmt_base, _) => {
// Borrowing interior or owned data requires the base
// to be valid and borrowable in the same fashion.
borrow_kind = borrow_kind;
}
- Categorization::Deref(.., mc::UnsafePtr(..)) |
+ Categorization::Deref(_, mc::UnsafePtr(..)) |
Categorization::StaticItem |
Categorization::Upvar(..) |
Categorization::Local(..) |
// check whether this predicate applies to our current projection
let cause = self.fcx.misc(span);
- match self.eq_types(false, &cause, ty, outlives.0) {
+ match self.at(&cause, self.fcx.param_env).eq(outlives.0, ty) {
Ok(ok) => {
self.register_infer_ok_obligations(ok);
Ok(outlives.1)
{
let body_owner_def_id = self.fcx.tcx.hir.body_owner_def_id(body.id());
let region_maps = &self.fcx.tcx.region_maps(body_owner_def_id);
+ let param_env = self.fcx.param_env;
let mut euv =
euv::ExprUseVisitor::with_options(self,
self.fcx,
+ param_env,
region_maps,
mc::MemCategorizationOptions {
during_closure_kind_inference: true
let closure_def_id = self.fcx.tcx.hir.local_def_id(id);
debug!("closure_kind({:?}) = {:?}", closure_def_id, kind);
- let mut deferred_call_resolutions =
+ let deferred_call_resolutions =
self.fcx.remove_deferred_call_resolutions(closure_def_id);
- for deferred_call_resolution in &mut deferred_call_resolutions {
+ for deferred_call_resolution in deferred_call_resolutions {
deferred_call_resolution.resolve(self.fcx);
}
}
debug!("adjust_upvar_borrow_kind_for_consume: guarantor={:?}",
guarantor);
match guarantor.cat {
- Categorization::Deref(.., mc::BorrowedPtr(..)) |
- Categorization::Deref(.., mc::Implicit(..)) => {
+ Categorization::Deref(_, mc::BorrowedPtr(..)) |
+ Categorization::Deref(_, mc::Implicit(..)) => {
match cmt.note {
mc::NoteUpvarRef(upvar_id) => {
debug!("adjust_upvar_borrow_kind_for_consume: \
cmt);
match cmt.cat.clone() {
- Categorization::Deref(base, _, mc::Unique) |
+ Categorization::Deref(base, mc::Unique) |
Categorization::Interior(base, _) |
Categorization::Downcast(base, _) => {
// Interior or owned data is mutable if base is
self.adjust_upvar_borrow_kind_for_mut(base);
}
- Categorization::Deref(base, _, mc::BorrowedPtr(..)) |
- Categorization::Deref(base, _, mc::Implicit(..)) => {
+ Categorization::Deref(base, mc::BorrowedPtr(..)) |
+ Categorization::Deref(base, mc::Implicit(..)) => {
if !self.try_adjust_upvar_deref(cmt, ty::MutBorrow) {
// assignment to deref of an `&mut`
// borrowed pointer implies that the
}
}
- Categorization::Deref(.., mc::UnsafePtr(..)) |
+ Categorization::Deref(_, mc::UnsafePtr(..)) |
Categorization::StaticItem |
Categorization::Rvalue(..) |
Categorization::Local(_) |
cmt);
match cmt.cat.clone() {
- Categorization::Deref(base, _, mc::Unique) |
+ Categorization::Deref(base, mc::Unique) |
Categorization::Interior(base, _) |
Categorization::Downcast(base, _) => {
// Interior or owned data is unique if base is
self.adjust_upvar_borrow_kind_for_unique(base);
}
- Categorization::Deref(base, _, mc::BorrowedPtr(..)) |
- Categorization::Deref(base, _, mc::Implicit(..)) => {
+ Categorization::Deref(base, mc::BorrowedPtr(..)) |
+ Categorization::Deref(base, mc::Implicit(..)) => {
if !self.try_adjust_upvar_deref(cmt, ty::UniqueImmBorrow) {
// for a borrowed pointer to be unique, its
// base must be unique
}
}
- Categorization::Deref(.., mc::UnsafePtr(..)) |
+ Categorization::Deref(_, mc::UnsafePtr(..)) |
Categorization::StaticItem |
Categorization::Rvalue(..) |
Categorization::Local(_) |
inherited: super::InheritedBuilder<'a, 'gcx, 'tcx>,
code: ObligationCauseCode<'gcx>,
id: ast::NodeId,
- span: Span
+ span: Span,
+ param_env: ty::ParamEnv<'tcx>,
}
impl<'a, 'gcx, 'tcx> CheckWfFcxBuilder<'a, 'gcx, 'tcx> {
let code = self.code.clone();
let id = self.id;
let span = self.span;
+ let param_env = self.param_env;
self.inherited.enter(|inh| {
- let fcx = FnCtxt::new(&inh, id);
+ let fcx = FnCtxt::new(&inh, param_env, id);
let wf_tys = f(&fcx, &mut CheckTypeWellFormedVisitor {
tcx: fcx.tcx.global_tcx(),
code: code
fn for_id<'tcx>(&self, id: ast::NodeId, span: Span)
-> CheckWfFcxBuilder<'a, 'gcx, 'tcx> {
+ let def_id = self.tcx.hir.local_def_id(id);
CheckWfFcxBuilder {
- inherited: Inherited::build(self.tcx, self.tcx.hir.local_def_id(id)),
+ inherited: Inherited::build(self.tcx, def_id),
code: self.code.clone(),
id: id,
- span: span
+ span: span,
+ param_env: self.tcx.param_env(def_id),
}
}
ast_trait_ref.path.span, &trait_ref);
let obligations =
ty::wf::trait_obligations(fcx,
+ fcx.param_env,
fcx.body_id,
&trait_ref,
ast_trait_ref.path.span);
predicates.predicates
.iter()
.flat_map(|p| ty::wf::predicate_obligations(fcx,
+ fcx.param_env,
fcx.body_id,
p,
span));
use rustc::hir;
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::infer::{InferCtxt};
-use rustc::ty::{self, Ty, TyCtxt, MethodCall, MethodCallee};
-use rustc::ty::adjustment;
+use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::fold::{TypeFolder,TypeFoldable};
use rustc::util::nodemap::DefIdSet;
use syntax::ast;
let inner_ty = self.fcx.resolve_type_vars_if_possible(&inner_ty);
if inner_ty.is_scalar() {
- self.fcx.tables.borrow_mut().method_map.remove(&MethodCall::expr(e.id));
+ let mut tables = self.fcx.tables.borrow_mut();
+ tables.type_dependent_defs.remove(&e.id);
+ tables.node_substs.remove(&e.id);
}
}
hir::ExprBinary(ref op, ref lhs, ref rhs) |
let rhs_ty = self.fcx.resolve_type_vars_if_possible(&rhs_ty);
if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
- self.fcx.tables.borrow_mut().method_map.remove(&MethodCall::expr(e.id));
+ let mut tables = self.fcx.tables.borrow_mut();
+ tables.type_dependent_defs.remove(&e.id);
+ tables.node_substs.remove(&e.id);
- // weird but true: the by-ref binops put an
- // adjustment on the lhs but not the rhs; the
- // adjustment for rhs is kind of baked into the
- // system.
match e.node {
hir::ExprBinary(..) => {
if !op.node.is_by_value() {
- self.fcx.tables.borrow_mut().adjustments.remove(&lhs.id);
+ tables.adjustments.get_mut(&lhs.id).map(|a| a.pop());
+ tables.adjustments.get_mut(&rhs.id).map(|a| a.pop());
}
},
hir::ExprAssignOp(..) => {
- self.fcx.tables.borrow_mut().adjustments.remove(&lhs.id);
+ tables.adjustments.get_mut(&lhs.id).map(|a| a.pop());
},
_ => {},
}
self.fix_scalar_builtin_expr(e);
self.visit_node_id(e.span, e.id);
- self.visit_method_map_entry(e.span, MethodCall::expr(e.id));
if let hir::ExprClosure(_, _, body, _) = e.node {
let body = self.fcx.tcx.hir.body(body);
}
fn visit_node_id(&mut self, span: Span, node_id: ast::NodeId) {
- // Export associated path extensions.
- if let Some(def) = self.fcx.tables.borrow_mut().type_relative_path_defs.remove(&node_id) {
- self.tables.type_relative_path_defs.insert(node_id, def);
+ // Export associated path extensions and method resolutions.
+ if let Some(def) = self.fcx.tables.borrow_mut().type_dependent_defs.remove(&node_id) {
+ self.tables.type_dependent_defs.insert(node_id, def);
}
// Resolve any borrowings for the node with id `node_id`
debug!("Node {} has type {:?}", node_id, n_ty);
// Resolve any substitutions
- self.fcx.opt_node_ty_substs(node_id, |item_substs| {
- let item_substs = self.resolve(item_substs, &span);
- if !item_substs.is_noop() {
- debug!("write_substs_to_tcx({}, {:?})", node_id, item_substs);
- assert!(!item_substs.substs.needs_infer());
- self.tables.item_substs.insert(node_id, item_substs);
- }
- });
+ if let Some(&substs) = self.fcx.tables.borrow().node_substs.get(&node_id) {
+ let substs = self.resolve(&substs, &span);
+ debug!("write_substs_to_tcx({}, {:?})", node_id, substs);
+ assert!(!substs.needs_infer());
+ self.tables.node_substs.insert(node_id, substs);
+ }
}
fn visit_adjustments(&mut self, span: Span, node_id: ast::NodeId) {
- let adjustments = self.fcx.tables.borrow_mut().adjustments.remove(&node_id);
- match adjustments {
+ let adjustment = self.fcx.tables.borrow_mut().adjustments.remove(&node_id);
+ match adjustment {
None => {
debug!("No adjustments for node {}", node_id);
}
Some(adjustment) => {
- let resolved_adjustment = match adjustment.kind {
- adjustment::Adjust::NeverToAny => {
- adjustment::Adjust::NeverToAny
- }
-
- adjustment::Adjust::ReifyFnPointer => {
- adjustment::Adjust::ReifyFnPointer
- }
-
- adjustment::Adjust::MutToConstPointer => {
- adjustment::Adjust::MutToConstPointer
- }
-
- adjustment::Adjust::ClosureFnPointer => {
- adjustment::Adjust::ClosureFnPointer
- }
-
- adjustment::Adjust::UnsafeFnPointer => {
- adjustment::Adjust::UnsafeFnPointer
- }
-
- adjustment::Adjust::DerefRef { autoderefs, autoref, unsize } => {
- for autoderef in 0..autoderefs {
- let method_call = MethodCall::autoderef(node_id, autoderef as u32);
- self.visit_method_map_entry(span, method_call);
- }
-
- adjustment::Adjust::DerefRef {
- autoderefs: autoderefs,
- autoref: self.resolve(&autoref, &span),
- unsize: unsize,
- }
- }
- };
- let resolved_adjustment = adjustment::Adjustment {
- kind: resolved_adjustment,
- target: self.resolve(&adjustment.target, &span)
- };
+ let resolved_adjustment = self.resolve(&adjustment, &span);
debug!("Adjustments for node {}: {:?}", node_id, resolved_adjustment);
self.tables.adjustments.insert(node_id, resolved_adjustment);
}
}
}
- fn visit_method_map_entry(&mut self,
- method_span: Span,
- method_call: MethodCall) {
- // Resolve any method map entry
- let new_method = match self.fcx.tables.borrow_mut().method_map.remove(&method_call) {
- Some(method) => {
- debug!("writeback::resolve_method_map_entry(call={:?}, entry={:?})",
- method_call,
- method);
- let new_method = MethodCallee {
- def_id: method.def_id,
- ty: self.resolve(&method.ty, &method_span),
- substs: self.resolve(&method.substs, &method_span),
- };
-
- Some(new_method)
- }
- None => None
- };
-
- //NB(jroesch): We need to match twice to avoid a double borrow which would cause an ICE
- if let Some(method) = new_method {
- self.tables.method_map.insert(method_call, method);
- }
- }
-
fn visit_liberated_fn_sigs(&mut self) {
for (&node_id, fn_sig) in self.fcx.tables.borrow().liberated_fn_sigs.iter() {
let fn_sig = self.resolve(fn_sig, &node_id);
use rustc::middle::region::RegionMaps;
use rustc::middle::lang_items::UnsizeTraitLangItem;
-use rustc::traits::{self, ObligationCause, Reveal};
+use rustc::traits::{self, ObligationCause};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::TypeFoldable;
use rustc::ty::adjustment::CoerceUnsizedInfo;
source,
target);
- tcx.infer_ctxt(param_env, Reveal::UserFacing).enter(|infcx| {
+ tcx.infer_ctxt(()).enter(|infcx| {
let cause = ObligationCause::misc(span, impl_node_id);
let check_mutbl = |mt_a: ty::TypeAndMut<'tcx>,
mt_b: ty::TypeAndMut<'tcx>,
// we may have to evaluate constraint
// expressions in the course of execution.)
// See e.g. #41936.
- if let Ok(ok) = infcx.eq_types(false, &cause, b, a) {
+ if let Ok(ok) = infcx.at(&cause, param_env).eq(a, b) {
if ok.obligations.is_empty() {
return None;
}
// Register an obligation for `A: Trait<B>`.
let cause = traits::ObligationCause::misc(span, impl_node_id);
- let predicate = tcx.predicate_for_trait_def(cause, trait_def_id, 0, source, &[target]);
+ let predicate = tcx.predicate_for_trait_def(param_env,
+ cause,
+ trait_def_id,
+ 0,
+ source,
+ &[target]);
fulfill_cx.register_predicate_obligation(&infcx, predicate);
// Check that all transitive obligations are satisfied.
// Finally, resolve all regions.
let region_maps = RegionMaps::new();
let mut free_regions = FreeRegionMap::new();
- free_regions.relate_free_regions_from_predicates(&infcx.param_env
- .caller_bounds);
+ free_regions.relate_free_regions_from_predicates(¶m_env.caller_bounds);
infcx.resolve_regions_and_report_errors(impl_did, ®ion_maps, &free_regions);
CoerceUnsizedInfo {
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use rustc::traits::{self, Reveal};
+use rustc::traits;
use rustc::ty::{self, TyCtxt};
pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
for (i, &impl1_def_id) in impls.iter().enumerate() {
for &impl2_def_id in &impls[(i + 1)..] {
- self.tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
+ self.tcx.infer_ctxt(()).enter(|infcx| {
if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() {
self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id)
}
#![allow(non_camel_case_types)]
+#![feature(advanced_slice_patterns)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(conservative_impl_trait)]
#![feature(never_type)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
+#![feature(slice_patterns)]
#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![cfg_attr(stage0, feature(rustc_private))]
expected: Ty<'tcx>,
actual: Ty<'tcx>)
-> bool {
- tcx.infer_ctxt((), Reveal::UserFacing).enter(|ref infcx| {
+ tcx.infer_ctxt(()).enter(|ref infcx| {
+ let param_env = ty::ParamEnv::empty(Reveal::UserFacing);
let mut fulfill_cx = FulfillmentContext::new();
- match infcx.eq_types(false, &cause, expected, actual) {
+ match infcx.at(&cause, param_env).eq(expected, actual) {
Ok(InferOk { obligations, .. }) => {
fulfill_cx.register_predicate_obligations(infcx, obligations);
}
[build-dependencies]
build_helper = { path = "../build_helper" }
-gcc = "0.3.27"
+gcc = "0.3.50"
/// extern "ABI" fn
BareFunction(Box<BareFunctionDecl>),
Tuple(Vec<Type>),
- Vector(Box<Type>),
- FixedVector(Box<Type>, String),
+ Slice(Box<Type>),
+ Array(Box<Type>, usize),
Never,
Unique(Box<Type>),
RawPointer(Mutability, Box<Type>),
pub fn primitive_type(&self) -> Option<PrimitiveType> {
match *self {
Primitive(p) | BorrowedRef { type_: box Primitive(p), ..} => Some(p),
- Vector(..) | BorrowedRef{ type_: box Vector(..), .. } => Some(PrimitiveType::Slice),
- FixedVector(..) | BorrowedRef { type_: box FixedVector(..), .. } => {
- Some(PrimitiveType::Array)
- }
+ Slice(..) | BorrowedRef { type_: box Slice(..), .. } => Some(PrimitiveType::Slice),
+ Array(..) | BorrowedRef { type_: box Array(..), .. } => Some(PrimitiveType::Array),
Tuple(..) => Some(PrimitiveType::Tuple),
RawPointer(..) => Some(PrimitiveType::RawPointer),
_ => None,
BorrowedRef {lifetime: lifetime, mutability: m.mutbl.clean(cx),
type_: box m.ty.clean(cx)}
}
- TySlice(ref ty) => Vector(box ty.clean(cx)),
+ TySlice(ref ty) => Slice(box ty.clean(cx)),
TyArray(ref ty, length) => {
use rustc::middle::const_val::eval_length;
let n = eval_length(cx.tcx, length, "array length").unwrap();
- FixedVector(box ty.clean(cx), n.to_string())
+ Array(box ty.clean(cx), n)
},
TyTup(ref tys) => Tuple(tys.clean(cx)),
TyPath(hir::QPath::Resolved(None, ref path)) => {
ty::TyUint(uint_ty) => Primitive(uint_ty.into()),
ty::TyFloat(float_ty) => Primitive(float_ty.into()),
ty::TyStr => Primitive(PrimitiveType::Str),
- ty::TySlice(ty) => Vector(box ty.clean(cx)),
- ty::TyArray(ty, i) => FixedVector(box ty.clean(cx),
- format!("{}", i)),
+ ty::TySlice(ty) => Slice(box ty.clean(cx)),
+ ty::TyArray(ty, n) => Array(box ty.clean(cx), n),
ty::TyRawPtr(mt) => RawPointer(mt.mutbl.clean(cx), box mt.ty.clean(cx)),
ty::TyRef(r, mt) => BorrowedRef {
lifetime: r.clean(cx),
use clean::{self, PrimitiveType};
use core::DocAccessLevels;
use html::item_type::ItemType;
-use html::escape::Escape;
use html::render;
use html::render::{cache, CURRENT_LOCATION_KEY};
}
}
}
- clean::Vector(ref t) => {
+ clean::Slice(ref t) => {
primitive_link(f, PrimitiveType::Slice, "[")?;
fmt::Display::fmt(t, f)?;
primitive_link(f, PrimitiveType::Slice, "]")
}
- clean::FixedVector(ref t, ref s) => {
+ clean::Array(ref t, n) => {
primitive_link(f, PrimitiveType::Array, "[")?;
fmt::Display::fmt(t, f)?;
- if f.alternate() {
- primitive_link(f, PrimitiveType::Array,
- &format!("; {}]", s))
- } else {
- primitive_link(f, PrimitiveType::Array,
- &format!("; {}]", Escape(s)))
- }
+ primitive_link(f, PrimitiveType::Array, &format!("; {}]", n))
}
clean::Never => f.write_str("!"),
clean::RawPointer(m, ref t) => {
};
let m = MutableSpace(mutability);
match **ty {
- clean::Vector(ref bt) => { // BorrowedRef{ ... Vector(T) } is &[T]
+ clean::Slice(ref bt) => { // BorrowedRef{ ... Slice(T) } is &[T]
match **bt {
clean::Generic(_) => {
if f.alternate() {
[build-dependencies]
build_helper = { path = "../build_helper" }
-gcc = "0.3.27"
+gcc = "0.3.50"
[features]
backtrace = []
/// the heap (for normal construction via Error::new) is too costly.
#[stable(feature = "io_error_from_errorkind", since = "1.14.0")]
impl From<ErrorKind> for Error {
+ #[inline]
fn from(kind: ErrorKind) -> Error {
Error {
repr: Repr::Simple(kind)
/// let (send, recv) = channel();
///
/// thread::spawn(move || {
- /// send.send(1u8).unwrap();
- /// send.send(2u8).unwrap();
- /// send.send(3u8).unwrap();
+ /// send.send(1).unwrap();
+ /// send.send(2).unwrap();
+ /// send.send(3).unwrap();
/// });
///
- /// for x in recv.iter() {
- /// println!("Got: {}", x);
- /// }
+ /// let mut iter = recv.iter();
+ /// assert_eq!(iter.next(), Some(1));
+ /// assert_eq!(iter.next(), Some(2));
+ /// assert_eq!(iter.next(), Some(3));
+ /// assert_eq!(iter.next(), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> {
///
/// # Examples
///
- /// ```rust
+ /// ```no_run
/// use std::sync::mpsc::channel;
/// use std::thread;
/// use std::time::Duration;
///
/// let (sender, receiver) = channel();
///
- /// // Nothing is in the buffer yet
+ /// // nothing is in the buffer yet
/// assert!(receiver.try_iter().next().is_none());
- /// println!("Nothing in the buffer...");
///
/// thread::spawn(move || {
+ /// thread::sleep(Duration::from_secs(1));
/// sender.send(1).unwrap();
/// sender.send(2).unwrap();
/// sender.send(3).unwrap();
/// });
///
- /// println!("Going to sleep...");
- /// thread::sleep(Duration::from_secs(2)); // block for two seconds
+ /// // nothing is in the buffer yet
+ /// assert!(receiver.try_iter().next().is_none());
+ ///
+ /// // block for two seconds
+ /// thread::sleep(Duration::from_secs(2));
///
- /// for x in receiver.try_iter() {
- /// println!("Got: {}", x);
- /// }
+ /// let mut iter = receiver.try_iter();
+ /// assert_eq!(iter.next(), Some(1));
+ /// assert_eq!(iter.next(), Some(2));
+ /// assert_eq!(iter.next(), Some(3));
+ /// assert_eq!(iter.next(), None);
/// ```
#[stable(feature = "receiver_try_iter", since = "1.15.0")]
pub fn try_iter(&self) -> TryIter<T> {
fn oneshot_single_thread_send_then_recv() {
let (tx, rx) = channel::<Box<i32>>();
tx.send(box 10).unwrap();
- assert!(rx.recv().unwrap() == box 10);
+ assert!(*rx.recv().unwrap() == 10);
}
#[test]
fn oneshot_multi_task_recv_then_send() {
let (tx, rx) = channel::<Box<i32>>();
let _t = thread::spawn(move|| {
- assert!(rx.recv().unwrap() == box 10);
+ assert!(*rx.recv().unwrap() == 10);
});
tx.send(box 10).unwrap();
drop(tx);
});
let res = thread::spawn(move|| {
- assert!(rx.recv().unwrap() == box 10);
+ assert!(*rx.recv().unwrap() == 10);
}).join();
assert!(res.is_err());
}
let _t = thread::spawn(move|| {
tx.send(box 10).unwrap();
});
- assert!(rx.recv().unwrap() == box 10);
+ assert!(*rx.recv().unwrap() == 10);
}
}
if i == 10 { return }
thread::spawn(move|| {
- assert!(rx.recv().unwrap() == box i);
+ assert!(*rx.recv().unwrap() == i);
recv(rx, i + 1);
});
}
fn oneshot_single_thread_send_then_recv() {
let (tx, rx) = sync_channel::<Box<i32>>(1);
tx.send(box 10).unwrap();
- assert!(rx.recv().unwrap() == box 10);
+ assert!(*rx.recv().unwrap() == 10);
}
#[test]
fn oneshot_multi_task_recv_then_send() {
let (tx, rx) = sync_channel::<Box<i32>>(0);
let _t = thread::spawn(move|| {
- assert!(rx.recv().unwrap() == box 10);
+ assert!(*rx.recv().unwrap() == 10);
});
tx.send(box 10).unwrap();
drop(tx);
});
let res = thread::spawn(move|| {
- assert!(rx.recv().unwrap() == box 10);
+ assert!(*rx.recv().unwrap() == 10);
}).join();
assert!(res.is_err());
}
let _t = thread::spawn(move|| {
tx.send(box 10).unwrap();
});
- assert!(rx.recv().unwrap() == box 10);
+ assert!(*rx.recv().unwrap() == 10);
}
}
if i == 10 { return }
thread::spawn(move|| {
- assert!(rx.recv().unwrap() == box i);
+ assert!(*rx.recv().unwrap() == i);
recv(rx, i + 1);
});
}
use core::{mem, slice};
#[derive(Copy, Clone, Debug, Default)]
-#[repr(packed)]
+#[repr(C)]
pub struct Stat {
pub st_dev: u64,
pub st_ino: u64,
}
#[derive(Copy, Clone, Debug, Default)]
-#[repr(packed)]
+#[repr(C)]
pub struct StatVfs {
pub f_bsize: u32,
pub f_blocks: u64,
}
#[derive(Copy, Clone, Debug, Default)]
-#[repr(packed)]
+#[repr(C)]
pub struct TimeSpec {
pub tv_sec: i64,
pub tv_nsec: i32,
}
extern {
- static __magenta_job_default: mx_handle_t;
+ pub fn mx_job_default() -> mx_handle_t;
pub fn mx_task_kill(handle: mx_handle_t) -> mx_status_t;
avail: *mut mx_size_t) -> mx_status_t;
}
-pub fn mx_job_default() -> mx_handle_t {
- unsafe { return __magenta_job_default; }
-}
-
// From `enum special_handles` in system/ulib/launchpad/launchpad.c
// HND_LOADER_SVC = 0
// HND_EXEC_VMO = 1
/// An owned permission to join on a thread (block on its termination).
///
-/// A `JoinHandle` *detaches* the child thread when it is dropped.
+/// A `JoinHandle` *detaches* the associated thread when it is dropped, which
+/// means that there is no longer any handle to thread and no way to `join`
+/// on it.
///
/// Due to platform restrictions, it is not possible to [`Clone`] this
-/// handle: the ability to join a child thread is a uniquely-owned
-/// permission.
+/// handle: the ability to join a thread is a uniquely-owned permission.
///
/// This `struct` is created by the [`thread::spawn`] function and the
/// [`thread::Builder::spawn`] method.
/// }).unwrap();
/// ```
///
+/// Child being detached and outliving its parent:
+///
+/// ```no_run
+/// use std::thread;
+/// use std::time::Duration;
+///
+/// let original_thread = thread::spawn(|| {
+/// let _detached_thread = thread::spawn(|| {
+/// // Here we sleep to make sure that the first thread returns before.
+/// thread::sleep(Duration::from_millis(10));
+/// // This will be called, even though the JoinHandle is dropped.
+/// println!("♫ Still alive ♫");
+/// });
+/// });
+///
+/// let _ = original_thread.join();
+/// println!("Original thread is joined.");
+///
+/// // We make sure that the new thread has time to run, before the main
+/// // thread returns.
+///
+/// thread::sleep(Duration::from_millis(1000));
+/// ```
+///
/// [`Clone`]: ../../std/clone/trait.Clone.html
/// [`thread::spawn`]: fn.spawn.html
/// [`thread::Builder::spawn`]: struct.Builder.html#method.spawn
fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
self.merge_spans(sp_lhs, sp_rhs)
}
+ fn call_span_if_macro(&self, sp: Span) -> Span {
+ if self.span_to_filename(sp.clone()).contains("macros>") {
+ let v = sp.macro_backtrace();
+ if let Some(use_site) = v.last() {
+ return use_site.call_site;
+ }
+ }
+ sp
+ }
}
#[derive(Clone)]
BuiltinDerive(BuiltinDeriveFn),
/// A declarative macro, e.g. `macro m() {}`.
- DeclMacro(Box<TTMacroExpander>, Option<Span> /* definition site span */),
+ ///
+ /// The second element is the definition site span.
+ DeclMacro(Box<TTMacroExpander>, Option<(ast::NodeId, Span)>),
}
impl SyntaxExtension {
let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
let opt_expanded = match *ext {
- SyntaxExtension::DeclMacro(ref expand, def_site_span) => {
- if let Err(msg) = validate_and_set_expn_info(def_site_span, false) {
+ SyntaxExtension::DeclMacro(ref expand, def_span) => {
+ if let Err(msg) = validate_and_set_expn_info(def_span.map(|(_, s)| s),
+ false) {
self.cx.span_err(path.span, &msg);
return kind.dummy(span);
}
let allow_internal_unstable = attr::contains_name(&def.attrs, "allow_internal_unstable");
NormalTT(exp, Some((def.id, def.span)), allow_internal_unstable)
} else {
- SyntaxExtension::DeclMacro(exp, Some(def.span))
+ SyntaxExtension::DeclMacro(exp, Some((def.id, def.span)))
}
}
self.with_str_from(start, |string| {
if string == "_" {
+ self.sess.span_diagnostic
+ .struct_span_warn(mk_sp(start, self.pos),
+ "underscore literal suffix is not allowed")
+ .warn("this was previously accepted by the compiler but is \
+ being phased out; it will become a hard error in \
+ a future release!")
+ .note("for more information, see issue #42326 \
+ <https://github.com/rust-lang/rust/issues/42326>")
+ .emit();
None
} else {
Some(Symbol::intern(string))
return Ok(Some(self.parse_item_foreign_fn(visibility, lo, attrs)?));
}
+ if self.check_keyword(keywords::Const) {
+ return Err(self.span_fatal(self.span, "extern items cannot be `const`"));
+ }
+
// FIXME #5668: this will occur for a macro invocation:
match self.parse_macro_use_or_failure(attrs, true, false, lo, visibility)? {
Some(item) => {
// ignore-tce
// ignore-thumb
// ignore-thumbeb
-// ignore-x86_64 no-ignore-x86
+// ignore-x86_64
// ignore-xcore
// ignore-nvptx
// ignore-nvptx64
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -C no-prepopulate-passes
+
+#![crate_type = "lib"]
+#![feature(core_intrinsics)]
+
+use std::intrinsics::{prefetch_read_data, prefetch_write_data,
+ prefetch_read_instruction, prefetch_write_instruction};
+
+#[no_mangle]
+pub fn check_prefetch_read_data(data: &[i8]) {
+ unsafe {
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 0, i32 1)
+ prefetch_read_data(data.as_ptr(), 0);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 1, i32 1)
+ prefetch_read_data(data.as_ptr(), 1);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 2, i32 1)
+ prefetch_read_data(data.as_ptr(), 2);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 3, i32 1)
+ prefetch_read_data(data.as_ptr(), 3);
+ }
+}
+
+#[no_mangle]
+pub fn check_prefetch_write_data(data: &[i8]) {
+ unsafe {
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 0, i32 1)
+ prefetch_write_data(data.as_ptr(), 0);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 1, i32 1)
+ prefetch_write_data(data.as_ptr(), 1);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 2, i32 1)
+ prefetch_write_data(data.as_ptr(), 2);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 3, i32 1)
+ prefetch_write_data(data.as_ptr(), 3);
+ }
+}
+
+#[no_mangle]
+pub fn check_prefetch_read_instruction(data: &[i8]) {
+ unsafe {
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 0, i32 0)
+ prefetch_read_instruction(data.as_ptr(), 0);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 1, i32 0)
+ prefetch_read_instruction(data.as_ptr(), 1);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 2, i32 0)
+ prefetch_read_instruction(data.as_ptr(), 2);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 0, i32 3, i32 0)
+ prefetch_read_instruction(data.as_ptr(), 3);
+ }
+}
+
+#[no_mangle]
+pub fn check_prefetch_write_instruction(data: &[i8]) {
+ unsafe {
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 0, i32 0)
+ prefetch_write_instruction(data.as_ptr(), 0);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 1, i32 0)
+ prefetch_write_instruction(data.as_ptr(), 1);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 2, i32 0)
+ prefetch_write_instruction(data.as_ptr(), 2);
+ // CHECK: call void @llvm.prefetch(i8* %{{.*}}, i32 1, i32 3, i32 0)
+ prefetch_write_instruction(data.as_ptr(), 3);
+ }
+}
+
+
let xs : Vec<Option<i32>> = vec![Some(1), None];
for Some(x) in xs {}
- //~^ ERROR E0297
+ //~^ ERROR E0005
//~| NOTE pattern `None` not covered
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+mod SomeModule {
+ const PRIVATE: u32 = 0x_a_bad_1dea_u32;
+}
+
+fn main() {
+ SomeModule::PRIVATE; //~ ERROR E0603
+}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-pub use reexport::Reexported;
-
-pub struct Foo;
-pub enum Bar { X }
-
-pub mod foo {
- pub trait PubPub {
- fn method(&self) {}
-
- fn method3(&self) {}
- }
-
- impl PubPub for u32 {}
- impl PubPub for i32 {}
-}
-pub mod bar {
- trait PubPriv {
- fn method(&self);
- }
-}
-mod qux {
- pub trait PrivPub {
- fn method(&self);
- }
-}
-mod quz {
- trait PrivPriv {
- fn method(&self);
- }
-}
-
-mod reexport {
- pub trait Reexported {
- fn method(&self);
- }
-}
let red: color = color::rgb(255, 0, 0);
match red {
color::rgb(r, g, b) => { println!("rgb"); }
- color::hsl(h, s, l) => { println!("hsl"); } //~ ERROR no associated
+ color::hsl(h, s, l) => { println!("hsl"); } //~ ERROR no function
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![allow(unused_macros)]
+
macro m() {} //~ ERROR `macro` is experimental (see issue #39412)
//~| HELP add #![feature(decl_macro)] to the crate attributes to enable
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ for x in 0..3 {
+ x //~ ERROR mismatched types
+ //~| NOTE expected ()
+ //~| NOTE expected type `()`
+ }
+}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-trait Foo<A> {
- fn foo(&self, a: A) -> A {
- a
- }
-}
-
-trait NotRelevant<A> {
- fn nr(&self, a: A) -> A {
- a
- }
-}
-
-struct Bar;
-
-impl NotRelevant<usize> for Bar {}
-
-fn main() {
- let f1 = Bar;
-
- f1.foo(1usize);
- //~^ error: method named `foo` found for type `Bar` in the current scope
- //~| help: items from traits can only be used if the trait is implemented and in scope
- //~| help: candidate #1: `Foo`
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// error-pattern: overflow representing the type
-
-
-trait Mirror { type It: ?Sized; }
-impl<T: ?Sized> Mirror for T { type It = Self; }
-struct S(Option<<S as Mirror>::It>);
-
-fn main() {
- let _s = S(None);
-}
fn main() {
let x: u8 = BitXor::bitor(0 as u8, 0 as u8);
//~^ ERROR must be specified
- //~| no associated item named
+ //~| no function or associated item named
let g = BitXor::bitor;
//~^ ERROR must be specified
- //~| no associated item named
-}
\ No newline at end of file
+ //~| no function or associated item named
+}
fn main() {
let ug = Graph::<i32, i32>::new_undirected();
- //~^ ERROR no associated item named `new_undirected` found for type
+ //~^ ERROR no function or associated item named `new_undirected` found for type
}
let x = RefCell::new((&mut r,s));
let val: &_ = x.borrow().0;
- //~^ WARNING this temporary used to live longer - see issue #39283
- //~^^ ERROR borrowed value does not live long enough
+ //~^ ERROR borrowed value does not live long enough
//~| temporary value dropped here while still borrowed
//~| temporary value created here
//~| consider using a `let` binding to increase its lifetime
- //~| before rustc 1.16, this temporary lived longer - see issue #39283
println!("{}", val);
}
//~^ temporary value needs to live until here
pub struct Vector<T, D: Dim> {
entries: [T; D::dim()]
- //~^ ERROR no associated item named `dim` found for type `D` in the current scope
+ //~^ ERROR no function or associated item named `dim` found for type `D` in the current scope
}
fn main() {}
fn main() {
let p = Point::new(0.0, 0.0);
- //~^ ERROR no associated item named `new` found for type `Point` in the current scope
+ //~^ ERROR no function or associated item named `new` found for type `Point`
println!("{}", p.to_string());
}
struct Foo;
fn main() {
- Foo::bar(); //~ ERROR no associated item named `bar` found for type `Foo` in the current scope
+ Foo::bar();
+ //~^ ERROR no function or associated item named `bar` found for type `Foo`
}
pub fn f() {}
}
fn g<Foo>() {
- Foo::f(); //~ ERROR no associated item named `f`
+ Foo::f(); //~ ERROR no function or associated item named `f`
}
fn main() {}
y.zero()
.take() //~ ERROR no method named `take` found for type `Foo` in the current scope
//~^ NOTE the method `take` exists but the following trait bounds were not satisfied
+ //~| NOTE the following traits define an item `take`, perhaps you need to implement one of them
.one(0);
}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// issue #21405
-// ignore-tidy-linelength
-
-struct Foo;
-
-fn foo<F>(f: F) where F: FnMut(Foo) {}
-
-fn main() {
- foo(|s| s.is_empty());
- //~^ ERROR no method named `is_empty` found
- //~^^ HELP #1: `std::iter::ExactSizeIterator`
- //~^^^ HELP #2: `core::slice::SliceExt`
- //~^^^^ HELP #3: `core::str::StrExt`
- //~^^^^^ HELP items from traits can only be used if the trait is implemented and in scope; the following traits define an item `is_empty`, perhaps you need to implement one of them:
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:no_method_suggested_traits.rs
-
-extern crate no_method_suggested_traits;
-
-struct Foo;
-enum Bar { X }
-
-mod foo {
- pub trait Bar {
- fn method(&self) {}
-
- fn method2(&self) {}
- }
-
- impl Bar for u32 {}
-
- impl Bar for char {}
-}
-
-fn main() {
- // test the values themselves, and autoderef.
-
-
- 1u32.method();
- //~^ HELP following traits are implemented but not in scope, perhaps add a `use` for one of them
- //~^^ ERROR no method named
- //~^^^ HELP `use foo::Bar;`
- //~^^^^ HELP `use no_method_suggested_traits::foo::PubPub;`
- std::rc::Rc::new(&mut Box::new(&1u32)).method();
- //~^ HELP following traits are implemented but not in scope, perhaps add a `use` for one of them
- //~^^ ERROR no method named
- //~^^^ HELP `use foo::Bar;`
- //~^^^^ HELP `use no_method_suggested_traits::foo::PubPub;`
-
- 'a'.method();
- //~^ ERROR no method named
- //~^^ HELP the following trait is implemented but not in scope, perhaps add a `use` for it:
- //~^^^ HELP `use foo::Bar;`
- std::rc::Rc::new(&mut Box::new(&'a')).method();
- //~^ ERROR no method named
- //~^^ HELP the following trait is implemented but not in scope, perhaps add a `use` for it:
- //~^^^ HELP `use foo::Bar;`
-
- 1i32.method();
- //~^ ERROR no method named
- //~^^ HELP the following trait is implemented but not in scope, perhaps add a `use` for it:
- //~^^^ HELP `use no_method_suggested_traits::foo::PubPub;`
- std::rc::Rc::new(&mut Box::new(&1i32)).method();
- //~^ ERROR no method named
- //~^^ HELP the following trait is implemented but not in scope, perhaps add a `use` for it:
- //~^^^ HELP `use no_method_suggested_traits::foo::PubPub;`
-
- Foo.method();
- //~^ ERROR no method named
- //~^^ HELP following traits define an item `method`, perhaps you need to implement one of them
- //~^^^ HELP `foo::Bar`
- //~^^^^ HELP `no_method_suggested_traits::foo::PubPub`
- //~^^^^^ HELP `no_method_suggested_traits::Reexported`
- //~^^^^^^ HELP `no_method_suggested_traits::bar::PubPriv`
- //~^^^^^^^ HELP `no_method_suggested_traits::qux::PrivPub`
- //~^^^^^^^^ HELP `no_method_suggested_traits::quz::PrivPriv`
- std::rc::Rc::new(&mut Box::new(&Foo)).method();
- //~^ ERROR no method named
- //~^^ HELP following traits define an item `method`, perhaps you need to implement one of them
- //~^^^ HELP `foo::Bar`
- //~^^^^ HELP `no_method_suggested_traits::foo::PubPub`
- //~^^^^^ HELP `no_method_suggested_traits::Reexported`
- //~^^^^^^ HELP `no_method_suggested_traits::bar::PubPriv`
- //~^^^^^^^ HELP `no_method_suggested_traits::qux::PrivPub`
- //~^^^^^^^^ HELP `no_method_suggested_traits::quz::PrivPriv`
-
- 1u64.method2();
- //~^ ERROR no method named
- //~^^ HELP the following trait defines an item `method2`, perhaps you need to implement it
- //~^^^ HELP `foo::Bar`
- std::rc::Rc::new(&mut Box::new(&1u64)).method2();
- //~^ ERROR no method named
- //~^^ HELP the following trait defines an item `method2`, perhaps you need to implement it
- //~^^^ HELP `foo::Bar`
-
- no_method_suggested_traits::Foo.method2();
- //~^ ERROR no method named
- //~^^ HELP following trait defines an item `method2`, perhaps you need to implement it
- //~^^^ HELP `foo::Bar`
- std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Foo)).method2();
- //~^ ERROR no method named
- //~^^ HELP following trait defines an item `method2`, perhaps you need to implement it
- //~^^^ HELP `foo::Bar`
- no_method_suggested_traits::Bar::X.method2();
- //~^ ERROR no method named
- //~^^ HELP following trait defines an item `method2`, perhaps you need to implement it
- //~^^^ HELP `foo::Bar`
- std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Bar::X)).method2();
- //~^ ERROR no method named
- //~^^ HELP following trait defines an item `method2`, perhaps you need to implement it
- //~^^^ HELP `foo::Bar`
-
- Foo.method3();
- //~^ ERROR no method named
- //~^^ HELP following trait defines an item `method3`, perhaps you need to implement it
- //~^^^ HELP `no_method_suggested_traits::foo::PubPub`
- std::rc::Rc::new(&mut Box::new(&Foo)).method3();
- //~^ ERROR no method named
- //~^^ HELP following trait defines an item `method3`, perhaps you need to implement it
- //~^^^ HELP `no_method_suggested_traits::foo::PubPub`
- Bar::X.method3();
- //~^ ERROR no method named
- //~^^ HELP following trait defines an item `method3`, perhaps you need to implement it
- //~^^^ HELP `no_method_suggested_traits::foo::PubPub`
- std::rc::Rc::new(&mut Box::new(&Bar::X)).method3();
- //~^ ERROR no method named
- //~^^ HELP following trait defines an item `method3`, perhaps you need to implement it
- //~^^^ HELP `no_method_suggested_traits::foo::PubPub`
-
- // should have no help:
- 1_usize.method3(); //~ ERROR no method named
- std::rc::Rc::new(&mut Box::new(&1_usize)).method3(); //~ ERROR no method named
- no_method_suggested_traits::Foo.method3(); //~ ERROR no method named
- std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Foo)).method3();
- //~^ ERROR no method named
- no_method_suggested_traits::Bar::X.method3(); //~ ERROR no method named
- std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Bar::X)).method3();
- //~^ ERROR no method named
-}
// Methods, UFCS
// a, b, c are resolved as trait items, their traits need to be in scope
- S::a(&S); //~ ERROR no associated item named `a` found for type `S` in the current scope
- S::b(&S); //~ ERROR no associated item named `b` found for type `S` in the current scope
+ S::a(&S);
+ //~^ ERROR no function or associated item named `a` found for type `S`
+ S::b(&S);
+ //~^ ERROR no function or associated item named `b` found for type `S`
S::c(&S); // OK
// a, b, c are resolved as inherent items, their traits don't need to be in scope
C::a(&S); //~ ERROR method `a` is private
fn main() {
let a = Foo::lol();
- //~^ ERROR no associated item named
+ //~^ ERROR no function or associated item named
let b = Foo::<_>::lol();
- //~^ ERROR no associated item named
+ //~^ ERROR no function or associated item named
let c = Bar::lol();
- //~^ ERROR no associated item named
+ //~^ ERROR no function or associated item named
let d = Bar::<usize, _>::lol();
- //~^ ERROR no associated item named
+ //~^ ERROR no function or associated item named
let e = Bar::<usize>::lol();
//~^ ERROR must be explicitly specified
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(unused_macros)]
+
+// Most simple case
+macro_rules! unused { //~ ERROR: unused macro definition
+ () => {};
+}
+
+// Test macros created by macros
+macro_rules! create_macro {
+ () => {
+ macro_rules! m { //~ ERROR: unused macro definition
+ () => {};
+ }
+ };
+}
+create_macro!();
+
+#[allow(unused_macros)]
+mod bar {
+ // Test that putting the #[deny] close to the macro's definition
+ // works.
+
+ #[deny(unused_macros)]
+ macro_rules! unused { //~ ERROR: unused macro definition
+ () => {};
+ }
+}
+
+fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+#![feature(decl_macro)]
#![deny(unused_macros)]
// Most simple case
-macro_rules! unused { //~ ERROR: unused macro definition
- () => {};
+macro unused { //~ ERROR: unused macro definition
+ () => {}
}
-// Test macros created by macros
-macro_rules! create_macro {
- () => {
- macro_rules! m { //~ ERROR: unused macro definition
- () => {};
- }
- };
-}
-create_macro!();
-
#[allow(unused_macros)]
mod bar {
// Test that putting the #[deny] close to the macro's definition
// works.
#[deny(unused_macros)]
- macro_rules! unused { //~ ERROR: unused macro definition
- () => {};
+ macro unused { //~ ERROR: unused macro definition
+ () => {}
+ }
+}
+
+mod boo {
+ pub(crate) macro unused { //~ ERROR: unused macro definition
+ () => {}
}
}
extern {
const i: isize;
- //~^ ERROR expected one of `fn`, `pub`, `static`, or `}`, found `const`
+ //~^ ERROR extern items cannot be `const`
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let _ = "Foo"_;
+ //~^ WARNING underscore literal suffix is not allowed
+ //~| WARNING this was previously accepted
+ //~| NOTE issue #42326
+}
+
+FAIL
+//~^ ERROR
+//~| NOTE
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// force-host
-
-#![feature(plugin_registrar)]
-#![feature(box_syntax, rustc_private)]
-
-// Load rustc as a plugin to get macros
-#[macro_use]
-extern crate rustc;
-extern crate rustc_plugin;
-
-use rustc::hir;
-use rustc::lint::{LateContext, LintContext, LintPass, LateLintPass, LateLintPassObject, LintArray};
-use rustc_plugin::Registry;
-
-declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'");
-
-declare_lint!(PLEASE_LINT, Warn, "Warn about items named 'pleaselintme'");
-
-struct Pass;
-
-impl LintPass for Pass {
- fn get_lints(&self) -> LintArray {
- lint_array!(TEST_LINT, PLEASE_LINT)
- }
-}
-
-impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Pass {
- fn check_item(&mut self, cx: &LateContext, it: &hir::Item) {
- match &*it.name.as_str() {
- "lintme" => cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"),
- "pleaselintme" => cx.span_lint(PLEASE_LINT, it.span, "item is named 'pleaselintme'"),
- _ => {}
- }
- }
-}
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
- reg.register_late_lint_pass(box Pass);
- reg.register_lint_group("lint_me", vec![TEST_LINT, PLEASE_LINT]);
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// force-host
-
-#![feature(plugin_registrar)]
-#![feature(box_syntax, rustc_private)]
-
-extern crate syntax;
-
-// Load rustc as a plugin to get macros
-#[macro_use]
-extern crate rustc;
-extern crate rustc_plugin;
-
-use rustc::lint::{EarlyContext, LintContext, LintPass, EarlyLintPass,
- EarlyLintPassObject, LintArray};
-use rustc_plugin::Registry;
-use syntax::ast;
-declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'");
-
-struct Pass;
-
-impl LintPass for Pass {
- fn get_lints(&self) -> LintArray {
- lint_array!(TEST_LINT)
- }
-}
-
-impl EarlyLintPass for Pass {
- fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
- if it.ident.name == "lintme" {
- cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
- }
- }
-}
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
- reg.register_early_lint_pass(box Pass as EarlyLintPassObject);
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(rustc_private)]
-#![allow(dead_code)]
-
-extern crate serialize;
-
-#[derive(Encodable)]
-//~^ WARNING derive(Encodable) is deprecated in favor of derive(RustcEncodable)
-struct Test1;
-
-fn main() { }
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:lint_group_plugin_test.rs
-// ignore-stage1
-#![feature(plugin)]
-#![plugin(lint_group_plugin_test)]
-#![allow(dead_code)]
-
-fn lintme() { } //~ WARNING item is named 'lintme'
-fn pleaselintme() { } //~ WARNING item is named 'pleaselintme'
-
-#[allow(lint_me)]
-pub fn main() {
- fn lintme() { }
-
- fn pleaselintme() { }
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:lint_plugin_test.rs
-// ignore-stage1
-// compile-flags: -A test-lint
-
-#![feature(plugin)]
-#![plugin(lint_plugin_test)]
-
-fn lintme() { }
-
-pub fn main() {
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:lint_plugin_test.rs
-// ignore-stage1
-// compile-flags: -Z extra-plugins=lint_plugin_test
-
-#![allow(dead_code)]
-
-fn lintme() { } //~ WARNING item is named 'lintme'
-
-#[allow(test_lint)]
-pub fn main() {
- fn lintme() { }
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:lint_plugin_test.rs
-// ignore-stage1
-#![feature(plugin)]
-#![plugin(lint_plugin_test)]
-#![allow(dead_code)]
-
-fn lintme() { } //~ WARNING item is named 'lintme'
-
-#[allow(test_lint)]
-pub fn main() {
- fn lintme() { }
-}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-mod foo {
- #![macro_escape] //~ WARNING macro_escape is a deprecated synonym for macro_use
- //~^ HELP consider an outer attribute
-}
-
-fn main() {
-}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#[macro_escape] //~ WARNING macro_escape is a deprecated synonym for macro_use
-mod foo {
-}
-
-fn main() {
-}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![allow(dead_code)]
-
-#[derive] //~ WARNING empty trait list in `derive`
-struct Foo;
-
-#[derive()] //~ WARNING empty trait list in `derive`
-struct Bar;
-
-pub fn main() {}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-//
-#![warn(variant_size_differences)]
-#![allow(dead_code)]
-
-// Note that the following test works because all fields of the enum variants are of the same size.
-// If this test is modified and the reordering logic in librustc/ty/layout.rs kicks in, it fails.
-
-enum Enum1 { }
-
-enum Enum2 { A, B, C }
-
-enum Enum3 { D(isize), E, F }
-
-enum Enum4 { H(isize), I(isize), J }
-
-enum Enum5 {
- L(isize, isize, isize, isize), //~ WARNING three times larger
- M(isize),
- N
-}
-
-enum Enum6<T, U> {
- O(T),
- P(U),
- Q(isize)
-}
-
-#[allow(variant_size_differences)]
-enum Enum7 {
- R(isize, isize, isize, isize),
- S(isize),
- T
-}
-pub fn main() { }
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ // Check that the tail statement in the body unifies with something
+ for _ in 0..3 {
+ unsafe { std::mem::uninitialized() }
+ }
+
+ // Check that the tail statement in the body can be unit
+ for _ in 0..3 {
+ ()
+ }
+}
// ignore-windows
// Ignore 32 bit targets:
-// ignore-x86, ignore-arm
+// ignore-x86
+// ignore-arm
// ignore-emscripten
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![allow(non_snake_case)]
-#![allow(dead_code)]
-#![allow(unused_variables)]
-
-#[derive(Copy, Clone)]
-enum Foo {
- Bar,
- Baz
-}
-
-impl Foo {
- fn foo(&self) {
- match self {
- &
-Bar if true
-//~^ WARN pattern binding `Bar` is named the same as one of the variants of the type `Foo`
-//~^^ HELP to match on a variant, consider making the path in the pattern qualified: `Foo::Bar`
-=> println!("bar"),
- &
-Baz if false
-//~^ WARN pattern binding `Baz` is named the same as one of the variants of the type `Foo`
-//~^^ HELP to match on a variant, consider making the path in the pattern qualified: `Foo::Baz`
-=> println!("baz"),
-_ => ()
- }
- }
-}
-
-fn main() {}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Parser test for #37765
-
-fn with_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `with_parens`
- return (<T as ToString>::to_string(&arg)); //~WARN unnecessary parentheses around `return` value
-}
-
-fn no_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `no_parens`
- return <T as ToString>::to_string(&arg);
-}
-
-fn main() {
-
-}
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: --test
-
-#[test]
-#[should_panic = "foo"]
-//~^ WARN: attribute must be of the form:
-fn test1() {
- panic!();
-}
-
-#[test]
-#[should_panic(expected)]
-//~^ WARN: argument must be of the form:
-fn test2() {
- panic!();
-}
-
-#[test]
-#[should_panic(expect)]
-//~^ WARN: argument must be of the form:
-fn test3() {
- panic!();
-}
-
-#[test]
-#[should_panic(expected(foo, bar))]
-//~^ WARN: argument must be of the form:
-fn test4() {
- panic!();
-}
-
-#[test]
-#[should_panic(expected = "foo", bar)]
-//~^ WARN: argument must be of the form:
-fn test5() {
- panic!();
-}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// force-host
+
+#![feature(plugin_registrar)]
+#![feature(box_syntax, rustc_private)]
+
+// Load rustc as a plugin to get macros
+#[macro_use]
+extern crate rustc;
+extern crate rustc_plugin;
+
+use rustc::hir;
+use rustc::lint::{LateContext, LintContext, LintPass, LateLintPass, LateLintPassObject, LintArray};
+use rustc_plugin::Registry;
+
+declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'");
+
+declare_lint!(PLEASE_LINT, Warn, "Warn about items named 'pleaselintme'");
+
+struct Pass;
+
+impl LintPass for Pass {
+ fn get_lints(&self) -> LintArray {
+ lint_array!(TEST_LINT, PLEASE_LINT)
+ }
+}
+
+impl<'a, 'tcx> LateLintPass<'a, 'tcx> for Pass {
+ fn check_item(&mut self, cx: &LateContext, it: &hir::Item) {
+ match &*it.name.as_str() {
+ "lintme" => cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"),
+ "pleaselintme" => cx.span_lint(PLEASE_LINT, it.span, "item is named 'pleaselintme'"),
+ _ => {}
+ }
+ }
+}
+
+#[plugin_registrar]
+pub fn plugin_registrar(reg: &mut Registry) {
+ reg.register_late_lint_pass(box Pass);
+ reg.register_lint_group("lint_me", vec![TEST_LINT, PLEASE_LINT]);
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// force-host
+
+#![feature(plugin_registrar)]
+#![feature(box_syntax, rustc_private)]
+
+extern crate syntax;
+
+// Load rustc as a plugin to get macros
+#[macro_use]
+extern crate rustc;
+extern crate rustc_plugin;
+
+use rustc::lint::{EarlyContext, LintContext, LintPass, EarlyLintPass,
+ EarlyLintPassObject, LintArray};
+use rustc_plugin::Registry;
+use syntax::ast;
+declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'");
+
+struct Pass;
+
+impl LintPass for Pass {
+ fn get_lints(&self) -> LintArray {
+ lint_array!(TEST_LINT)
+ }
+}
+
+impl EarlyLintPass for Pass {
+ fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
+ if it.ident.name == "lintme" {
+ cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
+ }
+ }
+}
+
+#[plugin_registrar]
+pub fn plugin_registrar(reg: &mut Registry) {
+ reg.register_early_lint_pass(box Pass as EarlyLintPassObject);
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+
+#![feature(rustc_private)]
+#![allow(dead_code)]
+
+extern crate serialize;
+
+#[derive(Encodable)]
+//~^ WARNING derive(Encodable) is deprecated in favor of derive(RustcEncodable)
+struct Test1;
+
+fn main() { }
--- /dev/null
+warning: derive(Encodable) is deprecated in favor of derive(RustcEncodable)
+ --> $DIR/deprecated-derive.rs:18:10
+ |
+18 | #[derive(Encodable)]
+ | ^^^^^^^^^
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+// aux-build:lint_group_plugin_test.rs
+// ignore-stage1
+#![feature(plugin)]
+#![plugin(lint_group_plugin_test)]
+#![allow(dead_code)]
+
+fn lintme() { } //~ WARNING item is named 'lintme'
+fn pleaselintme() { } //~ WARNING item is named 'pleaselintme'
+
+#[allow(lint_me)]
+pub fn main() {
+ fn lintme() { }
+
+ fn pleaselintme() { }
+}
--- /dev/null
+warning: item is named 'lintme'
+ --> $DIR/lint-group-plugin.rs:18:1
+ |
+18 | fn lintme() { } //~ WARNING item is named 'lintme'
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: #[warn(test_lint)] on by default
+
+warning: item is named 'pleaselintme'
+ --> $DIR/lint-group-plugin.rs:19:1
+ |
+19 | fn pleaselintme() { } //~ WARNING item is named 'pleaselintme'
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: #[warn(please_lint)] on by default
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+// aux-build:lint_plugin_test.rs
+// ignore-stage1
+// compile-flags: -A test-lint
+
+#![feature(plugin)]
+#![plugin(lint_plugin_test)]
+
+fn lintme() { }
+
+pub fn main() {
+}
--- /dev/null
+warning: function is never used: `lintme`
+ --> $DIR/lint-plugin-cmdline-allow.rs:19:1
+ |
+19 | fn lintme() { }
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: #[warn(dead_code)] on by default
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+// aux-build:lint_plugin_test.rs
+// ignore-stage1
+// compile-flags: -Z extra-plugins=lint_plugin_test
+
+#![allow(dead_code)]
+
+fn lintme() { } //~ WARNING item is named 'lintme'
+
+#[allow(test_lint)]
+pub fn main() {
+ fn lintme() { }
+}
--- /dev/null
+warning: item is named 'lintme'
+ --> $DIR/lint-plugin-cmdline-load.rs:18:1
+ |
+18 | fn lintme() { } //~ WARNING item is named 'lintme'
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: #[warn(test_lint)] on by default
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+// aux-build:lint_plugin_test.rs
+// ignore-stage1
+#![feature(plugin)]
+#![plugin(lint_plugin_test)]
+#![allow(dead_code)]
+
+fn lintme() { } //~ WARNING item is named 'lintme'
+
+#[allow(test_lint)]
+pub fn main() {
+ fn lintme() { }
+}
--- /dev/null
+warning: item is named 'lintme'
+ --> $DIR/lint-plugin.rs:18:1
+ |
+18 | fn lintme() { } //~ WARNING item is named 'lintme'
+ | ^^^^^^^^^^^^^^^
+ |
+ = note: #[warn(test_lint)] on by default
+
--- /dev/null
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+
+mod foo {
+ #![macro_escape] //~ WARNING macro_escape is a deprecated synonym for macro_use
+ //~^ HELP consider an outer attribute
+}
+
+fn main() {
+}
--- /dev/null
+warning: macro_escape is a deprecated synonym for macro_use
+ --> $DIR/deprecated-macro_escape-inner.rs:14:5
+ |
+14 | #![macro_escape] //~ WARNING macro_escape is a deprecated synonym for macro_use
+ | ^^^^^^^^^^^^^^^^
+ |
+ = help: consider an outer attribute, #[macro_use] mod ...
+
--- /dev/null
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+
+#[macro_escape] //~ WARNING macro_escape is a deprecated synonym for macro_use
+mod foo {
+}
+
+fn main() {
+}
--- /dev/null
+warning: macro_escape is a deprecated synonym for macro_use
+ --> $DIR/deprecated-macro_escape.rs:13:1
+ |
+13 | #[macro_escape] //~ WARNING macro_escape is a deprecated synonym for macro_use
+ | ^^^^^^^^^^^^^^^
+
--- /dev/null
+// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+
+#![allow(dead_code)]
+
+#[derive] //~ WARNING empty trait list in `derive`
+struct Foo;
+
+#[derive()] //~ WARNING empty trait list in `derive`
+struct Bar;
+
+pub fn main() {}
--- /dev/null
+warning: empty trait list in `derive`
+ --> $DIR/deriving-meta-empty-trait-list.rs:15:1
+ |
+15 | #[derive] //~ WARNING empty trait list in `derive`
+ | ^^^^^^^^^
+
+warning: empty trait list in `derive`
+ --> $DIR/deriving-meta-empty-trait-list.rs:18:1
+ |
+18 | #[derive()] //~ WARNING empty trait list in `derive`
+ | ^^^^^^^^^^^
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+// ignore-x86
+// ignore-arm
+// ignore-emscripten
+// ^ ignore 32-bit targets, as the error message is target-dependent. see PR #41968.
+
+#![warn(variant_size_differences)]
+#![allow(dead_code)]
+
+// Note that the following test works because all fields of the enum variants are of the same size.
+// If this test is modified and the reordering logic in librustc/ty/layout.rs kicks in, it fails.
+
+enum Enum1 { }
+
+enum Enum2 { A, B, C }
+
+enum Enum3 { D(isize), E, F }
+
+enum Enum4 { H(isize), I(isize), J }
+
+enum Enum5 {
+ L(isize, isize, isize, isize), //~ WARNING three times larger
+ M(isize),
+ N
+}
+
+enum Enum6<T, U> {
+ O(T),
+ P(U),
+ Q(isize)
+}
+
+#[allow(variant_size_differences)]
+enum Enum7 {
+ R(isize, isize, isize, isize),
+ S(isize),
+ T
+}
+pub fn main() { }
--- /dev/null
+warning: enum variant is more than three times larger (32 bytes) than the next largest
+ --> $DIR/enum-size-variance.rs:32:5
+ |
+32 | L(isize, isize, isize, isize), //~ WARNING three times larger
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/enum-size-variance.rs:17:9
+ |
+17 | #![warn(variant_size_differences)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub use reexport::Reexported;
+
+pub struct Foo;
+pub enum Bar { X }
+
+pub mod foo {
+ pub trait PubPub {
+ fn method(&self) {}
+
+ fn method3(&self) {}
+ }
+
+ impl PubPub for u32 {}
+ impl PubPub for i32 {}
+}
+pub mod bar {
+ trait PubPriv {
+ fn method(&self);
+ }
+}
+mod qux {
+ pub trait PrivPub {
+ fn method(&self);
+ }
+}
+mod quz {
+ trait PrivPriv {
+ fn method(&self);
+ }
+}
+
+mod reexport {
+ pub trait Reexported {
+ fn method(&self);
+ }
+}
found type `u32`
error[E0277]: the trait bound `u32: std::ops::Add<impl Foo>` is not satisfied
- --> $DIR/equality.rs:34:9
+ --> $DIR/equality.rs:34:11
|
34 | n + sum_to(n - 1)
- | ^^^^^^^^^^^^^^^^^ no implementation for `u32 + impl Foo`
+ | ^ no implementation for `u32 + impl Foo`
|
= help: the trait `std::ops::Add<impl Foo>` is not implemented for `u32`
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo<A> {
+ fn foo(&self, a: A) -> A {
+ a
+ }
+}
+
+trait NotRelevant<A> {
+ fn nr(&self, a: A) -> A {
+ a
+ }
+}
+
+struct Bar;
+
+impl NotRelevant<usize> for Bar {}
+
+fn main() {
+ let f1 = Bar;
+
+ f1.foo(1usize);
+ //~^ error: method named `foo` found for type `Bar` in the current scope
+ //~| help: items from traits can only be used if the trait is implemented and in scope
+ //~| help: candidate #1: `Foo`
+}
--- /dev/null
+error[E0599]: no method named `foo` found for type `Bar` in the current scope
+ --> $DIR/issue-21659-show-relevant-trait-impls-3.rs:30:8
+ |
+30 | f1.foo(1usize);
+ | ^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `foo`, perhaps you need to implement it:
+ candidate #1: `Foo`
+
+error: aborting due to previous error(s)
+
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// issue #21405
+// ignore-tidy-linelength
+
+struct Foo;
+
+fn foo<F>(f: F) where F: FnMut(Foo) {}
+
+fn main() {
+ foo(|s| s.is_empty());
+ //~^ ERROR no method named `is_empty` found
+ //~^^ HELP #1: `std::iter::ExactSizeIterator`
+ //~^^^ HELP #2: `core::slice::SliceExt`
+ //~^^^^ HELP #3: `core::str::StrExt`
+ //~^^^^^ HELP items from traits can only be used if the trait is implemented and in scope; the following traits define an item `is_empty`, perhaps you need to implement one of them:
+}
--- /dev/null
+error[E0599]: no method named `is_empty` found for type `Foo` in the current scope
+ --> $DIR/method-suggestion-no-duplication.rs:19:15
+ |
+19 | foo(|s| s.is_empty());
+ | ^^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following traits define an item `is_empty`, perhaps you need to implement one of them:
+ candidate #1: `std::iter::ExactSizeIterator`
+ candidate #2: `core::slice::SliceExt`
+ candidate #3: `core::str::StrExt`
+
+error: aborting due to previous error(s)
+
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:no_method_suggested_traits.rs
+extern crate no_method_suggested_traits;
+
+struct Foo;
+enum Bar { X }
+
+mod foo {
+ pub trait Bar {
+ fn method(&self) {}
+
+ fn method2(&self) {}
+ }
+
+ impl Bar for u32 {}
+
+ impl Bar for char {}
+}
+
+fn main() {
+ // test the values themselves, and autoderef.
+
+
+ 1u32.method();
+ //~^ HELP following traits are implemented but not in scope, perhaps add a `use` for one of them
+ //~| ERROR no method named
+ //~| HELP `use foo::Bar;`
+ //~| HELP `use no_method_suggested_traits::foo::PubPub;`
+ std::rc::Rc::new(&mut Box::new(&1u32)).method();
+ //~^ HELP following traits are implemented but not in scope, perhaps add a `use` for one of them
+ //~| ERROR no method named
+ //~| HELP `use foo::Bar;`
+ //~| HELP `use no_method_suggested_traits::foo::PubPub;`
+
+ 'a'.method();
+ //~^ ERROR no method named
+ //~| HELP the following trait is implemented but not in scope, perhaps add a `use` for it:
+ //~| HELP `use foo::Bar;`
+ std::rc::Rc::new(&mut Box::new(&'a')).method();
+ //~^ ERROR no method named
+ //~| HELP the following trait is implemented but not in scope, perhaps add a `use` for it:
+ //~| HELP `use foo::Bar;`
+
+ 1i32.method();
+ //~^ ERROR no method named
+ //~| HELP the following trait is implemented but not in scope, perhaps add a `use` for it:
+ //~| HELP `use no_method_suggested_traits::foo::PubPub;`
+ std::rc::Rc::new(&mut Box::new(&1i32)).method();
+ //~^ ERROR no method named
+ //~| HELP the following trait is implemented but not in scope, perhaps add a `use` for it:
+ //~| HELP `use no_method_suggested_traits::foo::PubPub;`
+
+ Foo.method();
+ //~^ ERROR no method named
+ //~| HELP following traits define an item `method`, perhaps you need to implement one of them
+ //~| HELP `foo::Bar`
+ //~| HELP `no_method_suggested_traits::foo::PubPub`
+ //~| HELP `no_method_suggested_traits::Reexported`
+ //~| HELP `no_method_suggested_traits::bar::PubPriv`
+ //~| HELP `no_method_suggested_traits::qux::PrivPub`
+ //~| HELP `no_method_suggested_traits::quz::PrivPriv`
+ std::rc::Rc::new(&mut Box::new(&Foo)).method();
+ //~^ ERROR no method named
+ //~| HELP following traits define an item `method`, perhaps you need to implement one of them
+ //~| HELP `foo::Bar`
+ //~| HELP `no_method_suggested_traits::foo::PubPub`
+ //~| HELP `no_method_suggested_traits::Reexported`
+ //~| HELP `no_method_suggested_traits::bar::PubPriv`
+ //~| HELP `no_method_suggested_traits::qux::PrivPub`
+ //~| HELP `no_method_suggested_traits::quz::PrivPriv`
+
+ 1u64.method2();
+ //~^ ERROR no method named
+ //~| HELP the following trait defines an item `method2`, perhaps you need to implement it
+ //~| HELP `foo::Bar`
+ std::rc::Rc::new(&mut Box::new(&1u64)).method2();
+ //~^ ERROR no method named
+ //~| HELP the following trait defines an item `method2`, perhaps you need to implement it
+ //~| HELP `foo::Bar`
+
+ no_method_suggested_traits::Foo.method2();
+ //~^ ERROR no method named
+ //~| HELP following trait defines an item `method2`, perhaps you need to implement it
+ //~| HELP `foo::Bar`
+ std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Foo)).method2();
+ //~^ ERROR no method named
+ //~| HELP following trait defines an item `method2`, perhaps you need to implement it
+ //~| HELP `foo::Bar`
+ no_method_suggested_traits::Bar::X.method2();
+ //~^ ERROR no method named
+ //~| HELP following trait defines an item `method2`, perhaps you need to implement it
+ //~| HELP `foo::Bar`
+ std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Bar::X)).method2();
+ //~^ ERROR no method named
+ //~| HELP following trait defines an item `method2`, perhaps you need to implement it
+ //~| HELP `foo::Bar`
+
+ Foo.method3();
+ //~^ ERROR no method named
+ //~| HELP following trait defines an item `method3`, perhaps you need to implement it
+ //~| HELP `no_method_suggested_traits::foo::PubPub`
+ std::rc::Rc::new(&mut Box::new(&Foo)).method3();
+ //~^ ERROR no method named
+ //~| HELP following trait defines an item `method3`, perhaps you need to implement it
+ //~| HELP `no_method_suggested_traits::foo::PubPub`
+ Bar::X.method3();
+ //~^ ERROR no method named
+ //~| HELP following trait defines an item `method3`, perhaps you need to implement it
+ //~| HELP `no_method_suggested_traits::foo::PubPub`
+ std::rc::Rc::new(&mut Box::new(&Bar::X)).method3();
+ //~^ ERROR no method named
+ //~| HELP following trait defines an item `method3`, perhaps you need to implement it
+ //~| HELP `no_method_suggested_traits::foo::PubPub`
+
+ // should have no help:
+ 1_usize.method3(); //~ ERROR no method named
+ std::rc::Rc::new(&mut Box::new(&1_usize)).method3(); //~ ERROR no method named
+ no_method_suggested_traits::Foo.method3(); //~ ERROR no method named
+ std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Foo)).method3();
+ //~^ ERROR no method named
+ no_method_suggested_traits::Bar::X.method3(); //~ ERROR no method named
+ std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Bar::X)).method3();
+ //~^ ERROR no method named
+}
--- /dev/null
+error[E0599]: no method named `method` found for type `u32` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:33:10
+ |
+33 | 1u32.method();
+ | ^^^^^^
+ |
+ = help: items from traits can only be used if the trait is in scope
+ = note: the following traits are implemented but not in scope, perhaps add a `use` for one of them:
+ candidate #1: `use foo::Bar;`
+ candidate #2: `use no_method_suggested_traits::foo::PubPub;`
+
+error[E0599]: no method named `method` found for type `std::rc::Rc<&mut std::boxed::Box<&u32>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:38:44
+ |
+38 | std::rc::Rc::new(&mut Box::new(&1u32)).method();
+ | ^^^^^^
+ |
+ = help: items from traits can only be used if the trait is in scope
+ = note: the following traits are implemented but not in scope, perhaps add a `use` for one of them:
+ candidate #1: `use foo::Bar;`
+ candidate #2: `use no_method_suggested_traits::foo::PubPub;`
+
+error[E0599]: no method named `method` found for type `char` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:44:9
+ |
+44 | 'a'.method();
+ | ^^^^^^
+ |
+ = help: items from traits can only be used if the trait is in scope
+ = note: the following trait is implemented but not in scope, perhaps add a `use` for it:
+ candidate #1: `use foo::Bar;`
+
+error[E0599]: no method named `method` found for type `std::rc::Rc<&mut std::boxed::Box<&char>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:48:43
+ |
+48 | std::rc::Rc::new(&mut Box::new(&'a')).method();
+ | ^^^^^^
+ |
+ = help: items from traits can only be used if the trait is in scope
+ = note: the following trait is implemented but not in scope, perhaps add a `use` for it:
+ candidate #1: `use foo::Bar;`
+
+error[E0599]: no method named `method` found for type `i32` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:53:10
+ |
+53 | 1i32.method();
+ | ^^^^^^
+ |
+ = help: items from traits can only be used if the trait is in scope
+ = note: the following trait is implemented but not in scope, perhaps add a `use` for it:
+ candidate #1: `use no_method_suggested_traits::foo::PubPub;`
+
+error[E0599]: no method named `method` found for type `std::rc::Rc<&mut std::boxed::Box<&i32>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:57:44
+ |
+57 | std::rc::Rc::new(&mut Box::new(&1i32)).method();
+ | ^^^^^^
+ |
+ = help: items from traits can only be used if the trait is in scope
+ = note: the following trait is implemented but not in scope, perhaps add a `use` for it:
+ candidate #1: `use no_method_suggested_traits::foo::PubPub;`
+
+error[E0599]: no method named `method` found for type `Foo` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:62:9
+ |
+62 | Foo.method();
+ | ^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following traits define an item `method`, perhaps you need to implement one of them:
+ candidate #1: `foo::Bar`
+ candidate #2: `no_method_suggested_traits::foo::PubPub`
+ candidate #3: `no_method_suggested_traits::bar::PubPriv`
+ candidate #4: `no_method_suggested_traits::qux::PrivPub`
+ candidate #5: `no_method_suggested_traits::quz::PrivPriv`
+ candidate #6: `no_method_suggested_traits::Reexported`
+
+error[E0599]: no method named `method` found for type `std::rc::Rc<&mut std::boxed::Box<&Foo>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:71:43
+ |
+71 | std::rc::Rc::new(&mut Box::new(&Foo)).method();
+ | ^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following traits define an item `method`, perhaps you need to implement one of them:
+ candidate #1: `foo::Bar`
+ candidate #2: `no_method_suggested_traits::foo::PubPub`
+ candidate #3: `no_method_suggested_traits::bar::PubPriv`
+ candidate #4: `no_method_suggested_traits::qux::PrivPub`
+ candidate #5: `no_method_suggested_traits::quz::PrivPriv`
+ candidate #6: `no_method_suggested_traits::Reexported`
+
+error[E0599]: no method named `method2` found for type `u64` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:81:10
+ |
+81 | 1u64.method2();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method2`, perhaps you need to implement it:
+ candidate #1: `foo::Bar`
+
+error[E0599]: no method named `method2` found for type `std::rc::Rc<&mut std::boxed::Box<&u64>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:85:44
+ |
+85 | std::rc::Rc::new(&mut Box::new(&1u64)).method2();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method2`, perhaps you need to implement it:
+ candidate #1: `foo::Bar`
+
+error[E0599]: no method named `method2` found for type `no_method_suggested_traits::Foo` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:90:37
+ |
+90 | no_method_suggested_traits::Foo.method2();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method2`, perhaps you need to implement it:
+ candidate #1: `foo::Bar`
+
+error[E0599]: no method named `method2` found for type `std::rc::Rc<&mut std::boxed::Box<&no_method_suggested_traits::Foo>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:94:71
+ |
+94 | std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Foo)).method2();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method2`, perhaps you need to implement it:
+ candidate #1: `foo::Bar`
+
+error[E0599]: no method named `method2` found for type `no_method_suggested_traits::Bar` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:98:40
+ |
+98 | no_method_suggested_traits::Bar::X.method2();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method2`, perhaps you need to implement it:
+ candidate #1: `foo::Bar`
+
+error[E0599]: no method named `method2` found for type `std::rc::Rc<&mut std::boxed::Box<&no_method_suggested_traits::Bar>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:102:74
+ |
+102 | std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Bar::X)).method2();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method2`, perhaps you need to implement it:
+ candidate #1: `foo::Bar`
+
+error[E0599]: no method named `method3` found for type `Foo` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:107:9
+ |
+107 | Foo.method3();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method3`, perhaps you need to implement it:
+ candidate #1: `no_method_suggested_traits::foo::PubPub`
+
+error[E0599]: no method named `method3` found for type `std::rc::Rc<&mut std::boxed::Box<&Foo>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:111:43
+ |
+111 | std::rc::Rc::new(&mut Box::new(&Foo)).method3();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method3`, perhaps you need to implement it:
+ candidate #1: `no_method_suggested_traits::foo::PubPub`
+
+error[E0599]: no method named `method3` found for type `Bar` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:115:12
+ |
+115 | Bar::X.method3();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method3`, perhaps you need to implement it:
+ candidate #1: `no_method_suggested_traits::foo::PubPub`
+
+error[E0599]: no method named `method3` found for type `std::rc::Rc<&mut std::boxed::Box<&Bar>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:119:46
+ |
+119 | std::rc::Rc::new(&mut Box::new(&Bar::X)).method3();
+ | ^^^^^^^
+ |
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `method3`, perhaps you need to implement it:
+ candidate #1: `no_method_suggested_traits::foo::PubPub`
+
+error[E0599]: no method named `method3` found for type `usize` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:125:13
+ |
+125 | 1_usize.method3(); //~ ERROR no method named
+ | ^^^^^^^
+
+error[E0599]: no method named `method3` found for type `std::rc::Rc<&mut std::boxed::Box<&usize>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:126:47
+ |
+126 | std::rc::Rc::new(&mut Box::new(&1_usize)).method3(); //~ ERROR no method named
+ | ^^^^^^^
+
+error[E0599]: no method named `method3` found for type `no_method_suggested_traits::Foo` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:127:37
+ |
+127 | no_method_suggested_traits::Foo.method3(); //~ ERROR no method named
+ | ^^^^^^^
+
+error[E0599]: no method named `method3` found for type `std::rc::Rc<&mut std::boxed::Box<&no_method_suggested_traits::Foo>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:128:71
+ |
+128 | std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Foo)).method3();
+ | ^^^^^^^
+
+error[E0599]: no method named `method3` found for type `no_method_suggested_traits::Bar` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:130:40
+ |
+130 | no_method_suggested_traits::Bar::X.method3(); //~ ERROR no method named
+ | ^^^^^^^
+
+error[E0599]: no method named `method3` found for type `std::rc::Rc<&mut std::boxed::Box<&no_method_suggested_traits::Bar>>` in the current scope
+ --> $DIR/no-method-suggested-traits.rs:131:74
+ |
+131 | std::rc::Rc::new(&mut Box::new(&no_method_suggested_traits::Bar::X)).method3();
+ | ^^^^^^^
+
+error: aborting due to previous error(s)
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct MyType;
+struct MyType2;
+struct MyType3;
+struct MyType4;
+
+impl std::fmt::Display for MyType {
+ fn fmt(&self, x: &str) -> () { }
+}
+
+impl std::fmt::Display for MyType2 {
+ fn fmt(&self) -> () { }
+}
+
+impl std::fmt::Display for MyType3 {
+ fn fmt() -> () { }
+}
+
+impl std::fmt::Display for MyType4 {}
+
+fn main() {}
--- /dev/null
+error[E0053]: method `fmt` has an incompatible type for trait
+ --> $DIR/trait_type.rs:17:4
+ |
+17 | fn fmt(&self, x: &str) -> () { }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ types differ in mutability
+ |
+ = note: expected type `fn(&MyType, &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error>`
+ found type `fn(&MyType, &str)`
+
+error[E0050]: method `fmt` has 1 parameter but the declaration in trait `std::fmt::Display::fmt` has 2
+ --> $DIR/trait_type.rs:21:11
+ |
+21 | fn fmt(&self) -> () { }
+ | ^^^^^ expected 2 parameters, found 1
+ |
+ = note: `fmt` from trait: `fn(&Self, &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error>`
+
+error[E0186]: method `fmt` has a `&self` declaration in the trait, but not in the impl
+ --> $DIR/trait_type.rs:25:4
+ |
+25 | fn fmt() -> () { }
+ | ^^^^^^^^^^^^^^^^^^ expected `&self` in impl
+ |
+ = note: `fmt` from trait: `fn(&Self, &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error>`
+
+error[E0046]: not all trait items implemented, missing: `fmt`
+ --> $DIR/trait_type.rs:28:1
+ |
+28 | impl std::fmt::Display for MyType4 {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ missing `fmt` in implementation
+ |
+ = note: `fmt` from trait: `fn(&Self, &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error>`
+
+error: aborting due to previous error(s)
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+
+#![allow(non_snake_case)]
+#![allow(dead_code)]
+#![allow(unused_variables)]
+
+#[derive(Copy, Clone)]
+enum Foo {
+ Bar,
+ Baz
+}
+
+impl Foo {
+ fn foo(&self) {
+ match self {
+ &
+Bar if true
+//~^ WARN pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+//~^^ HELP to match on a variant, consider making the path in the pattern qualified: `Foo::Bar`
+=> println!("bar"),
+ &
+Baz if false
+//~^ WARN pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+//~^^ HELP to match on a variant, consider making the path in the pattern qualified: `Foo::Baz`
+=> println!("baz"),
+_ => ()
+ }
+ }
+}
+
+fn main() {}
--- /dev/null
+warning[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+ --> $DIR/issue-19100.rs:27:1
+ |
+27 | Bar if true
+ | ^^^
+ |
+ = help: if you meant to match on a variant, consider making the path in the pattern qualified: `Foo::Bar`
+
+warning[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+ --> $DIR/issue-19100.rs:32:1
+ |
+32 | Baz if false
+ | ^^^
+ |
+ = help: if you meant to match on a variant, consider making the path in the pattern qualified: `Foo::Baz`
+
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// error-pattern: overflow representing the type
+
+
+trait Mirror { type It: ?Sized; }
+impl<T: ?Sized> Mirror for T { type It = Self; }
+struct S(Option<<S as Mirror>::It>);
+
+fn main() {
+ let _s = S(None);
+}
--- /dev/null
+error[E0391]: unsupported cyclic reference between types/traits detected
+ |
+note: the cycle begins when computing layout of `S`...
+note: ...which then requires computing layout of `std::option::Option<<S as Mirror>::It>`...
+note: ...which then requires computing layout of `<S as Mirror>::It`...
+ = note: ...which then again requires computing layout of `S`, completing the cycle.
+
+error: aborting due to previous error(s)
+
error[E0277]: the trait bound `{integer}: std::ops::Add<std::option::Option<{integer}>>` is not satisfied
- --> $DIR/binops.rs:12:5
+ --> $DIR/binops.rs:12:7
|
12 | 1 + Some(1);
- | ^^^^^^^^^^^ no implementation for `{integer} + std::option::Option<{integer}>`
+ | ^ no implementation for `{integer} + std::option::Option<{integer}>`
|
= help: the trait `std::ops::Add<std::option::Option<{integer}>>` is not implemented for `{integer}`
error[E0277]: the trait bound `usize: std::ops::Sub<std::option::Option<{integer}>>` is not satisfied
- --> $DIR/binops.rs:13:5
+ --> $DIR/binops.rs:13:16
|
13 | 2 as usize - Some(1);
- | ^^^^^^^^^^^^^^^^^^^^ no implementation for `usize - std::option::Option<{integer}>`
+ | ^ no implementation for `usize - std::option::Option<{integer}>`
|
= help: the trait `std::ops::Sub<std::option::Option<{integer}>>` is not implemented for `usize`
error[E0277]: the trait bound `{integer}: std::ops::Mul<()>` is not satisfied
- --> $DIR/binops.rs:14:5
+ --> $DIR/binops.rs:14:7
|
14 | 3 * ();
- | ^^^^^^ no implementation for `{integer} * ()`
+ | ^ no implementation for `{integer} * ()`
|
= help: the trait `std::ops::Mul<()>` is not implemented for `{integer}`
error[E0277]: the trait bound `{integer}: std::ops::Div<&str>` is not satisfied
- --> $DIR/binops.rs:15:5
+ --> $DIR/binops.rs:15:7
|
15 | 4 / "";
- | ^^^^^^ no implementation for `{integer} / &str`
+ | ^ no implementation for `{integer} / &str`
|
= help: the trait `std::ops::Div<&str>` is not implemented for `{integer}`
error[E0277]: the trait bound `{integer}: std::cmp::PartialEq<std::string::String>` is not satisfied
- --> $DIR/binops.rs:16:5
+ --> $DIR/binops.rs:16:7
|
16 | 5 < String::new();
- | ^^^^^^^^^^^^^^^^^ can't compare `{integer}` with `std::string::String`
+ | ^ can't compare `{integer}` with `std::string::String`
|
= help: the trait `std::cmp::PartialEq<std::string::String>` is not implemented for `{integer}`
error[E0277]: the trait bound `{integer}: std::cmp::PartialOrd<std::string::String>` is not satisfied
- --> $DIR/binops.rs:16:5
+ --> $DIR/binops.rs:16:7
|
16 | 5 < String::new();
- | ^^^^^^^^^^^^^^^^^ can't compare `{integer}` with `std::string::String`
+ | ^ can't compare `{integer}` with `std::string::String`
|
= help: the trait `std::cmp::PartialOrd<std::string::String>` is not implemented for `{integer}`
error[E0277]: the trait bound `{integer}: std::cmp::PartialEq<std::result::Result<{integer}, _>>` is not satisfied
- --> $DIR/binops.rs:17:5
+ --> $DIR/binops.rs:17:7
|
17 | 6 == Ok(1);
- | ^^^^^^^^^^ can't compare `{integer}` with `std::result::Result<{integer}, _>`
+ | ^^ can't compare `{integer}` with `std::result::Result<{integer}, _>`
|
= help: the trait `std::cmp::PartialEq<std::result::Result<{integer}, _>>` is not implemented for `{integer}`
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+
+// Parser test for #37765
+
+fn with_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `with_parens`
+ return (<T as ToString>::to_string(&arg)); //~WARN unnecessary parentheses around `return` value
+}
+
+fn no_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `no_parens`
+ return <T as ToString>::to_string(&arg);
+}
+
+fn main() {
+
+}
--- /dev/null
+warning: unnecessary parentheses around `return` value
+ --> $DIR/path-lookahead.rs:16:10
+ |
+16 | return (<T as ToString>::to_string(&arg)); //~WARN unnecessary parentheses around `return` value
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: #[warn(unused_parens)] on by default
+
+warning: function is never used: `with_parens`
+ --> $DIR/path-lookahead.rs:15:1
+ |
+15 | / fn with_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `with_parens`
+16 | | return (<T as ToString>::to_string(&arg)); //~WARN unnecessary parentheses around `return` value
+17 | | }
+ | |_^
+ |
+ = note: #[warn(dead_code)] on by default
+
+warning: function is never used: `no_parens`
+ --> $DIR/path-lookahead.rs:19:1
+ |
+19 | / fn no_parens<T: ToString>(arg: T) -> String { //~WARN function is never used: `no_parens`
+20 | | return <T as ToString>::to_string(&arg);
+21 | | }
+ | |_^
+ |
+ = note: #[warn(dead_code)] on by default
+
help: possible better candidate is found in another module, you can import it into scope
| use m::S;
-error: tuple struct `Z` is private
+error[E0603]: tuple struct `Z` is private
--> $DIR/privacy-struct-ctor.rs:25:9
|
25 | n::Z; //~ ERROR tuple struct `Z` is private
| ^^^^
-error: tuple struct `S` is private
+error[E0603]: tuple struct `S` is private
--> $DIR/privacy-struct-ctor.rs:35:5
|
35 | m::S; //~ ERROR tuple struct `S` is private
| ^^^^
-error: tuple struct `Z` is private
+error[E0603]: tuple struct `Z` is private
--> $DIR/privacy-struct-ctor.rs:39:5
|
39 | m::n::Z; //~ ERROR tuple struct `Z` is private
| ^^^^^^^
-error: tuple struct `S` is private
+error[E0603]: tuple struct `S` is private
--> $DIR/privacy-struct-ctor.rs:41:5
|
41 | xcrate::m::S; //~ ERROR tuple struct `S` is private
| ^^^^^^^^^^^^
-error: tuple struct `Z` is private
+error[E0603]: tuple struct `Z` is private
--> $DIR/privacy-struct-ctor.rs:45:5
|
45 | xcrate::m::n::Z; //~ ERROR tuple struct `Z` is private
//~| NOTE cyclic type of infinite size
//~| NOTE expected type `_`
//~| NOTE found type `Box<_>`
+
+ let s = &mut String::new();
+ s = format!("foo");
+ //~^ ERROR E0308
+ //~| NOTE expected mutable reference, found struct `std::string::String`
+ //~| NOTE expected type `&mut std::string::String`
+ //~| HELP try with `&mut format!("foo")`
+ //~| NOTE this error originates in a macro outside of the current crate
}
= note: expected type `_`
found type `std::boxed::Box<_>`
+error[E0308]: mismatched types
+ --> $DIR/coerce-suggestions.rs:48:9
+ |
+48 | s = format!("foo");
+ | ^^^^^^^^^^^^^^ expected mutable reference, found struct `std::string::String`
+ |
+ = note: expected type `&mut std::string::String`
+ found type `std::string::String`
+ = help: try with `&mut format!("foo")`
+ = note: this error originates in a macro outside of the current crate
+
error: aborting due to previous error(s)
29 | fn f9(usize) -> usize; //~ NOTE candidate
| ^^^^^^^^^^^^^^^^^^^^^^
= help: to disambiguate the method call, write `UnusedTrait::f9(u, 342)` instead
- = help: items from traits can only be used if the trait is implemented and in scope; the following traits define an item `f9`, perhaps you need to implement one of them:
- = help: candidate #1: `CtxtFn`
- = help: candidate #2: `OtherTrait`
- = help: candidate #3: `UnusedTrait`
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following traits define an item `f9`, perhaps you need to implement one of them:
+ candidate #1: `CtxtFn`
+ candidate #2: `OtherTrait`
+ candidate #3: `UnusedTrait`
error[E0599]: no method named `fff` found for type `Myisize` in the current scope
--> $DIR/issue-7575.rs:74:30
59 | | }
| |_____^
= help: to disambiguate the method call, write `ManyImplTrait::is_str(t)` instead
- = help: items from traits can only be used if the trait is implemented and in scope; the following trait defines an item `is_str`, perhaps you need to implement it:
- = help: candidate #1: `ManyImplTrait`
+ = help: items from traits can only be used if the trait is implemented and in scope
+ = note: the following trait defines an item `is_str`, perhaps you need to implement it:
+ candidate #1: `ManyImplTrait`
error: aborting due to previous error(s)
error[E0277]: the trait bound `u32: std::ops::Add<()>` is not satisfied
- --> $DIR/multiline-span-simple.rs:23:9
+ --> $DIR/multiline-span-simple.rs:23:18
|
-23 | foo(1 as u32 +
- | _________^
-24 | |
-25 | | bar(x,
-26 | |
-27 | | y),
- | |______________^ no implementation for `u32 + ()`
+23 | foo(1 as u32 +
+ | ^ no implementation for `u32 + ()`
|
= help: the trait `std::ops::Add<()>` is not implemented for `u32`
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+// compile-flags: --test
+
+#[test]
+#[should_panic = "foo"]
+//~^ WARN: attribute must be of the form:
+fn test1() {
+ panic!();
+}
+
+#[test]
+#[should_panic(expected)]
+//~^ WARN: argument must be of the form:
+fn test2() {
+ panic!();
+}
+
+#[test]
+#[should_panic(expect)]
+//~^ WARN: argument must be of the form:
+fn test3() {
+ panic!();
+}
+
+#[test]
+#[should_panic(expected(foo, bar))]
+//~^ WARN: argument must be of the form:
+fn test4() {
+ panic!();
+}
+
+#[test]
+#[should_panic(expected = "foo", bar)]
+//~^ WARN: argument must be of the form:
+fn test5() {
+ panic!();
+}
--- /dev/null
+warning: attribute must be of the form: `#[should_panic]` or `#[should_panic(expected = "error message")]`
+ --> $DIR/test-should-panic-attr.rs:15:1
+ |
+15 | #[should_panic = "foo"]
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: Errors in this attribute were erroneously allowed and will become a hard error in a future release.
+
+warning: argument must be of the form: `expected = "error message"`
+ --> $DIR/test-should-panic-attr.rs:22:1
+ |
+22 | #[should_panic(expected)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: Errors in this attribute were erroneously allowed and will become a hard error in a future release.
+
+warning: argument must be of the form: `expected = "error message"`
+ --> $DIR/test-should-panic-attr.rs:29:1
+ |
+29 | #[should_panic(expect)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: Errors in this attribute were erroneously allowed and will become a hard error in a future release.
+
+warning: argument must be of the form: `expected = "error message"`
+ --> $DIR/test-should-panic-attr.rs:36:1
+ |
+36 | #[should_panic(expected(foo, bar))]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: Errors in this attribute were erroneously allowed and will become a hard error in a future release.
+
+warning: argument must be of the form: `expected = "error message"`
+ --> $DIR/test-should-panic-attr.rs:43:1
+ |
+43 | #[should_panic(expected = "foo", bar)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: Errors in this attribute were erroneously allowed and will become a hard error in a future release.
+
use std::str::FromStr;
use std::path::PathBuf;
+use test::ColorConfig;
+
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum Mode {
CompileFail,
// Print one character per test instead of one line
pub quiet: bool,
+ // Whether to use colors in test.
+ pub color: ColorConfig,
+
// where to find the remote test client process, if we're using it
pub remote_test_client: Option<PathBuf>,
return false;
}
- if !line.contains("ignore-gdb-version") &&
- config.parse_name_directive(line, "ignore-gdb") {
+ if config.parse_name_directive(line, "ignore-gdb") {
return true;
}
if let Some(actual_version) = config.gdb_version {
- if line.contains("min-gdb-version") {
+ if line.starts_with("min-gdb-version") {
let (start_ver, end_ver) = extract_gdb_version_range(line);
if start_ver != end_ver {
// Ignore if actual version is smaller the minimum required
// version
actual_version < start_ver
- } else if line.contains("ignore-gdb-version") {
+ } else if line.starts_with("ignore-gdb-version") {
let (min_version, max_version) = extract_gdb_version_range(line);
if max_version < min_version {
fn extract_gdb_version_range(line: &str) -> (u32, u32) {
const ERROR_MESSAGE: &'static str = "Malformed GDB version directive";
- let range_components = line.split(' ')
- .flat_map(|word| word.split('-'))
- .filter(|word| word.len() > 0)
- .skip_while(|word| extract_gdb_version(word).is_none())
- .collect::<Vec<&str>>();
+ let range_components = line.split(&[' ', '-'][..])
+ .filter(|word| !word.is_empty())
+ .map(extract_gdb_version)
+ .skip_while(Option::is_none)
+ .take(3) // 3 or more = invalid, so take at most 3.
+ .collect::<Vec<Option<u32>>>();
match range_components.len() {
1 => {
- let v = extract_gdb_version(range_components[0]).unwrap();
+ let v = range_components[0].unwrap();
(v, v)
}
2 => {
- let v_min = extract_gdb_version(range_components[0]).unwrap();
- let v_max = extract_gdb_version(range_components[1]).expect(ERROR_MESSAGE);
+ let v_min = range_components[0].unwrap();
+ let v_max = range_components[1].expect(ERROR_MESSAGE);
(v_min, v_max)
}
_ => panic!(ERROR_MESSAGE),
}
if let Some(ref actual_version) = config.lldb_version {
- if line.contains("min-lldb-version") {
- let min_version = line.trim()
- .split(' ')
- .last()
+ if line.starts_with("min-lldb-version") {
+ let min_version = line.trim_right()
+ .rsplit(' ')
+ .next()
.expect("Malformed lldb version directive");
// Ignore if actual version is smaller the minimum required
// version
fn ignore_llvm(config: &Config, line: &str) -> bool {
if let Some(ref actual_version) = config.llvm_version {
- if line.contains("min-llvm-version") {
- let min_version = line.trim()
- .split(' ')
- .last()
+ if line.starts_with("min-llvm-version") {
+ let min_version = line.trim_right()
+ .rsplit(' ')
+ .next()
.expect("Malformed llvm version directive");
// Ignore if actual version is smaller the minimum required
// version
pub must_compile_successfully: bool,
// rustdoc will test the output of the `--test` option
pub check_test_line_numbers_match: bool,
+ // The test must be compiled and run successfully. Only used in UI tests for
+ // now.
+ pub run_pass: bool,
}
impl TestProps {
incremental_dir: None,
must_compile_successfully: false,
check_test_line_numbers_match: false,
+ run_pass: false,
}
}
if !self.check_test_line_numbers_match {
self.check_test_line_numbers_match = config.parse_check_test_line_numbers_match(ln);
}
+
+ if !self.run_pass {
+ self.run_pass = config.parse_run_pass(ln);
+ }
});
for key in vec!["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
None => false,
};
if matches {
- it(&ln[close_brace + 1..]);
+ it(ln[(close_brace + 1) ..].trim_left());
}
} else {
panic!("malformed condition directive: expected `//[foo]`, found `{}`",
ln)
}
} else if ln.starts_with("//") {
- it(&ln[2..]);
+ it(ln[2..].trim_left());
}
}
return;
self.parse_name_directive(line, "check-test-line-numbers-match")
}
+ fn parse_run_pass(&self, line: &str) -> bool {
+ self.parse_name_directive(line, "run-pass")
+ }
+
fn parse_env(&self, line: &str, name: &str) -> Option<(String, String)> {
self.parse_name_value_directive(line, name).map(|nv| {
// nv is either FOO or FOO=BAR
}
fn parse_name_directive(&self, line: &str, directive: &str) -> bool {
- // This 'no-' rule is a quick hack to allow pretty-expanded and
- // no-pretty-expanded to coexist
- line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
+ // Ensure the directive is a whole word. Do not match "ignore-x86" when
+ // the line says "ignore-x86_64".
+ line.starts_with(directive) && match line.as_bytes().get(directive.len()) {
+ None | Some(&b' ') | Some(&b':') => true,
+ _ => false
+ }
}
pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> {
- let keycolon = format!("{}:", directive);
- if let Some(colon) = line.find(&keycolon) {
- let value = line[(colon + keycolon.len())..line.len()].to_owned();
+ let colon = directive.len();
+ if line.starts_with(directive) && line.as_bytes().get(colon) == Some(&b':') {
+ let value = line[(colon + 1) ..].to_owned();
debug!("{}: {}", directive, value);
Some(expand_variables(value, self))
} else {
use getopts::{optopt, optflag, reqopt};
use common::Config;
use common::{Pretty, DebugInfoGdb, DebugInfoLldb, Mode};
-use test::TestPaths;
+use test::{TestPaths, ColorConfig};
use util::logv;
use self::header::EarlyProps;
optopt("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS"),
optflag("", "verbose", "run tests verbosely, showing all output"),
optflag("", "quiet", "print one character per test instead of one line"),
+ optopt("", "color", "coloring: auto, always, never", "WHEN"),
optopt("", "logfile", "file to log test execution to", "FILE"),
optopt("", "target", "the target to build for", "TARGET"),
optopt("", "host", "the host to build for", "HOST"),
let (gdb, gdb_version, gdb_native_rust) = analyze_gdb(matches.opt_str("gdb"));
+ let color = match matches.opt_str("color").as_ref().map(|x| &**x) {
+ Some("auto") | None => ColorConfig::AutoColor,
+ Some("always") => ColorConfig::AlwaysColor,
+ Some("never") => ColorConfig::NeverColor,
+ Some(x) => panic!("argument for --color must be auto, always, or never, but found `{}`", x),
+ };
+
Config {
compile_lib_path: make_absolute(opt_path(matches, "compile-lib-path")),
run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
lldb_python_dir: matches.opt_str("lldb-python-dir"),
verbose: matches.opt_present("verbose"),
quiet: matches.opt_present("quiet"),
+ color: color,
remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
cc: matches.opt_str("cc").unwrap(),
Ok(val) => &val != "0",
Err(_) => false
},
- color: test::AutoColor,
+ color: config.color,
test_threads: None,
skip: vec![],
list: false,
self.fatal_proc_rec("compilation failed!", &proc_res);
}
+ // FIXME(#41968): Move this check to tidy?
let expected_errors = errors::load_errors(&self.testpaths.file, self.revision);
- if !expected_errors.is_empty() {
- self.check_expected_errors(expected_errors, &proc_res);
- }
+ assert!(expected_errors.is_empty(),
+ "run-pass tests with expected warnings should be moved to ui/");
let proc_res = self.exec_compiled_test();
match self.config.mode {
CompileFail |
ParseFail |
- RunPass |
Incremental => {
// If we are extracting and matching errors in the new
// fashion, then you want JSON mode. Old-skool error
args.push(dir_opt);
}
+ RunPass |
RunFail |
RunPassValgrind |
Pretty |
self.fatal_proc_rec(&format!("{} errors occurred comparing output.", errors),
&proc_res);
}
+
+ if self.props.run_pass {
+ let proc_res = self.exec_compiled_test();
+
+ if !proc_res.status.success() {
+ self.fatal_proc_rec("test run failed!", &proc_res);
+ }
+ }
}
fn run_mir_opt_test(&self) {
-Subproject commit 8707ceaf040f6d87b67a002de16a8d2bc4db7a41
+Subproject commit b4ff403041f17957f735ad750c3241a3a428b9b7
pub has_gate_test: bool,
}
-pub fn check(path: &Path, bad: &mut bool) {
+pub fn check(path: &Path, bad: &mut bool, quiet: bool) {
let mut features = collect_lang_features(path);
assert!(!features.is_empty());
if *bad {
return;
}
+ if quiet {
+ println!("* {} features", features.len());
+ return;
+ }
let mut lines = Vec::new();
for (name, feature) in features.iter() {
let args: Vec<String> = env::args().skip(1).collect();
let mut bad = false;
+ let quiet = args.iter().any(|s| *s == "--quiet");
bins::check(&path, &mut bad);
style::check(&path, &mut bad);
errors::check(&path, &mut bad);
cargo::check(&path, &mut bad);
- features::check(&path, &mut bad);
+ features::check(&path, &mut bad, quiet);
pal::check(&path, &mut bad);
unstable_book::check(&path, &mut bad);
if !args.iter().any(|s| *s == "--no-vendor") {