git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #40818 - theotherphil:master, r=steveklabnik
author Alex Crichton <alex@alexcrichton.com>
Mon, 27 Mar 2017 20:24:20 +0000 (15:24 -0500)
committer Alex Crichton <alex@alexcrichton.com>
Mon, 27 Mar 2017 22:56:24 +0000 (15:56 -0700)
Don't stutter in operator trait descriptions

Fixes the first item on #29365.

r? @steveklabnik
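
For readers unfamiliar with the issue being fixed: the "stutter" here is a doc-summary style in which an operator trait's one-line description repeats the operator, e.g. "The `Add` trait for the addition operator `+`" rather than simply "The addition operator `+`". The exact doc strings touched by #40818 are not included in this excerpt, so the sketch below is only an illustration of the style change, shown on a hypothetical user-defined `Add` impl rather than on the standard library's own traits.

```rust
// Illustrative sketch only: the wording in the comments is hypothetical and
// not taken from the actual #40818 diff.
use std::ops::Add;

/// A small 2D point type used to demonstrate operator-trait doc style.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

// "Stuttering" summary (before): The `Add` trait for the addition operator `+`.
// De-stuttered summary (after):  The addition operator `+`.
impl Add for Point {
    type Output = Point;

    /// Adds two points component-wise.
    fn add(self, rhs: Point) -> Point {
        Point {
            x: self.x + rhs.x,
            y: self.y + rhs.y,
        }
    }
}

fn main() {
    let sum = Point { x: 1, y: 2 } + Point { x: 3, y: 4 };
    assert_eq!(sum, Point { x: 4, y: 6 });
    println!("{:?}", sum);
}
```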

166 files changed:
.travis.yml
appveyor.yml
cargo
src/Cargo.lock
src/bootstrap/check.rs
src/ci/docker/armhf-gnu/Dockerfile
src/ci/docker/cross/Dockerfile
src/ci/docker/dist-android/Dockerfile
src/ci/docker/dist-arm-linux/Dockerfile
src/ci/docker/dist-armv7-aarch64-linux/Dockerfile
src/ci/docker/dist-freebsd/Dockerfile
src/ci/docker/dist-fuchsia/Dockerfile
src/ci/docker/dist-i586-gnu-i686-musl/Dockerfile
src/ci/docker/dist-mips-linux/Dockerfile
src/ci/docker/dist-mips64-linux/Dockerfile
src/ci/docker/dist-powerpc-linux/Dockerfile
src/ci/docker/dist-powerpc64-linux/Dockerfile
src/ci/docker/dist-s390x-linux-netbsd/Dockerfile
src/ci/docker/dist-x86-linux/Dockerfile
src/ci/docker/dist-x86_64-musl/Dockerfile
src/ci/docker/emscripten/Dockerfile
src/ci/docker/emscripten/build-emscripten.sh
src/ci/docker/i686-gnu-nopt/Dockerfile
src/ci/docker/i686-gnu/Dockerfile
src/ci/docker/x86_64-gnu-aux/Dockerfile
src/ci/docker/x86_64-gnu-debug/Dockerfile
src/ci/docker/x86_64-gnu-distcheck/Dockerfile
src/ci/docker/x86_64-gnu-full-bootstrap/Dockerfile
src/ci/docker/x86_64-gnu-incremental/Dockerfile
src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile
src/ci/docker/x86_64-gnu-nopt/Dockerfile
src/ci/docker/x86_64-gnu/Dockerfile
src/doc/unstable-book/src/inclusive-range-syntax.md
src/grammar/verify.rs
src/libcollections/linked_list.rs
src/libcore/num/dec2flt/algorithm.rs
src/libcore/slice/sort.rs
src/libgraphviz/lib.rs
src/liblibc
src/liblog/Cargo.toml [deleted file]
src/liblog/directive.rs [deleted file]
src/liblog/lib.rs [deleted file]
src/liblog/macros.rs [deleted file]
src/librustc/Cargo.toml
src/librustc/dep_graph/dep_node.rs
src/librustc/dep_graph/dep_tracking_map.rs
src/librustc/dep_graph/edges.rs
src/librustc/dep_graph/shadow.rs
src/librustc/hir/lowering.rs
src/librustc/hir/map/mod.rs
src/librustc/hir/mod.rs
src/librustc/ich/fingerprint.rs
src/librustc/lint/context.rs
src/librustc/middle/cstore.rs
src/librustc/middle/dead.rs
src/librustc/middle/reachable.rs
src/librustc/middle/resolve_lifetime.rs
src/librustc/middle/stability.rs
src/librustc/ty/adjustment.rs
src/librustc/ty/context.rs
src/librustc/ty/item_path.rs
src/librustc/ty/maps.rs
src/librustc/ty/mod.rs
src/librustc_back/Cargo.toml
src/librustc_borrowck/Cargo.toml
src/librustc_borrowck/borrowck/fragments.rs
src/librustc_borrowck/borrowck/mod.rs
src/librustc_borrowck/graphviz.rs
src/librustc_const_eval/Cargo.toml
src/librustc_const_eval/_match.rs
src/librustc_const_eval/check_match.rs
src/librustc_data_structures/Cargo.toml
src/librustc_data_structures/accumulate_vec.rs
src/librustc_data_structures/base_n.rs
src/librustc_data_structures/blake2b.rs
src/librustc_data_structures/indexed_set.rs
src/librustc_driver/Cargo.toml
src/librustc_driver/driver.rs
src/librustc_driver/lib.rs
src/librustc_driver/pretty.rs
src/librustc_driver/test.rs
src/librustc_incremental/Cargo.toml
src/librustc_incremental/persist/file_format.rs
src/librustc_lint/Cargo.toml
src/librustc_lint/bad_style.rs
src/librustc_lint/builtin.rs
src/librustc_lint/unused.rs
src/librustc_llvm/build.rs
src/librustc_metadata/Cargo.toml
src/librustc_metadata/creader.rs
src/librustc_metadata/cstore_impl.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/locator.rs
src/librustc_metadata/schema.rs
src/librustc_mir/Cargo.toml
src/librustc_passes/Cargo.toml
src/librustc_plugin/load.rs
src/librustc_privacy/lib.rs
src/librustc_resolve/Cargo.toml
src/librustc_resolve/build_reduced_graph.rs
src/librustc_resolve/lib.rs
src/librustc_resolve/macros.rs
src/librustc_resolve/resolve_imports.rs
src/librustc_save_analysis/Cargo.toml
src/librustc_save_analysis/csv_dumper.rs
src/librustc_save_analysis/json_dumper.rs
src/librustc_save_analysis/lib.rs
src/librustc_trans/Cargo.toml
src/librustc_trans/abi.rs
src/librustc_trans/adt.rs
src/librustc_trans/asm.rs
src/librustc_trans/back/archive.rs
src/librustc_trans/back/link.rs
src/librustc_trans/back/lto.rs
src/librustc_trans/back/rpath.rs
src/librustc_trans/back/symbol_export.rs
src/librustc_trans/back/symbol_names.rs
src/librustc_trans/back/write.rs
src/librustc_trans/base.rs
src/librustc_trans/builder.rs
src/librustc_trans/context.rs
src/librustc_trans/monomorphize.rs
src/librustc_typeck/Cargo.toml
src/librustc_typeck/check/method/probe.rs
src/librustc_typeck/coherence/builtin.rs
src/librustc_typeck/coherence/inherent.rs [deleted file]
src/librustc_typeck/coherence/inherent_impls.rs [new file with mode: 0644]
src/librustc_typeck/coherence/inherent_impls_overlap.rs [new file with mode: 0644]
src/librustc_typeck/coherence/mod.rs
src/librustdoc/Cargo.toml
src/librustdoc/clean/inline.rs
src/librustdoc/core.rs
src/librustdoc/html/render.rs
src/librustdoc/lib.rs
src/librustdoc/visit_ast.rs
src/libstd/io/mod.rs
src/libstd/os/raw.rs
src/libstd/sys/unix/ext/net.rs
src/libstd/sys/unix/process/process_common.rs
src/libstd/sys/unix/process/process_unix.rs
src/libstd/sys/windows/process.rs
src/libstd/sys/windows/stdio.rs
src/libstd/sys_common/net.rs
src/libsyntax/Cargo.toml
src/libsyntax/ext/placeholders.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/feature_gate.rs
src/libsyntax/parse/parser.rs
src/libsyntax/symbol.rs
src/libsyntax/test.rs
src/libsyntax_ext/Cargo.toml
src/test/compile-fail/imports/shadow_builtin_macros.rs [new file with mode: 0644]
src/test/compile-fail/issue-40749.rs [new file with mode: 0644]
src/test/compile-fail/static-lifetime-bound.rs [new file with mode: 0644]
src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs [deleted file]
src/test/run-pass-fulldeps/conditional-debug-macro-off.rs [deleted file]
src/test/run-pass-fulldeps/logging-enabled-debug.rs [deleted file]
src/test/run-pass-fulldeps/logging-enabled.rs [deleted file]
src/test/run-pass-fulldeps/logging-right-crate.rs [deleted file]
src/test/run-pass-fulldeps/logging-separate-lines.rs [deleted file]
src/test/run-pass-fulldeps/rust-log-filter.rs [deleted file]
src/test/run-pass-fulldeps/switch-stdout.rs [new file with mode: 0644]
src/test/run-pass/conditional-debug-macro-on.rs
src/test/run-pass/issue-40770.rs [new file with mode: 0644]
src/tools/compiletest/Cargo.toml

index 1faf860a3004f04108dd313aca616194ce6dfb14..148b59e8c64ebc5209ab68710d6f331001ca416a 100644 (file)
@@ -52,7 +52,7 @@ matrix:
       os: osx
       osx_image: xcode8.2
       install: &osx_install_sccache >
-        travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-apple-darwin &&
+        travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-apple-darwin &&
           chmod +x /usr/local/bin/sccache &&
         travis_retry curl -o /usr/local/bin/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
           chmod +x /usr/local/bin/stamp
index c33e07fb17e5f834aa9f7d5e441fd25f9a30ef99..0f4d053b6cfeb30260ef50096fc5a8d647c1a13a 100644 (file)
@@ -115,8 +115,8 @@ install:
   - set PATH=C:\Python27;%PATH%
 
   # Download and install sccache
-  - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-pc-windows-msvc
-  - mv 2017-03-22-sccache-x86_64-pc-windows-msvc sccache.exe
+  - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-pc-windows-msvc
+  - mv 2017-03-24-sccache-x86_64-pc-windows-msvc sccache.exe
   - set PATH=%PATH%;%CD%
 
   # Download and install ninja
diff --git a/cargo b/cargo
index c995e9eb5acf3976ae8674a0dc6d9e958053d9fd..4e95c6b41eca3388f54dd5f7787366ad2df637b5 160000 (submodule)
--- a/cargo
+++ b/cargo
@@ -1 +1 @@
-Subproject commit c995e9eb5acf3976ae8674a0dc6d9e958053d9fd
+Subproject commit 4e95c6b41eca3388f54dd5f7787366ad2df637b5
index 9ae894061a6762d0af21dde50427de153f6bfc6c..a0b47f4f0b2bb575be40d6eefd1801047a5969eb 100644 (file)
@@ -147,7 +147,7 @@ dependencies = [
 name = "compiletest"
 version = "0.0.0"
 dependencies = [
- "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -162,14 +162,6 @@ name = "dtoa"
 version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
-[[package]]
-name = "env_logger"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
 [[package]]
 name = "env_logger"
 version = "0.4.2"
@@ -270,10 +262,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "linkchecker"
 version = "0.1.0"
 
-[[package]]
-name = "log"
-version = "0.0.0"
-
 [[package]]
 name = "log"
 version = "0.3.7"
@@ -439,7 +427,7 @@ dependencies = [
  "arena 0.0.0",
  "fmt_macros 0.0.0",
  "graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc_back 0.0.0",
  "rustc_bitflags 0.0.0",
  "rustc_const_math 0.0.0",
@@ -479,7 +467,7 @@ dependencies = [
 name = "rustc_back"
 version = "0.0.0"
 dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "serialize 0.0.0",
  "syntax 0.0.0",
 ]
@@ -493,7 +481,7 @@ name = "rustc_borrowck"
 version = "0.0.0"
 dependencies = [
  "graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_data_structures 0.0.0",
  "rustc_errors 0.0.0",
@@ -508,7 +496,7 @@ version = "0.0.0"
 dependencies = [
  "arena 0.0.0",
  "graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
  "rustc_const_math 0.0.0",
@@ -530,7 +518,7 @@ dependencies = [
 name = "rustc_data_structures"
 version = "0.0.0"
 dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "serialize 0.0.0",
 ]
 
@@ -539,8 +527,9 @@ name = "rustc_driver"
 version = "0.0.0"
 dependencies = [
  "arena 0.0.0",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "proc_macro_plugin 0.0.0",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
@@ -579,7 +568,7 @@ name = "rustc_incremental"
 version = "0.0.0"
 dependencies = [
  "graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_data_structures 0.0.0",
  "serialize 0.0.0",
@@ -591,7 +580,7 @@ dependencies = [
 name = "rustc_lint"
 version = "0.0.0"
 dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
  "rustc_const_eval 0.0.0",
@@ -623,7 +612,7 @@ name = "rustc_metadata"
 version = "0.0.0"
 dependencies = [
  "flate 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "proc_macro 0.0.0",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
@@ -642,7 +631,7 @@ name = "rustc_mir"
 version = "0.0.0"
 dependencies = [
  "graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_bitflags 0.0.0",
  "rustc_const_eval 0.0.0",
@@ -666,7 +655,7 @@ dependencies = [
 name = "rustc_passes"
 version = "0.0.0"
 dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_const_eval 0.0.0",
  "rustc_const_math 0.0.0",
@@ -705,7 +694,7 @@ name = "rustc_resolve"
 version = "0.0.0"
 dependencies = [
  "arena 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_errors 0.0.0",
  "syntax 0.0.0",
@@ -716,7 +705,7 @@ dependencies = [
 name = "rustc_save_analysis"
 version = "0.0.0"
 dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rls-data 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rls-span 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
@@ -730,7 +719,7 @@ name = "rustc_trans"
 version = "0.0.0"
 dependencies = [
  "flate 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
  "rustc_bitflags 0.0.0",
@@ -762,7 +751,7 @@ version = "0.0.0"
 dependencies = [
  "arena 0.0.0",
  "fmt_macros 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
  "rustc_const_eval 0.0.0",
@@ -780,8 +769,9 @@ version = "0.0.0"
 dependencies = [
  "arena 0.0.0",
  "build_helper 0.1.0",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
  "rustc_const_eval 0.0.0",
@@ -857,7 +847,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "syntax"
 version = "0.0.0"
 dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc_bitflags 0.0.0",
  "rustc_data_structures 0.0.0",
  "rustc_errors 0.0.0",
@@ -870,7 +860,7 @@ name = "syntax_ext"
 version = "0.0.0"
 dependencies = [
  "fmt_macros 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "proc_macro 0.0.0",
  "rustc_errors 0.0.0",
  "syntax 0.0.0",
@@ -996,7 +986,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum clap 2.21.1 (registry+https://github.com/rust-lang/crates.io-index)" = "74a80f603221c9cd9aa27a28f52af452850051598537bb6b359c38a7d61e5cda"
 "checksum cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "d18d68987ed4c516dcc3e7913659bfa4076f5182eea4a7e0038bb060953e76ac"
 "checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
-"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
 "checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83"
 "checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
 "checksum gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)" = "a32cd40070d7611ab76343dcb3204b2bb28c8a9450989a83a3d590248142f439"
index 40cdb9242df15b2fec93cee92f272d31e4ec5849..f8f641060c442674f4d7b9a03b8d8bcde5008fec 100644 (file)
@@ -586,7 +586,7 @@ fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
                       .arg(ADB_TEST_DIR));
 
     let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
-    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
+    build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir]));
 
     for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
         let f = t!(f);
index a5126cb3c3f230ff54ace942bf755dcbfcad967d..933562c79e5821837b8fae00fa23bc908d750900 100644 (file)
@@ -74,7 +74,7 @@ RUN arm-linux-gnueabihf-gcc addentropy.c -o rootfs/addentropy -static
 RUN curl -O http://ftp.nl.debian.org/debian/dists/jessie/main/installer-armhf/current/images/device-tree/vexpress-v2p-ca15-tc1.dtb
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 9e7abb8b36e358110a03582de2f2e60ef991989f..8dc02ab522c21bf9604a500048734714fc33faa6 100644 (file)
@@ -22,7 +22,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   pkg-config
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 2785d2135f864d41a6f3199d1a34b1e95c840118..44d6863bf0bbdc8430f7cab91cd0c289f41e63a6 100644 (file)
@@ -32,7 +32,7 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
 ENTRYPOINT ["/usr/bin/dumb-init", "--"]
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV TARGETS=arm-linux-androideabi
index 6cdb5cfca74ba5d6e5181fdbbfbcfbee5b19114b..7facc52390ff4a294b1f5111a46cfe340dc7614b 100644 (file)
@@ -62,7 +62,7 @@ RUN ./build-toolchains.sh
 USER root
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
index ed9b4a5929188ad0ea90fb1afed3de46d79fede8..369e5a7dffe2a8048c84023e861a322a280a0194 100644 (file)
@@ -63,7 +63,7 @@ RUN ./build-toolchains.sh
 USER root
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV PATH=$PATH:/x-tools/aarch64-unknown-linux-gnueabi/bin
index 332a1fa73a4d5c60708a0d4be0ddc63b466c560d..633f58ea474b4eccc623af0cd2d03e29fa1b622d 100644 (file)
@@ -26,7 +26,7 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
 ENTRYPOINT ["/usr/bin/dumb-init", "--"]
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV \
index dcb9aa905f14ebcf70eac83379ad0dbb93bc2b35..ed37a9e842e227a9219599bf00b70e947a8c90d5 100644 (file)
@@ -29,7 +29,7 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
 ENTRYPOINT ["/usr/bin/dumb-init", "--"]
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV \
index 2051396988589da0332c7d978578471bc390c51c..d88ec7aab34643da663133c818c5bfadf58f7f1c 100644 (file)
@@ -26,7 +26,7 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
 ENTRYPOINT ["/usr/bin/dumb-init", "--"]
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV RUST_CONFIGURE_ARGS \
index 3123e69e7e1df22f97a4ca7be1b4c0832fcca3b5..938c53ae48837d057b653cced3a47d190b2b0b03 100644 (file)
@@ -18,7 +18,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   pkg-config
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 52db932dcdc38499aa97efbbf636cc5a4913e4c1..45de8100b4f27f9f3aed69d309e70a678e4840b3 100644 (file)
@@ -18,7 +18,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   pkg-config
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index fca931596853bf489731a767576bd3cf2a10ebd6..c1e5e863ae06c3b710900cdc6f03d81dbb9d4596 100644 (file)
@@ -63,7 +63,7 @@ RUN ./build-powerpc-toolchain.sh
 USER root
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV PATH=$PATH:/x-tools/powerpc-unknown-linux-gnu/bin
index 17a4734aba7d1458eead69f76d9ccaf47e950aea..7413c327323e5072aca0e34050286b13a552eebd 100644 (file)
@@ -67,7 +67,7 @@ COPY build-powerpc64le-toolchain.sh /tmp/
 RUN ./build-powerpc64le-toolchain.sh
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV PATH=$PATH:/x-tools/powerpc64-unknown-linux-gnu/bin
index c2877b081d5d7a7838fac16d96ae7fea6b85190c..4180006690fc902c76eb19eda0b3cf9c91d62edb 100644 (file)
@@ -66,7 +66,7 @@ RUN ./build-netbsd-toolchain.sh
 USER root
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV PATH=$PATH:/x-tools/s390x-ibm-linux-gnu/bin:/x-tools/x86_64-unknown-netbsd/bin
index 100b6dcf9937eb7f6631406d5c8e32fa03b00ea5..18c7a4d2b3e7f54306fd5dba0f4e9056d6a927d7 100644 (file)
@@ -76,7 +76,7 @@ RUN curl -Lo /rustroot/dumb-init \
 ENTRYPOINT ["/rustroot/dumb-init", "--"]
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV HOSTS=i686-unknown-linux-gnu
index 7ebd34ee904bcf3f2a03d2fa9e72384fb98673f5..085aa351659905c2af0da4113bd96c7d4a19207f 100644 (file)
@@ -26,7 +26,7 @@ RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-ini
 ENTRYPOINT ["/usr/bin/dumb-init", "--"]
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 ENV RUST_CONFIGURE_ARGS \
index 630b122c9359ab744d658404256defad42b299d5..77cf54a19a7fd9ee510066f3a9ed19a9aa0f4b16 100644 (file)
@@ -15,7 +15,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   lib32stdc++6
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 88bf583007ce53f296b5a2405dac1174025d94a9..e39767357ad6301a56f463ec24d49a0ab7e97181 100755 (executable)
@@ -29,7 +29,24 @@ exit 1
 }
 
 curl https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz | \
-      tar xzf -
+    tar xzf -
+
+# Some versions of the EMSDK archive have their contents in .emsdk-portable
+# and others in emsdk_portable. Make sure the EMSDK ends up in a fixed path.
+if [ -d emsdk-portable ]; then
+    mv emsdk-portable emsdk_portable
+fi
+
+if [ ! -d emsdk_portable ]; then
+    echo "ERROR: Invalid emsdk archive. Dumping working directory." >&2
+    ls -l
+    exit 1
+fi
+
+# Some versions of the EMSDK set the permissions of the root directory to
+# 0700. Ensure the directory is readable by all users.
+chmod 755 emsdk_portable
+
 source emsdk_portable/emsdk_env.sh
 hide_output emsdk update
 hide_output emsdk install --build=Release sdk-tag-1.37.1-32bit
index b9f47731eee757190cd73f4fa5e355d0c0d07822..c84cf56e4e858bdaf788b30371b8fc00a672099b 100644 (file)
@@ -14,7 +14,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   xz-utils
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index e4a9c5b258fe6144e0c2a299b467aa4f940db38e..f4bb9083b85827ece5f152af18aaa1a4009761f7 100644 (file)
@@ -14,7 +14,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   xz-utils
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 104361830e1cd3031fe957b6acd06c10834f3b5c..68184c65cf17ffcf75ac8728c2d4de0a5889f279 100644 (file)
@@ -15,7 +15,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   pkg-config
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 1575efdb4cb70763647635bb800a61d705dd30f2..6320a806fc301d3b7d9b7afd29d409ec46fcfc6e 100644 (file)
@@ -14,7 +14,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   xz-utils
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 94847712094d1aad97092e51d3e7273efc8023a5..180f53ec33f31f173ebd1dcd230ebf44272cda40 100644 (file)
@@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   pkg-config
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 542b4f90b119f6120d0ec5746b2019b10cfda4d0..4500fc0f642deec293c20ca4c7e68e6514a1a9ca 100644 (file)
@@ -14,7 +14,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   xz-utils
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 4d9b218c7654cfadbb2d6c4fa56ee1d552f8d4bc..ad1227fa581f9ca745e74752d23333ab8b3b65eb 100644 (file)
@@ -14,7 +14,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   xz-utils
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 04358ff97484ab071d6393c73ecc0903eae08020..f1240201805718819fed02848b084ba7debbb263 100644 (file)
@@ -17,7 +17,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   xz-utils
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index dddeb82e6bfb2af8553002798548e4605522bc96..fa9707d1a7352c9f54d1b7bcd91acf4ba79639a0 100644 (file)
@@ -14,7 +14,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   xz-utils
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 393bcf023ea5bb1b913c0d4bda77874f894ccd3a..e5d89034dbe4268b6e46dcdf6dae3d914bbd591e 100644 (file)
@@ -14,7 +14,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   xz-utils
 
 RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
       chmod +x /usr/local/bin/sccache
 
 RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
index 74d85536399d72f93096fce8091cb67d6167ddbe..255445c318dca7c830e4e325df5155715259ba44 100644 (file)
@@ -6,5 +6,15 @@ The tracking issue for this feature is: [#28237]
 
 ------------------------
 
+To get a range that goes from 0 to 10 and includes the value 10, you
+can write `0...10`:
 
+```rust
+#![feature(inclusive_range_syntax)]
 
+fn main() {
+    for i in 0...10 {
+        println!("{}", i);
+    }
+}
+```
index 919fc98e438c52a0ceb21bf41f38d2fecd48a274..bd28a63c5f4dfb947edce379ac569c3c1af43519 100644 (file)
@@ -196,7 +196,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
     let toknum = &s[content_end + 3 .. toknum_end];
 
     let not_found = format!("didn't find token {:?} in the map", toknum);
-    let proto_tok = tokens.get(toknum).expect(&not_found[..]);
+    let proto_tok = tokens.get(toknum).expect(&not_found);
 
     let nm = Symbol::intern(content);
 
@@ -304,14 +304,14 @@ fn next(r: &mut lexer::StringReader) -> TokenAndSpan {
     let mut token_file = File::open(&Path::new(&args.next().unwrap())).unwrap();
     let mut token_list = String::new();
     token_file.read_to_string(&mut token_list).unwrap();
-    let token_map = parse_token_list(&token_list[..]);
+    let token_map = parse_token_list(&token_list);
 
     let stdin = std::io::stdin();
     let lock = stdin.lock();
     let lines = lock.lines();
     let antlr_tokens = lines.map(|l| parse_antlr_token(l.unwrap().trim(),
                                                        &token_map,
-                                                       &surrogate_pairs_pos[..],
+                                                       &surrogate_pairs_pos,
                                                        has_bom));
 
     for antlr_tok in antlr_tokens {
index f58c87b801f552852d2f7c4392971eeaf2d18c3c..8f0488f69369e5d905c18c906e1cfa5c459e34f0 100644 (file)
@@ -1376,7 +1376,7 @@ fn test_send() {
         thread::spawn(move || {
                 check_links(&n);
                 let a: &[_] = &[&1, &2, &3];
-                assert_eq!(a, &n.iter().collect::<Vec<_>>()[..]);
+                assert_eq!(a, &*n.iter().collect::<Vec<_>>());
             })
             .join()
             .ok()
index 604bc7c10dea0aedaa886e976f1c63e392314fce..60dab943a3acfa094b93b275162bacc294610b78 100644 (file)
@@ -141,7 +141,7 @@ pub fn fast_path<T: RawFloat>(integral: &[u8], fractional: &[u8], e: i64) -> Opt
 ///
 /// It rounds ``f`` to a float with 64 bit significand and multiplies it by the best approximation
 /// of `10^e` (in the same floating point format). This is often enough to get the correct result.
-/// However, when the result is close to halfway between two adjecent (ordinary) floats, the
+/// However, when the result is close to halfway between two adjacent (ordinary) floats, the
 /// compound rounding error from multiplying two approximation means the result may be off by a
 /// few bits. When this happens, the iterative Algorithm R fixes things up.
 ///
@@ -392,7 +392,7 @@ fn underflow<T: RawFloat>(x: Big, v: Big, rem: Big) -> T {
     //
     // Therefore, when the rounded-off bits are != 0.5 ULP, they decide the rounding
     // on their own. When they are equal and the remainder is non-zero, the value still
-    // needs to be rounded up. Only when the rounded off bits are 1/2 and the remainer
+    // needs to be rounded up. Only when the rounded off bits are 1/2 and the remainder
     // is zero, we have a half-to-even situation.
     let bits = x.bit_length();
     let lsb = bits - T::sig_bits() as usize;
index d13d537d99301781dd6aa246c4008d529354e320..307e4974d9769f6a1762c6c32951b6e88866674d 100644 (file)
@@ -152,8 +152,8 @@ fn partial_insertion_sort<T, F>(v: &mut [T], is_less: &mut F) -> bool
 fn insertion_sort<T, F>(v: &mut [T], is_less: &mut F)
     where F: FnMut(&T, &T) -> bool
 {
-    for i in 2..v.len()+1 {
-        shift_tail(&mut v[..i], is_less);
+    for i in 1..v.len() {
+        shift_tail(&mut v[..i+1], is_less);
     }
 }
 
index 8e587ad211de80c3c2e50b5d9672a1e527690794..1b2c7775185f78d9dbf9e318236f8d3e4e5c0426 100644 (file)
@@ -554,7 +554,7 @@ fn escape_str(s: &str) -> String {
     pub fn to_dot_string(&self) -> String {
         match self {
             &LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
-            &EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s[..])),
+            &EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)),
             &HtmlStr(ref s) => format!("<{}>", s),
         }
     }
@@ -587,7 +587,7 @@ pub fn suffix_line(self, suffix: LabelText) -> LabelText<'static> {
         let mut prefix = self.pre_escaped_content().into_owned();
         let suffix = suffix.pre_escaped_content();
         prefix.push_str(r"\n\n");
-        prefix.push_str(&suffix[..]);
+        prefix.push_str(&suffix);
         EscStr(prefix.into_cow())
     }
 }
@@ -878,7 +878,7 @@ impl<'a> Labeller<'a> for LabelledGraph {
         type Node = Node;
         type Edge = &'a Edge;
         fn graph_id(&'a self) -> Id<'a> {
-            Id::new(&self.name[..]).unwrap()
+            Id::new(self.name).unwrap()
         }
         fn node_id(&'a self, n: &Node) -> Id<'a> {
             id_name(n)
index 64d954c6a76e896fbf7ed5c17e77c40e388abe84..05a2d197356ef253dfd985166576619ac9b6947f 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 64d954c6a76e896fbf7ed5c17e77c40e388abe84
+Subproject commit 05a2d197356ef253dfd985166576619ac9b6947f
diff --git a/src/liblog/Cargo.toml b/src/liblog/Cargo.toml
deleted file mode 100644 (file)
index 31a8624..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-[package]
-authors = ["The Rust Project Developers"]
-name = "log"
-version = "0.0.0"
-
-[lib]
-name = "log"
-path = "lib.rs"
-crate-type = ["dylib", "rlib"]
diff --git a/src/liblog/directive.rs b/src/liblog/directive.rs
deleted file mode 100644 (file)
index eb50d6e..0000000
+++ /dev/null
@@ -1,193 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::ascii::AsciiExt;
-use std::cmp;
-
-#[derive(Debug, Clone)]
-pub struct LogDirective {
-    pub name: Option<String>,
-    pub level: u32,
-}
-
-pub const LOG_LEVEL_NAMES: [&'static str; 5] = ["ERROR", "WARN", "INFO", "DEBUG", "TRACE"];
-
-/// Parse an individual log level that is either a number or a symbolic log level
-fn parse_log_level(level: &str) -> Option<u32> {
-    level.parse::<u32>()
-        .ok()
-        .or_else(|| {
-            let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level));
-            pos.map(|p| p as u32 + 1)
-        })
-        .map(|p| cmp::min(p, ::MAX_LOG_LEVEL))
-}
-
-/// Parse a logging specification string (e.g: "crate1,crate2::mod3,crate3::x=1/foo")
-/// and return a vector with log directives.
-///
-/// Valid log levels are 0-255, with the most likely ones being 1-4 (defined in
-/// std::).  Also supports string log levels of error, warn, info, and debug
-pub fn parse_logging_spec(spec: &str) -> (Vec<LogDirective>, Option<String>) {
-    let mut dirs = Vec::new();
-
-    let mut parts = spec.split('/');
-    let mods = parts.next();
-    let filter = parts.next();
-    if parts.next().is_some() {
-        println!("warning: invalid logging spec '{}', ignoring it (too many '/'s)",
-                 spec);
-        return (dirs, None);
-    }
-    if let Some(m) = mods {
-        for s in m.split(',') {
-            if s.is_empty() {
-                continue;
-            }
-            let mut parts = s.split('=');
-            let (log_level, name) =
-                match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) {
-                    (Some(part0), None, None) => {
-                        // if the single argument is a log-level string or number,
-                        // treat that as a global fallback
-                        match parse_log_level(part0) {
-                            Some(num) => (num, None),
-                            None => (::MAX_LOG_LEVEL, Some(part0)),
-                        }
-                    }
-                    (Some(part0), Some(""), None) => (::MAX_LOG_LEVEL, Some(part0)),
-                    (Some(part0), Some(part1), None) => {
-                        match parse_log_level(part1) {
-                            Some(num) => (num, Some(part0)),
-                            _ => {
-                                println!("warning: invalid logging spec '{}', ignoring it", part1);
-                                continue;
-                            }
-                        }
-                    }
-                    _ => {
-                        println!("warning: invalid logging spec '{}', ignoring it", s);
-                        continue;
-                    }
-                };
-            dirs.push(LogDirective {
-                name: name.map(str::to_owned),
-                level: log_level,
-            });
-        }
-    }
-
-    (dirs, filter.map(str::to_owned))
-}
-
-#[cfg(test)]
-mod tests {
-    use super::parse_logging_spec;
-
-    #[test]
-    fn parse_logging_spec_valid() {
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=1,crate1::mod2,crate2=4");
-        assert_eq!(dirs.len(), 3);
-        assert_eq!(dirs[0].name, Some("crate1::mod1".to_owned()));
-        assert_eq!(dirs[0].level, 1);
-
-        assert_eq!(dirs[1].name, Some("crate1::mod2".to_owned()));
-        assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);
-
-        assert_eq!(dirs[2].name, Some("crate2".to_owned()));
-        assert_eq!(dirs[2].level, 4);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_invalid_crate() {
-        // test parse_logging_spec with multiple = in specification
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=1=2,crate2=4");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_owned()));
-        assert_eq!(dirs[0].level, 4);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_invalid_log_level() {
-        // test parse_logging_spec with 'noNumber' as log level
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=noNumber,crate2=4");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_owned()));
-        assert_eq!(dirs[0].level, 4);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_string_log_level() {
-        // test parse_logging_spec with 'warn' as log level
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=wrong,crate2=warn");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_owned()));
-        assert_eq!(dirs[0].level, ::WARN);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_empty_log_level() {
-        // test parse_logging_spec with '' as log level
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=wrong,crate2=");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_owned()));
-        assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_global() {
-        // test parse_logging_spec with no crate
-        let (dirs, filter) = parse_logging_spec("warn,crate2=4");
-        assert_eq!(dirs.len(), 2);
-        assert_eq!(dirs[0].name, None);
-        assert_eq!(dirs[0].level, 2);
-        assert_eq!(dirs[1].name, Some("crate2".to_owned()));
-        assert_eq!(dirs[1].level, 4);
-        assert!(filter.is_none());
-    }
-
-    #[test]
-    fn parse_logging_spec_valid_filter() {
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=1,crate1::mod2,crate2=4/abc");
-        assert_eq!(dirs.len(), 3);
-        assert_eq!(dirs[0].name, Some("crate1::mod1".to_owned()));
-        assert_eq!(dirs[0].level, 1);
-
-        assert_eq!(dirs[1].name, Some("crate1::mod2".to_owned()));
-        assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);
-
-        assert_eq!(dirs[2].name, Some("crate2".to_owned()));
-        assert_eq!(dirs[2].level, 4);
-        assert!(filter.is_some() && filter.unwrap().to_owned() == "abc");
-    }
-
-    #[test]
-    fn parse_logging_spec_invalid_crate_filter() {
-        let (dirs, filter) = parse_logging_spec("crate1::mod1=1=2,crate2=4/a.c");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate2".to_owned()));
-        assert_eq!(dirs[0].level, 4);
-        assert!(filter.is_some() && filter.unwrap().to_owned() == "a.c");
-    }
-
-    #[test]
-    fn parse_logging_spec_empty_with_filter() {
-        let (dirs, filter) = parse_logging_spec("crate1/a*c");
-        assert_eq!(dirs.len(), 1);
-        assert_eq!(dirs[0].name, Some("crate1".to_owned()));
-        assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL);
-        assert!(filter.is_some() && filter.unwrap().to_owned() == "a*c");
-    }
-}
diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs
deleted file mode 100644 (file)
index 057df64..0000000
+++ /dev/null
@@ -1,506 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Utilities for program-wide and customizable logging
-//!
-//! # Examples
-//!
-//! ```
-//! # #![feature(rustc_private)]
-//! #[macro_use] extern crate log;
-//!
-//! fn main() {
-//!     debug!("this is a debug {:?}", "message");
-//!     error!("this is printed by default");
-//!
-//!     if log_enabled!(log::INFO) {
-//!         let x = 3 * 4; // expensive computation
-//!         info!("the answer was: {:?}", x);
-//!     }
-//! }
-//! ```
-//!
-//! Assumes the binary is `main`:
-//!
-//! ```{.bash}
-//! $ RUST_LOG=error ./main
-//! ERROR:main: this is printed by default
-//! ```
-//!
-//! ```{.bash}
-//! $ RUST_LOG=info ./main
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
-//! ```
-//!
-//! ```{.bash}
-//! $ RUST_LOG=debug ./main
-//! DEBUG:main: this is a debug message
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
-//! ```
-//!
-//! You can also set the log level on a per module basis:
-//!
-//! ```{.bash}
-//! $ RUST_LOG=main=info ./main
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
-//! ```
-//!
-//! And enable all logging:
-//!
-//! ```{.bash}
-//! $ RUST_LOG=main ./main
-//! DEBUG:main: this is a debug message
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
-//! ```
-//!
-//! # Logging Macros
-//!
-//! There are five macros that the logging subsystem uses:
-//!
-//! * `log!(level, ...)` - the generic logging macro, takes a level as a u32 and any
-//!                        related `format!` arguments
-//! * `debug!(...)` - a macro hard-wired to the log level of `DEBUG`
-//! * `info!(...)` - a macro hard-wired to the log level of `INFO`
-//! * `warn!(...)` - a macro hard-wired to the log level of `WARN`
-//! * `error!(...)` - a macro hard-wired to the log level of `ERROR`
-//!
-//! All of these macros use the same style of syntax as the `format!` syntax
-//! extension. Details about the syntax can be found in the documentation of
-//! `std::fmt` along with the Rust tutorial/manual.
-//!
-//! If you want to check at runtime if a given logging level is enabled (e.g. if the
-//! information you would want to log is expensive to produce), you can use the
-//! following macro:
-//!
-//! * `log_enabled!(level)` - returns true if logging of the given level is enabled
-//!
-//! # Enabling logging
-//!
-//! Log levels are controlled on a per-module basis, and by default all logging is
-//! disabled except for `error!` (a log level of 1). Logging is controlled via the
-//! `RUST_LOG` environment variable. The value of this environment variable is a
-//! comma-separated list of logging directives. A logging directive is of the form:
-//!
-//! ```text
-//! path::to::module=log_level
-//! ```
-//!
-//! The path to the module is rooted in the name of the crate it was compiled for,
-//! so if your program is contained in a file `hello.rs`, for example, to turn on
-//! logging for this file you would use a value of `RUST_LOG=hello`.
-//! Furthermore, this path is a prefix-search, so all modules nested in the
-//! specified module will also have logging enabled.
-//!
-//! The actual `log_level` is optional to specify. If omitted, all logging will be
-//! enabled. If specified, the it must be either a numeric in the range of 1-255, or
-//! it must be one of the strings `debug`, `error`, `info`, or `warn`. If a numeric
-//! is specified, then all logging less than or equal to that numeral is enabled.
-//! For example, if logging level 3 is active, error, warn, and info logs will be
-//! printed, but debug will be omitted.
-//!
-//! As the log level for a module is optional, the module to enable logging for is
-//! also optional. If only a `log_level` is provided, then the global log level for
-//! all modules is set to this value.
-//!
-//! Some examples of valid values of `RUST_LOG` are:
-//!
-//! * `hello` turns on all logging for the 'hello' module
-//! * `info` turns on all info logging
-//! * `hello=debug` turns on debug logging for 'hello'
-//! * `hello=3` turns on info logging for 'hello'
-//! * `hello,std::option` turns on hello, and std's option logging
-//! * `error,hello=warn` turn on global error logging and also warn for hello
-//!
-//! # Filtering results
-//!
-//! A RUST_LOG directive may include a string filter. The syntax is to append
-//! `/` followed by a string. Each message is checked against the string and is
-//! only logged if it contains the string. Note that the matching is done after
-//! formatting the log string but before adding any logging meta-data. There is
-//! a single filter for all modules.
-//!
-//! Some examples:
-//!
-//! * `hello/foo` turns on all logging for the 'hello' module where the log message
-//! includes 'foo'.
-//! * `info/f.o` turns on all info logging where the log message includes 'foo',
-//! 'f1o', 'fao', etc.
-//! * `hello=debug/foo*foo` turns on debug logging for 'hello' where the log
-//! message includes 'foofoo' or 'fofoo' or 'fooooooofoo', etc.
-//! * `error,hello=warn/[0-9] scopes` turn on global error logging and also warn for
-//!  hello. In both cases the log message must include a single digit number
-//!  followed by 'scopes'
-//!
-//! # Performance and Side Effects
-//!
-//! Each of these macros will expand to code similar to:
-//!
-//! ```rust,ignore
-//! if log_level <= my_module_log_level() {
-//!     ::log::log(log_level, format!(...));
-//! }
-//! ```
-//!
-//! What this means is that each of these macros are very cheap at runtime if
-//! they're turned off (just a load and an integer comparison). This also means that
-//! if logging is disabled, none of the components of the log will be executed.
-
-#![crate_name = "log"]
-#![unstable(feature = "rustc_private",
-            reason = "use the crates.io `log` library instead",
-            issue = "27812")]
-#![crate_type = "rlib"]
-#![crate_type = "dylib"]
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
-       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
-       html_root_url = "https://doc.rust-lang.org/nightly/",
-       html_playground_url = "https://play.rust-lang.org/",
-       test(attr(deny(warnings))))]
-#![deny(missing_docs)]
-#![deny(warnings)]
-
-#![feature(staged_api)]
-
-use std::cell::RefCell;
-use std::fmt;
-use std::io::{self, Stderr};
-use std::io::prelude::*;
-use std::mem;
-use std::env;
-use std::slice;
-use std::sync::{Mutex, ONCE_INIT, Once};
-
-use directive::LOG_LEVEL_NAMES;
-
-#[macro_use]
-pub mod macros;
-
-mod directive;
-
-/// Maximum logging level of a module that can be specified. Common logging
-/// levels are found in the DEBUG/INFO/WARN/ERROR constants.
-pub const MAX_LOG_LEVEL: u32 = 255;
-
-/// The default logging level of a crate if no other is specified.
-const DEFAULT_LOG_LEVEL: u32 = 1;
-
-static mut LOCK: *mut Mutex<(Vec<directive::LogDirective>, Option<String>)> = 0 as *mut _;
-
-/// An unsafe constant that is the maximum logging level of any module
-/// specified. This is the first line of defense to determining whether a
-/// logging statement should be run.
-static mut LOG_LEVEL: u32 = MAX_LOG_LEVEL;
-
-/// Debug log level
-pub const DEBUG: u32 = 4;
-/// Info log level
-pub const INFO: u32 = 3;
-/// Warn log level
-pub const WARN: u32 = 2;
-/// Error log level
-pub const ERROR: u32 = 1;
-
-thread_local! {
-    static LOCAL_LOGGER: RefCell<Option<Box<Logger + Send>>> = {
-        RefCell::new(None)
-    }
-}
-
-/// A trait used to represent an interface to a thread-local logger. Each thread
-/// can have its own custom logger which can respond to logging messages
-/// however it likes.
-pub trait Logger {
-    /// Logs a single message described by the `record`.
-    fn log(&mut self, record: &LogRecord);
-}
-
-struct DefaultLogger {
-    handle: Stderr,
-}
-
-/// Wraps the log level with fmt implementations.
-#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
-pub struct LogLevel(pub u32);
-
-impl fmt::Display for LogLevel {
-    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
-        let LogLevel(level) = *self;
-        match LOG_LEVEL_NAMES.get(level as usize - 1) {
-            Some(ref name) => fmt::Display::fmt(name, fmt),
-            None => fmt::Display::fmt(&level, fmt),
-        }
-    }
-}
-
-impl Logger for DefaultLogger {
-    fn log(&mut self, record: &LogRecord) {
-        match writeln!(&mut self.handle,
-                       "{}:{}: {}",
-                       record.level,
-                       record.module_path,
-                       record.args) {
-            Err(e) => panic!("failed to log: {:?}", e),
-            Ok(()) => {}
-        }
-    }
-}
-
-impl Drop for DefaultLogger {
-    fn drop(&mut self) {
-        // FIXME(#12628): is panicking the right thing to do?
-        match self.handle.flush() {
-            Err(e) => panic!("failed to flush a logger: {:?}", e),
-            Ok(()) => {}
-        }
-    }
-}
-
-/// This function is called directly by the compiler when using the logging
-/// macros. This function does not take into account whether the log level
-/// specified is active or not, it will always log something if this method is
-/// called.
-///
-/// It is not recommended to call this function directly, rather it should be
-/// invoked through the logging family of macros.
-#[doc(hidden)]
-pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) {
-    // Test the literal string from args against the current filter, if there
-    // is one.
-    unsafe {
-        let filter = (*LOCK).lock().unwrap();
-        if let Some(ref filter) = filter.1 {
-            if !args.to_string().contains(filter) {
-                return;
-            }
-        }
-    }
-
-    // Completely remove the local logger from TLS in case anyone attempts to
-    // frob the slot while we're doing the logging. This will destroy any logger
-    // set during logging.
-    let logger = LOCAL_LOGGER.with(|s| s.borrow_mut().take());
-    let mut logger = logger.unwrap_or_else(|| Box::new(DefaultLogger { handle: io::stderr() }));
-    logger.log(&LogRecord {
-        level: LogLevel(level),
-        args: args,
-        file: loc.file,
-        module_path: loc.module_path,
-        line: loc.line,
-    });
-    set_logger(logger);
-}
-
-/// Getter for the global log level. This is a function so that it can be called
-/// safely
-#[doc(hidden)]
-#[inline(always)]
-pub fn log_level() -> u32 {
-    unsafe { LOG_LEVEL }
-}
-
-/// Replaces the thread-local logger with the specified logger, returning the old
-/// logger.
-pub fn set_logger(logger: Box<Logger + Send>) -> Option<Box<Logger + Send>> {
-    LOCAL_LOGGER.with(|slot| mem::replace(&mut *slot.borrow_mut(), Some(logger)))
-}
-
-/// A LogRecord is created by the logging macros, and passed as the only
-/// argument to Loggers.
-#[derive(Debug)]
-pub struct LogRecord<'a> {
-    /// The module path of where the LogRecord originated.
-    pub module_path: &'a str,
-
-    /// The LogLevel of this record.
-    pub level: LogLevel,
-
-    /// The arguments from the log line.
-    pub args: fmt::Arguments<'a>,
-
-    /// The file of where the LogRecord originated.
-    pub file: &'a str,
-
-    /// The line number of where the LogRecord originated.
-    pub line: u32,
-}
-
-#[doc(hidden)]
-#[derive(Copy, Clone)]
-pub struct LogLocation {
-    pub module_path: &'static str,
-    pub file: &'static str,
-    pub line: u32,
-}
-
-/// Tests whether a given module's name is enabled for a particular level of
-/// logging. This is the second layer of defense in determining whether a
-/// module's log statement should be emitted or not.
-#[doc(hidden)]
-pub fn mod_enabled(level: u32, module: &str) -> bool {
-    static INIT: Once = ONCE_INIT;
-    INIT.call_once(init);
-
-    // It's possible for many threads to be in this function; only one of them
-    // will perform the global initialization, but all of them will need to check
-    // again whether they should really be here or not. Hence, despite this
-    // check being expanded manually in the logging macro, this function checks
-    // the log level again.
-    if level > unsafe { LOG_LEVEL } {
-        return false;
-    }
-
-    // This assertion should never get tripped unless we're in an at_exit
-    // handler after logging has been torn down and a logging attempt was made.
-
-    unsafe {
-        let directives = (*LOCK).lock().unwrap();
-        enabled(level, module, directives.0.iter())
-    }
-}
-
-fn enabled(level: u32, module: &str, iter: slice::Iter<directive::LogDirective>) -> bool {
-    // Search for the longest match, the vector is assumed to be pre-sorted.
-    for directive in iter.rev() {
-        match directive.name {
-            Some(ref name) if !module.starts_with(&name[..]) => {}
-            Some(..) | None => return level <= directive.level,
-        }
-    }
-    level <= DEFAULT_LOG_LEVEL
-}
-
-/// Initialize logging for the current process.
-///
-/// This is not threadsafe at all, so initialization is performed through a
-/// `Once` primitive (and this function is called from that primitive).
-fn init() {
-    let (mut directives, filter) = match env::var("RUST_LOG") {
-        Ok(spec) => directive::parse_logging_spec(&spec[..]),
-        Err(..) => (Vec::new(), None),
-    };
-
-    // Sort the provided directives by the length of their name; this allows a
-    // little more efficient lookup at runtime.
-    directives.sort_by(|a, b| {
-        let alen = a.name.as_ref().map(|a| a.len()).unwrap_or(0);
-        let blen = b.name.as_ref().map(|b| b.len()).unwrap_or(0);
-        alen.cmp(&blen)
-    });
-
-    let max_level = {
-        let max = directives.iter().max_by_key(|d| d.level);
-        max.map(|d| d.level).unwrap_or(DEFAULT_LOG_LEVEL)
-    };
-
-    unsafe {
-        LOG_LEVEL = max_level;
-
-        assert!(LOCK.is_null());
-        LOCK = Box::into_raw(Box::new(Mutex::new((directives, filter))));
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::enabled;
-    use directive::LogDirective;
-
-    #[test]
-    fn match_full_path() {
-        let dirs = [LogDirective {
-                        name: Some("crate2".to_string()),
-                        level: 3,
-                    },
-                    LogDirective {
-                        name: Some("crate1::mod1".to_string()),
-                        level: 2,
-                    }];
-        assert!(enabled(2, "crate1::mod1", dirs.iter()));
-        assert!(!enabled(3, "crate1::mod1", dirs.iter()));
-        assert!(enabled(3, "crate2", dirs.iter()));
-        assert!(!enabled(4, "crate2", dirs.iter()));
-    }
-
-    #[test]
-    fn no_match() {
-        let dirs = [LogDirective {
-                        name: Some("crate2".to_string()),
-                        level: 3,
-                    },
-                    LogDirective {
-                        name: Some("crate1::mod1".to_string()),
-                        level: 2,
-                    }];
-        assert!(!enabled(2, "crate3", dirs.iter()));
-    }
-
-    #[test]
-    fn match_beginning() {
-        let dirs = [LogDirective {
-                        name: Some("crate2".to_string()),
-                        level: 3,
-                    },
-                    LogDirective {
-                        name: Some("crate1::mod1".to_string()),
-                        level: 2,
-                    }];
-        assert!(enabled(3, "crate2::mod1", dirs.iter()));
-    }
-
-    #[test]
-    fn match_beginning_longest_match() {
-        let dirs = [LogDirective {
-                        name: Some("crate2".to_string()),
-                        level: 3,
-                    },
-                    LogDirective {
-                        name: Some("crate2::mod".to_string()),
-                        level: 4,
-                    },
-                    LogDirective {
-                        name: Some("crate1::mod1".to_string()),
-                        level: 2,
-                    }];
-        assert!(enabled(4, "crate2::mod1", dirs.iter()));
-        assert!(!enabled(4, "crate2", dirs.iter()));
-    }
-
-    #[test]
-    fn match_default() {
-        let dirs = [LogDirective {
-                        name: None,
-                        level: 3,
-                    },
-                    LogDirective {
-                        name: Some("crate1::mod1".to_string()),
-                        level: 2,
-                    }];
-        assert!(enabled(2, "crate1::mod1", dirs.iter()));
-        assert!(enabled(3, "crate2::mod2", dirs.iter()));
-    }
-
-    #[test]
-    fn zero_level() {
-        let dirs = [LogDirective {
-                        name: None,
-                        level: 3,
-                    },
-                    LogDirective {
-                        name: Some("crate1::mod1".to_string()),
-                        level: 0,
-                    }];
-        assert!(!enabled(1, "crate1::mod1", dirs.iter()));
-        assert!(enabled(3, "crate2::mod2", dirs.iter()));
-    }
-}
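
The removed `enabled` function above is the heart of the old `RUST_LOG` matching: directives are kept sorted by name length, and the longest directive whose name is a prefix of the module path decides the allowed level, with a default when nothing matches. A minimal standalone sketch of that rule, using a simplified `LogDirective` and an illustrative default level (not the liblog internals being deleted here):

```
// Sketch of longest-prefix directive matching as used by the old liblog.
// Directives are assumed pre-sorted by name length, so scanning in reverse
// finds the most specific match first.
struct LogDirective {
    name: Option<String>, // None acts as a catch-all default
    level: u32,
}

const DEFAULT_LOG_LEVEL: u32 = 1; // illustrative default (ERROR)

fn enabled(level: u32, module: &str, directives: &[LogDirective]) -> bool {
    for directive in directives.iter().rev() {
        match directive.name {
            Some(ref name) if !module.starts_with(&name[..]) => {}
            Some(..) | None => return level <= directive.level,
        }
    }
    level <= DEFAULT_LOG_LEVEL
}

fn main() {
    // RUST_LOG=crate1::mod1=2,crate2=3 would roughly parse to:
    let dirs = vec![
        LogDirective { name: Some("crate2".to_string()), level: 3 },
        LogDirective { name: Some("crate1::mod1".to_string()), level: 2 },
    ];
    assert!(enabled(2, "crate1::mod1", &dirs));
    assert!(!enabled(3, "crate1::mod1", &dirs));
    assert!(enabled(3, "crate2::mod", &dirs));
}
```
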
diff --git a/src/liblog/macros.rs b/src/liblog/macros.rs
deleted file mode 100644 (file)
index 803a2df..0000000
+++ /dev/null
@@ -1,205 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Logging macros
-
-/// The standard logging macro
-///
-/// This macro will generically log over a provided level (of type u32) with a
-/// format!-based argument list. See documentation in `std::fmt` for details on
-/// how to use the syntax.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-///     log!(log::WARN, "this is a warning {}", "message");
-///     log!(log::DEBUG, "this is a debug message");
-///     log!(6, "this is a custom logging level: {level}", level=6);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=warn ./main
-/// WARN:main: this is a warning message
-/// ```
-///
-/// ```{.bash}
-/// $ RUST_LOG=debug ./main
-/// DEBUG:main: this is a debug message
-/// WARN:main: this is a warning message
-/// ```
-///
-/// ```{.bash}
-/// $ RUST_LOG=6 ./main
-/// DEBUG:main: this is a debug message
-/// WARN:main: this is a warning message
-/// 6:main: this is a custom logging level: 6
-/// ```
-#[macro_export]
-macro_rules! log {
-    ($lvl:expr, $($arg:tt)+) => ({
-        static LOC: ::log::LogLocation = ::log::LogLocation {
-            line: line!(),
-            file: file!(),
-            module_path: module_path!(),
-        };
-        let lvl = $lvl;
-        if log_enabled!(lvl) {
-            ::log::log(lvl, &LOC, format_args!($($arg)+))
-        }
-    })
-}
-
-/// A convenience macro for logging at the error log level.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-///     let error = 3;
-///     error!("the build has failed with error code: {}", error);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=error ./main
-/// ERROR:main: the build has failed with error code: 3
-/// ```
-///
-#[macro_export]
-macro_rules! error {
-    ($($arg:tt)*) => (log!(::log::ERROR, $($arg)*))
-}
-
-/// A convenience macro for logging at the warning log level.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-///     let code = 3;
-///     warn!("you may like to know that a process exited with: {}", code);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=warn ./main
-/// WARN:main: you may like to know that a process exited with: 3
-/// ```
-#[macro_export]
-macro_rules! warn {
-    ($($arg:tt)*) => (log!(::log::WARN, $($arg)*))
-}
-
-/// A convenience macro for logging at the info log level.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-///     let ret = 3;
-///     info!("this function is about to return: {}", ret);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=info ./main
-/// INFO:main: this function is about to return: 3
-/// ```
-#[macro_export]
-macro_rules! info {
-    ($($arg:tt)*) => (log!(::log::INFO, $($arg)*))
-}
-
-/// A convenience macro for logging at the debug log level. This macro will
-/// be omitted at compile time in an optimized build unless `-C debug-assertions`
-/// is passed to the compiler.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-///     debug!("x = {x}, y = {y}", x=10, y=20);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=debug ./main
-/// DEBUG:main: x = 10, y = 20
-/// ```
-#[macro_export]
-macro_rules! debug {
-    ($($arg:tt)*) => (if cfg!(debug_assertions) { log!(::log::DEBUG, $($arg)*) })
-}
-
-/// A macro to test whether a log level is enabled for the current module.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// struct Point { x: i32, y: i32 }
-/// fn some_expensive_computation() -> Point { Point { x: 1, y: 2 } }
-///
-/// fn main() {
-///     if log_enabled!(log::DEBUG) {
-///         let x = some_expensive_computation();
-///         debug!("x.x = {}, x.y = {}", x.x, x.y);
-///     }
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=error ./main
-/// ```
-///
-/// ```{.bash}
-/// $ RUST_LOG=debug ./main
-/// DEBUG:main: x.x = 1, x.y = 2
-/// ```
-#[macro_export]
-macro_rules! log_enabled {
-    ($lvl:expr) => ({
-        let lvl = $lvl;
-        (lvl != ::log::DEBUG || cfg!(debug_assertions)) &&
-        lvl <= ::log::log_level() &&
-        ::log::mod_enabled(lvl, module_path!())
-    })
-}
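
With the in-tree liblog deleted, the compiler crates move to the crates.io `log` facade (the `log = "0.3"` Cargo.toml hunks that follow), plus `env_logger` in the driver. The macro surface is close but not identical: levels become a typed `LogLevel` enum rather than `u32` constants. A rough, illustrative sketch of the new spellings against log 0.3's public API (not code from this commit); note that nothing is printed until a logger backend is installed, which the driver changes further down handle with `env_logger`:

```
// Illustrative only: how code written against the old liblog maps onto the
// external `log` 0.3 crate used from this commit on.
#[macro_use]
extern crate log; // log = "0.3" in Cargo.toml

use log::LogLevel;

fn work() {
    // Old: log_enabled!(::log::DEBUG)   New: a typed LogLevel enum.
    if log_enabled!(LogLevel::Debug) {
        debug!("expensive diagnostics only built when enabled");
    }
    info!("info!/warn!/error! keep the same format_args! interface");
}

fn main() {
    work();
}
```
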
index 5d53c60ad7fdc1072620abf156c939ae5dce7619..fa217acd9f9bf5e4eed1ea2feaede465f4450cbd 100644 (file)
@@ -12,7 +12,7 @@ crate-type = ["dylib"]
 arena = { path = "../libarena" }
 fmt_macros = { path = "../libfmt_macros" }
 graphviz = { path = "../libgraphviz" }
-log = { path = "../liblog" }
+log = "0.3"
 rustc_back = { path = "../librustc_back" }
 rustc_bitflags = { path = "../librustc_bitflags" }
 rustc_const_math = { path = "../librustc_const_math" }
index 399af258e925167767be1ea6f3417141bfee6ae4..5aea2bcaa4f5c5fd5f4979df6ddb5183f4a9dcc5 100644 (file)
@@ -8,6 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use hir::def_id::CrateNum;
 use std::fmt::Debug;
 use std::sync::Arc;
 
@@ -74,14 +75,13 @@ pub enum DepNode<D: Clone + Debug> {
     CoherenceCheckImpl(D),
     CoherenceOverlapCheck(D),
     CoherenceOverlapCheckSpecial(D),
-    CoherenceOverlapInherentCheck(D),
     CoherenceOrphanCheck(D),
     Variance,
     WfCheck(D),
     TypeckItemType(D),
     UnusedTraitCheck,
     CheckConst(D),
-    Privacy,
+    PrivacyAccessLevels(CrateNum),
     IntrinsicCheck(D),
     MatchCheck(D),
 
@@ -230,7 +230,7 @@ pub fn map_def<E, OP>(&self, mut op: OP) -> Option<DepNode<E>>
             CheckEntryFn => Some(CheckEntryFn),
             Variance => Some(Variance),
             UnusedTraitCheck => Some(UnusedTraitCheck),
-            Privacy => Some(Privacy),
+            PrivacyAccessLevels(k) => Some(PrivacyAccessLevels(k)),
             Reachability => Some(Reachability),
             DeadCheck => Some(DeadCheck),
             LateLintCheck => Some(LateLintCheck),
@@ -251,7 +251,6 @@ pub fn map_def<E, OP>(&self, mut op: OP) -> Option<DepNode<E>>
             CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl),
             CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck),
             CoherenceOverlapCheckSpecial(ref d) => op(d).map(CoherenceOverlapCheckSpecial),
-            CoherenceOverlapInherentCheck(ref d) => op(d).map(CoherenceOverlapInherentCheck),
             CoherenceOrphanCheck(ref d) => op(d).map(CoherenceOrphanCheck),
             WfCheck(ref d) => op(d).map(WfCheck),
             TypeckItemType(ref d) => op(d).map(TypeckItemType),
index 0f3108df9a822589aa4bba9743d8eaf77309a8c8..b6a2360211cac92610410d081de314d4c45a2148 100644 (file)
@@ -81,21 +81,6 @@ pub fn contains_key(&self, k: &M::Key) -> bool {
     pub fn keys(&self) -> Vec<M::Key> {
         self.map.keys().cloned().collect()
     }
-
-    /// Append `elem` to the vector stored for `k`, creating a new vector if needed.
-    /// This is considered a write to `k`.
-    ///
-    /// NOTE: Caution is required when using this method. You should
-    /// be sure that nobody is **reading from the vector** while you
-    /// are writing to it. Eventually, it'd be nice to remove this.
-    pub fn push<E: Clone>(&mut self, k: M::Key, elem: E)
-        where M: DepTrackingMapConfig<Value=Vec<E>>
-    {
-        self.write(&k);
-        self.map.entry(k)
-                .or_insert(Vec::new())
-                .push(elem);
-    }
 }
 
 impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> {
index 8657a3e5a587899f80ebb536847bcb8f8a9a9f0c..5dbabcc923048482f8f06884ac3ec25c1ae7e82c 100644 (file)
@@ -101,11 +101,15 @@ pub fn pop_task(&mut self, key: DepNode<D>) {
     }
 
     /// Indicates that the current task `C` reads `v` by adding an
-    /// edge from `v` to `C`. If there is no current task, panics. If
-    /// you want to suppress this edge, use `ignore`.
+    /// edge from `v` to `C`. If there is no current task, has no
+    /// effect. Note that *reading* from tracked state is harmless if
+    /// you are not in a task; what is bad is *writing* to tracked
+    /// state (and leaking data that you read into a tracked task).
     pub fn read(&mut self, v: DepNode<D>) {
-        let source = self.make_node(v);
-        self.add_edge_from_current_node(|current| (source, current))
+        if self.current_node().is_some() {
+            let source = self.make_node(v);
+            self.add_edge_from_current_node(|current| (source, current))
+        }
     }
 
     /// Indicates that the current task `C` writes `v` by adding an
index 5d4190a8ae1a35bd76d35bab0e7641897ba25e5b..bedb6ff2771f064e468b5789a0ffb0a04b33fc85 100644 (file)
@@ -80,7 +80,13 @@ pub fn enqueue(&self, message: &DepMessage) {
 
             let mut stack = self.stack.borrow_mut();
             match *message {
-                DepMessage::Read(ref n) => self.check_edge(Some(Some(n)), top(&stack)),
+                // It is ok to READ shared state outside of a
+                // task. That can't do any harm (at least, the only
+                // way it can do harm is by leaking that data into a
+                // query or task, which would be a problem
+                // anyway). What would be bad is WRITING to that
+                // state.
+                DepMessage::Read(_) => { }
                 DepMessage::Write(ref n) => self.check_edge(top(&stack), Some(Some(n))),
                 DepMessage::PushTask(ref n) => stack.push(Some(n.clone())),
                 DepMessage::PushIgnore => stack.push(None),
@@ -116,7 +122,7 @@ fn check_edge(&self,
             (None, None) => unreachable!(),
 
             // nothing on top of the stack
-            (None, Some(n)) | (Some(n), None) => bug!("read/write of {:?} but no current task", n),
+            (None, Some(n)) | (Some(n), None) => bug!("write of {:?} but no current task", n),
 
             // this corresponds to an Ignore being top of the stack
             (Some(None), _) | (_, Some(None)) => (),
index 2ac1a036f99e1a996a85f60c1f653c0693474181..6ca0c971ea4979951919b7e1e6e388e443acb8b9 100644 (file)
@@ -1900,57 +1900,45 @@ fn lower_expr(&mut self, e: &Expr) -> hir::Expr {
                 hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er)))
             }
             ExprKind::Range(ref e1, ref e2, lims) => {
-                fn make_struct(this: &mut LoweringContext,
-                               ast_expr: &Expr,
-                               path: &[&str],
-                               fields: &[(&str, &P<Expr>)]) -> hir::Expr {
-                    let struct_path = &iter::once(&"ops").chain(path).map(|s| *s)
-                                                         .collect::<Vec<_>>();
-                    let unstable_span = this.allow_internal_unstable("...", ast_expr.span);
-
-                    if fields.len() == 0 {
-                        this.expr_std_path(unstable_span, struct_path,
-                                           ast_expr.attrs.clone())
-                    } else {
-                        let fields = fields.into_iter().map(|&(s, e)| {
-                            let expr = P(this.lower_expr(&e));
-                            let unstable_span = this.allow_internal_unstable("...", e.span);
-                            this.field(Symbol::intern(s), expr, unstable_span)
-                        }).collect();
-                        let attrs = ast_expr.attrs.clone();
-
-                        this.expr_std_struct(unstable_span, struct_path, fields, None, attrs)
-                    }
-                }
-
                 use syntax::ast::RangeLimits::*;
 
-                return match (e1, e2, lims) {
-                    (&None,         &None,         HalfOpen) =>
-                        make_struct(self, e, &["RangeFull"], &[]),
-
-                    (&Some(ref e1), &None,         HalfOpen) =>
-                        make_struct(self, e, &["RangeFrom"],
-                                             &[("start", e1)]),
-
-                    (&None,         &Some(ref e2), HalfOpen) =>
-                        make_struct(self, e, &["RangeTo"],
-                                             &[("end", e2)]),
-
-                    (&Some(ref e1), &Some(ref e2), HalfOpen) =>
-                        make_struct(self, e, &["Range"],
-                                             &[("start", e1), ("end", e2)]),
-
-                    (&None,         &Some(ref e2), Closed)   =>
-                        make_struct(self, e, &["RangeToInclusive"],
-                                             &[("end", e2)]),
-
-                    (&Some(ref e1), &Some(ref e2), Closed)   =>
-                        make_struct(self, e, &["RangeInclusive", "NonEmpty"],
-                                             &[("start", e1), ("end", e2)]),
+                let (path, variant) = match (e1, e2, lims) {
+                    (&None, &None, HalfOpen) => ("RangeFull", None),
+                    (&Some(..), &None, HalfOpen) => ("RangeFrom", None),
+                    (&None, &Some(..), HalfOpen) => ("RangeTo", None),
+                    (&Some(..), &Some(..), HalfOpen) => ("Range", None),
+                    (&None, &Some(..), Closed) => ("RangeToInclusive", None),
+                    (&Some(..), &Some(..), Closed) => ("RangeInclusive", Some("NonEmpty")),
+                    (_, &None, Closed) =>
+                        panic!(self.diagnostic().span_fatal(
+                            e.span, "inclusive range with no end")),
+                };
 
-                    _ => panic!(self.diagnostic()
-                                    .span_fatal(e.span, "inclusive range with no end")),
+                let fields =
+                    e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e)))
+                    .map(|(s, e)| {
+                        let expr = P(self.lower_expr(&e));
+                        let unstable_span = self.allow_internal_unstable("...", e.span);
+                        self.field(Symbol::intern(s), expr, unstable_span)
+                    }).collect::<P<[hir::Field]>>();
+
+                let is_unit = fields.is_empty();
+                let unstable_span = self.allow_internal_unstable("...", e.span);
+                let struct_path =
+                    iter::once("ops").chain(iter::once(path)).chain(variant)
+                    .collect::<Vec<_>>();
+                let struct_path = self.std_path(unstable_span, &struct_path, is_unit);
+                let struct_path = hir::QPath::Resolved(None, P(struct_path));
+
+                return hir::Expr {
+                    id: self.lower_node_id(e.id),
+                    node: if is_unit {
+                        hir::ExprPath(struct_path)
+                    } else {
+                        hir::ExprStruct(struct_path, fields, None)
+                    },
+                    span: unstable_span,
+                    attrs: e.attrs.clone(),
                 };
             }
             ExprKind::Path(ref qself, ref path) => {
@@ -2613,17 +2601,6 @@ fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec<hir::Expr>) -> P<hir::Expr
         P(self.expr(sp, hir::ExprTup(exprs), ThinVec::new()))
     }
 
-    fn expr_std_struct(&mut self,
-                       span: Span,
-                       components: &[&str],
-                       fields: hir::HirVec<hir::Field>,
-                       e: Option<P<hir::Expr>>,
-                       attrs: ThinVec<Attribute>) -> hir::Expr {
-        let path = self.std_path(span, components, false);
-        let qpath = hir::QPath::Resolved(None, P(path));
-        self.expr(span, hir::ExprStruct(qpath, fields, e), attrs)
-    }
-
     fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec<Attribute>) -> hir::Expr {
         hir::Expr {
             id: self.next_id(),
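
The rewritten `ExprKind::Range` lowering replaces the six `make_struct` calls with a single table: the pair of optional endpoints plus the limits select a path under `std::ops` (with `RangeInclusive::NonEmpty` as the one enum variant), and the endpoints become `start`/`end` fields. In surface terms the targets are the ordinary library range types; a small sketch of the correspondence (the inclusive forms were still unstable at this point, so they appear only in comments):

```
// Sketch of the range desugaring targets (all in std::ops).
use std::ops::{Range, RangeFrom, RangeFull, RangeTo};

fn main() {
    let _: RangeFull      = ..;    // (None,    None,    HalfOpen) -> ops::RangeFull
    let _: RangeFrom<i32> = 1..;   // (Some e1, None,    HalfOpen) -> ops::RangeFrom { start }
    let _: RangeTo<i32>   = ..5;   // (None,    Some e2, HalfOpen) -> ops::RangeTo { end }
    let _: Range<i32>     = 1..5;  // (Some e1, Some e2, HalfOpen) -> ops::Range { start, end }
    // ...5  -> ops::RangeToInclusive { end }
    // 1...5 -> ops::RangeInclusive::NonEmpty { start, end }
    // A closed range with no end (the `(_, &None, Closed)` arm) is a fatal
    // error: "inclusive range with no end".
}
```
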
index 583b3b848f30d88a7e572cc713233555a265f594..d7aa36b24f94279ba345399f7276a1e6fe609fff 100644 (file)
@@ -948,7 +948,7 @@ pub fn map_crate<'hir>(forest: &'hir mut Forest,
     intravisit::walk_crate(&mut collector, &forest.krate);
     let map = collector.map;
 
-    if log_enabled!(::log::DEBUG) {
+    if log_enabled!(::log::LogLevel::Debug) {
         // This only makes sense for ordered stores; note the
         // enumerate to count the number of entries.
         let (entries_less_1, _) = map.iter().filter(|&x| {
index 1c79a02d3da0e88d83f9b36511b09deb34bfc69d..f4f2f4cf9211b532aa19a18001802a763a17225b 100644 (file)
@@ -159,6 +159,10 @@ impl Lifetime {
     pub fn is_elided(&self) -> bool {
         self.name == keywords::Invalid.name()
     }
+
+    pub fn is_static(&self) -> bool {
+        self.name == keywords::StaticLifetime.name()
+    }
 }
 
 /// A lifetime definition, eg `'a: 'b+'c+'d`
index d296d8293fb0674010bb00d12ec012fdfaca0588..e760f7efc93d912645454213f9b42b5707836261 100644 (file)
@@ -55,7 +55,7 @@ pub fn to_hex(&self) -> String {
 impl Encodable for Fingerprint {
     #[inline]
     fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        for &byte in &self.0[..] {
+        for &byte in &self.0 {
             s.emit_u8(byte)?;
         }
         Ok(())
@@ -66,7 +66,7 @@ impl Decodable for Fingerprint {
     #[inline]
     fn decode<D: Decoder>(d: &mut D) -> Result<Fingerprint, D::Error> {
         let mut result = Fingerprint([0u8; FINGERPRINT_LENGTH]);
-        for byte in &mut result.0[..] {
+        for byte in &mut result.0 {
             *byte = d.read_u8()?;
         }
         Ok(result)
index 843f3a53f33e5c9500b9244563a13d138c605a78..20bf241a99906e3afe1a5196fb29718bc88f9cc1 100644 (file)
 use std::default::Default as StdDefault;
 use std::mem;
 use std::fmt;
-use std::ops::Deref;
 use syntax::attr;
 use syntax::ast;
 use syntax::symbol::Symbol;
-use syntax_pos::{MultiSpan, Span};
+use syntax_pos::{DUMMY_SP, MultiSpan, Span};
 use errors::{self, Diagnostic, DiagnosticBuilder};
 use hir;
+use hir::def_id::LOCAL_CRATE;
 use hir::intravisit as hir_visit;
 use syntax::visit as ast_visit;
 
@@ -484,7 +484,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
                 Allow => bug!("earlier conditional return should handle Allow case")
             };
             let hyphen_case_lint_name = name.replace("_", "-");
-            if lint_flag_val.as_str().deref() == name {
+            if lint_flag_val.as_str() == name {
                 err.note(&format!("requested on the command line with `{} {}`",
                                   flag, hyphen_case_lint_name));
             } else {
@@ -495,7 +495,7 @@ pub fn raw_struct_lint<'a, S>(sess: &'a Session,
         },
         Node(lint_attr_name, src) => {
             def = Some(src);
-            if lint_attr_name.as_str().deref() != name {
+            if lint_attr_name.as_str() != name {
                 let level_str = level.as_str();
                 err.note(&format!("#[{}({})] implied by #[{}({})]",
                                   level_str, name, level_str, lint_attr_name));
@@ -1231,10 +1231,11 @@ fn check_lint_name_cmdline(sess: &Session, lint_cx: &LintStore,
 /// Perform lint checking on a crate.
 ///
 /// Consumes the `lint_store` field of the `Session`.
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                             access_levels: &AccessLevels) {
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     let _task = tcx.dep_graph.in_task(DepNode::LateLintCheck);
 
+    let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+
     let krate = tcx.hir.krate();
 
     // We want to own the lint store, so move it out of the session.
index 225d6fc9bb2b211100ff6bdf65a9fc7a8018d26c..8bc0cf2577b5db05761ebbbf9a4b966875383605 100644 (file)
@@ -176,7 +176,6 @@ pub trait CrateStore {
     fn item_generics_cloned(&self, def: DefId) -> ty::Generics;
     fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute>;
     fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>;
-    fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>;
 
     // trait info
     fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>;
@@ -255,8 +254,8 @@ fn maybe_get_item_body<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)
     fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>;
     fn used_crate_source(&self, cnum: CrateNum) -> CrateSource;
     fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>;
-    fn encode_metadata<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                 reexports: &def::ExportMap,
+    fn encode_metadata<'a, 'tcx>(&self,
+                                 tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  link_meta: &LinkMeta,
                                  reachable: &NodeSet) -> Vec<u8>;
     fn metadata_encoding_version(&self) -> &[u8];
@@ -310,7 +309,6 @@ fn item_generics_cloned(&self, def: DefId) -> ty::Generics
         { bug!("item_generics_cloned") }
     fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute> { bug!("item_attrs") }
     fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name> { bug!("fn_arg_names") }
-    fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId> { vec![] }
 
     // trait info
     fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] }
@@ -412,10 +410,10 @@ fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>
         { vec![] }
     fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") }
     fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum> { None }
-    fn encode_metadata<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                           reexports: &def::ExportMap,
-                           link_meta: &LinkMeta,
-                           reachable: &NodeSet) -> Vec<u8> { vec![] }
+    fn encode_metadata<'a, 'tcx>(&self,
+                                 tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                 link_meta: &LinkMeta,
+                                 reachable: &NodeSet) -> Vec<u8> { vec![] }
     fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
 }
 
index cc6d6e88dee4ee068424be59d0d10ee853903fe1..8926ff5c1fbbb080dce5afbfe165418548395d66 100644 (file)
 use middle::privacy;
 use ty::{self, TyCtxt};
 use hir::def::Def;
-use hir::def_id::{DefId};
+use hir::def_id::{DefId, LOCAL_CRATE};
 use lint;
 use util::nodemap::FxHashSet;
 
 use syntax::{ast, codemap};
 use syntax::attr;
+use syntax::codemap::DUMMY_SP;
 use syntax_pos;
 
 // Any local node that may call something in its body block should be
@@ -592,9 +593,9 @@ fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
     }
 }
 
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                             access_levels: &privacy::AccessLevels) {
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     let _task = tcx.dep_graph.in_task(DepNode::DeadCheck);
+    let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
     let krate = tcx.hir.krate();
     let live_symbols = find_live(tcx, access_levels, krate);
     let mut visitor = DeadVisitor { tcx: tcx, live_symbols: live_symbols };
index 4ec43e368a60dc9036e7ca96c9675ecbd3612c2e..b0e39442af98c9997f9885949d3b2e56e07d3178 100644 (file)
@@ -27,7 +27,9 @@
 use syntax::abi::Abi;
 use syntax::ast;
 use syntax::attr;
+use syntax::codemap::DUMMY_SP;
 use hir;
+use hir::def_id::LOCAL_CRATE;
 use hir::intravisit::{Visitor, NestedVisitorMap};
 use hir::itemlikevisit::ItemLikeVisitor;
 use hir::intravisit;
@@ -359,11 +361,11 @@ fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
     }
 }
 
-pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                access_levels: &privacy::AccessLevels)
-                                -> NodeSet {
+pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> NodeSet {
     let _task = tcx.dep_graph.in_task(DepNode::Reachability);
 
+    let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+
     let any_library = tcx.sess.crate_types.borrow().iter().any(|ty| {
         *ty == config::CrateTypeRlib || *ty == config::CrateTypeDylib ||
         *ty == config::CrateTypeProcMacro
index 37749816eb153c725300a0eddba7f66cdc21b917..5094e28475b2640e9faea82e38e71fe28b91bba1 100644 (file)
@@ -29,7 +29,7 @@
 use syntax::attr;
 use syntax::ptr::P;
 use syntax::symbol::keywords;
-use syntax_pos::Span;
+use syntax_pos::{mk_sp, Span};
 use errors::DiagnosticBuilder;
 use util::nodemap::{NodeMap, NodeSet, FxHashSet, FxHashMap, DefIdMap};
 use rustc_back::slice;
@@ -434,7 +434,7 @@ fn visit_lifetime(&mut self, lifetime_ref: &'tcx hir::Lifetime) {
             self.resolve_elided_lifetimes(slice::ref_slice(lifetime_ref));
             return;
         }
-        if lifetime_ref.name == keywords::StaticLifetime.name() {
+        if lifetime_ref.is_static() {
             self.insert_lifetime(lifetime_ref, Region::Static);
             return;
         }
@@ -1434,7 +1434,7 @@ fn check_lifetime_defs(&mut self, old_scope: ScopeRef, lifetimes: &[hir::Lifetim
             let lifetime_i = &lifetimes[i];
 
             for lifetime in lifetimes {
-                if lifetime.lifetime.name == keywords::StaticLifetime.name() {
+                if lifetime.lifetime.is_static() {
                     let lifetime = lifetime.lifetime;
                     let mut err = struct_span_err!(self.sess, lifetime.span, E0262,
                                   "invalid lifetime parameter name: `{}`", lifetime.name);
@@ -1464,7 +1464,17 @@ fn check_lifetime_defs(&mut self, old_scope: ScopeRef, lifetimes: &[hir::Lifetim
             self.check_lifetime_def_for_shadowing(old_scope, &lifetime_i.lifetime);
 
             for bound in &lifetime_i.bounds {
-                self.resolve_lifetime_ref(bound);
+                if !bound.is_static() {
+                    self.resolve_lifetime_ref(bound);
+                } else {
+                    self.insert_lifetime(bound, Region::Static);
+                    let full_span = mk_sp(lifetime_i.lifetime.span.lo, bound.span.hi);
+                    self.sess.struct_span_warn(full_span,
+                        &format!("unnecessary lifetime parameter `{}`", lifetime_i.lifetime.name))
+                        .help(&format!("you can use the `'static` lifetime directly, in place \
+                                        of `{}`", lifetime_i.lifetime.name))
+                        .emit();
+                }
             }
         }
     }
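
The new branch in `check_lifetime_defs` treats a literal `'static` bound on a lifetime parameter specially: instead of resolving it as an ordinary lifetime reference, it records the region as `'static` and warns that the parameter is unnecessary. Roughly, code like the following illustrative example (not taken from the commit's tests) now compiles with the new warning rather than failing resolution:

```
// A lifetime parameter bounded by 'static is forced to be 'static itself,
// so the parameter adds nothing; this now warns:
//   warning: unnecessary lifetime parameter `'a`
//   help: you can use the `'static` lifetime directly, in place of `'a`
fn only_static<'a: 'static>(x: &'a str) -> &'static str {
    x
}

fn main() {
    println!("{}", only_static("hello"));
}
```
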
index 1fb537140257465eae43371f32b7b78eead2c4bf..4354ed6817ae96376db2640305c638b9f9567b34 100644 (file)
@@ -536,7 +536,7 @@ pub fn check_stability(self, def_id: DefId, id: NodeId, span: Span) {
                 if !self.stability.borrow().active_features.contains(feature) {
                     let msg = match *reason {
                         Some(ref r) => format!("use of unstable library feature '{}': {}",
-                                               &feature.as_str(), &r),
+                                               feature.as_str(), &r),
                         None => format!("use of unstable library feature '{}'", &feature)
                     };
                     emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span,
@@ -656,10 +656,11 @@ fn lookup_deprecation_uncached(self, id: DefId) -> Option<DeprecationEntry> {
 /// Given the list of enabled features that were not language features (i.e. that
 /// were expected to be library features), and the list of features used from
 /// libraries, identify activated features that don't exist and error about them.
-pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                                 access_levels: &AccessLevels) {
+pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     let sess = &tcx.sess;
 
+    let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+
     if tcx.stability.borrow().staged_api[&LOCAL_CRATE] && tcx.sess.features.borrow().staged_api {
         let _task = tcx.dep_graph.in_task(DepNode::StabilityIndex);
         let krate = tcx.hir.krate();
index 34977822bc69d8c8600105e7989ce5eed8e7c01f..d8ca30477205c53ccfcfd0e5bc61dab9c16828ae 100644 (file)
@@ -139,6 +139,21 @@ pub enum AutoBorrow<'tcx> {
     RawPtr(hir::Mutability),
 }
 
+/// Information for `CoerceUnsized` impls, storing information we
+/// have computed about the coercion.
+///
+/// This struct can be obtained via the `coerce_impl_info` query.
+/// Demanding this struct also has the side-effect of reporting errors
+/// for inappropriate impls.
+#[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
+pub struct CoerceUnsizedInfo {
+    /// If this is a "custom coerce" impl, then what kind of custom
+    /// coercion is it? This applies to impls of `CoerceUnsized` for
+    /// structs, primarily, where we store a bit of info about which
+    /// fields need to be coerced.
+    pub custom_kind: Option<CustomCoerceUnsized>
+}
+
 #[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
 pub enum CustomCoerceUnsized {
     /// Records the index of the field being coerced.
index 5543223105b444c37f5d8e512588661627c094a1..da56514ea82fbf9e485c3f1f761ff9253f9b54cc 100644 (file)
@@ -15,7 +15,7 @@
 use lint;
 use middle;
 use hir::TraitMap;
-use hir::def::Def;
+use hir::def::{Def, ExportMap};
 use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
 use hir::map as hir_map;
 use hir::map::DisambiguatedDefPathData;
@@ -416,6 +416,9 @@ pub struct GlobalCtxt<'tcx> {
     /// is relevant; generated by resolve.
     pub trait_map: TraitMap,
 
+    /// Export map produced by name resolution.
+    pub export_map: ExportMap,
+
     pub named_region_map: resolve_lifetime::NamedRegionMap,
 
     pub region_maps: RegionMaps,
@@ -698,6 +701,7 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             region_maps: region_maps,
             variance_computed: Cell::new(false),
             trait_map: resolutions.trait_map,
+            export_map: resolutions.export_map,
             fulfilled_predicates: RefCell::new(fulfilled_predicates),
             hir: hir,
             maps: maps::Maps::new(dep_graph, providers),
index 874e032bc46445490ffd30dd05b2fdf78e99dd42..38699105290551e97d7a344ca5d1ff75db8cf532 100644 (file)
@@ -202,7 +202,8 @@ fn push_impl_path<T>(self,
         } else {
             // for local crates, check whether type info is
             // available; typeck might not have completed yet
-            self.maps.impl_trait_ref.borrow().contains_key(&impl_def_id)
+            self.maps.impl_trait_ref.borrow().contains_key(&impl_def_id) &&
+                self.maps.ty.borrow().contains_key(&impl_def_id)
         };
 
         if !use_types {
index ac8c38c7d585664d05d58efd4832d59e7a61e38e..5a57b0072107c666e6c504f2dc39ab37718513a7 100644 (file)
@@ -11,8 +11,9 @@
 use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
 use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
 use middle::const_val::ConstVal;
+use middle::privacy::AccessLevels;
 use mir;
-use ty::{self, Ty, TyCtxt};
+use ty::{self, CrateInherentImpls, Ty, TyCtxt};
 
 use rustc_data_structures::indexed_vec::IndexVec;
 use std::cell::{RefCell, RefMut};
@@ -176,9 +177,15 @@ fn describe(tcx: TyCtxt, (_, def_id): (CrateNum, DefId)) -> String {
     }
 }
 
-impl<'tcx> QueryDescription for queries::coherent_inherent_impls<'tcx> {
+impl<'tcx> QueryDescription for queries::crate_inherent_impls<'tcx> {
+    fn describe(_: TyCtxt, k: CrateNum) -> String {
+        format!("all inherent impls defined in crate `{:?}`", k)
+    }
+}
+
+impl<'tcx> QueryDescription for queries::crate_inherent_impls_overlap_check<'tcx> {
     fn describe(_: TyCtxt, _: CrateNum) -> String {
-        format!("coherence checking all inherent impls")
+        format!("check for overlap between inherent impls defined in this crate")
     }
 }
 
@@ -189,6 +196,12 @@ fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String {
     }
 }
 
+impl<'tcx> QueryDescription for queries::privacy_access_levels<'tcx> {
+    fn describe(_: TyCtxt, _: CrateNum) -> String {
+        format!("privacy access levels")
+    }
+}
+
 macro_rules! define_maps {
     (<$tcx:tt>
      $($(#[$attr:meta])*
@@ -368,7 +381,7 @@ fn default() -> Self {
     /// Maps a DefId of a type to a list of its inherent impls.
     /// Contains implementations of methods that are inherent to a type.
     /// Methods in these implementations don't need to be exported.
-    pub inherent_impls: InherentImpls(DefId) -> Vec<DefId>,
+    pub inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
 
     /// Maps from the def-id of a function/method or const/static
     /// to its MIR. Mutation is done at an item granularity to
@@ -393,19 +406,30 @@ fn default() -> Self {
     pub closure_type: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
 
     /// Caches CoerceUnsized kinds for impls on custom types.
-    pub custom_coerce_unsized_kind: ItemSignature(DefId)
-        -> ty::adjustment::CustomCoerceUnsized,
+    pub coerce_unsized_info: ItemSignature(DefId)
+        -> ty::adjustment::CoerceUnsizedInfo,
 
     pub typeck_tables: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
 
     pub coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
 
-    pub coherent_inherent_impls: coherent_inherent_impls_dep_node(CrateNum) -> (),
+    /// Gets a complete map from all types to their inherent impls.
+    /// Not meant to be used directly outside of coherence.
+    /// (Defined only for LOCAL_CRATE)
+    pub crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
+
+    /// Checks all types in the krate for overlap in their inherent impls. Reports errors.
+    /// Not meant to be used directly outside of coherence.
+    /// (Defined only for LOCAL_CRATE)
+    pub crate_inherent_impls_overlap_check: crate_inherent_impls_dep_node(CrateNum) -> (),
 
     /// Results of evaluating monomorphic constants embedded in
     /// other items, such as enum variant explicit discriminants.
     pub monomorphic_const_eval: MonomorphicConstEval(DefId) -> Result<ConstVal<'tcx>, ()>,
 
+    /// Performs the privacy check and computes "access levels".
+    pub privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
+
     pub mir_shims: mir_shim(ty::InstanceDef<'tcx>) -> &'tcx RefCell<mir::Mir<'tcx>>
 }
 
@@ -413,7 +437,7 @@ fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
     DepNode::CoherenceCheckTrait(def_id)
 }
 
-fn coherent_inherent_impls_dep_node(_: CrateNum) -> DepNode<DefId> {
+fn crate_inherent_impls_dep_node(_: CrateNum) -> DepNode<DefId> {
     DepNode::Coherence
 }
 
index 36d1ae74e9114546b5a5f4fa6164bd8e9d417e78..6a4e7db21dd127d2d20bf57cb32a2517e4b1d0d1 100644 (file)
 
 use dep_graph::{self, DepNode};
 use hir::{map as hir_map, FreevarMap, TraitMap};
-use middle;
 use hir::def::{Def, CtorKind, ExportMap};
 use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
 use middle::const_val::ConstVal;
 use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
+use middle::privacy::AccessLevels;
 use middle::region::{CodeExtent, ROOT_CODE_EXTENT};
 use middle::resolve_lifetime::ObjectLifetimeDefault;
 use mir::Mir;
@@ -31,7 +31,7 @@
 use ty::util::IntTypeExt;
 use ty::walk::TypeWalker;
 use util::common::MemoizationMap;
-use util::nodemap::{NodeSet, FxHashMap};
+use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
 
 use serialize::{self, Encodable, Encoder};
 use std::borrow::Cow;
 
 /// The complete set of all analyses described in this module. This is
 /// produced by the driver and fed to trans and later passes.
+///
+/// NB: These contents are being migrated into queries using the
+/// *on-demand* infrastructure.
 #[derive(Clone)]
 pub struct CrateAnalysis {
-    pub export_map: ExportMap,
-    pub access_levels: middle::privacy::AccessLevels,
+    pub access_levels: Rc<AccessLevels>,
     pub reachable: NodeSet,
     pub name: String,
     pub glob_map: Option<hir::GlobMap>,
@@ -122,6 +124,7 @@ pub struct Resolutions {
     pub freevars: FreevarMap,
     pub trait_map: TraitMap,
     pub maybe_unused_trait_imports: NodeSet,
+    pub export_map: ExportMap,
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Debug)]
@@ -2054,8 +2057,8 @@ pub fn trait_relevant_for_never(self, did: DefId) -> bool {
         })
     }
 
-    pub fn custom_coerce_unsized_kind(self, did: DefId) -> adjustment::CustomCoerceUnsized {
-        queries::custom_coerce_unsized_kind::get(self, DUMMY_SP, did)
+    pub fn coerce_unsized_info(self, did: DefId) -> adjustment::CoerceUnsizedInfo {
+        queries::coerce_unsized_info::get(self, DUMMY_SP, did)
     }
 
     pub fn associated_item(self, def_id: DefId) -> AssociatedItem {
@@ -2345,34 +2348,6 @@ pub fn trait_has_default_impl(self, trait_def_id: DefId) -> bool {
         def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL)
     }
 
-    /// Populates the type context with all the inherent implementations for
-    /// the given type if necessary.
-    pub fn populate_inherent_implementations_for_type_if_necessary(self,
-                                                                   span: Span,
-                                                                   type_id: DefId) {
-        if type_id.is_local() {
-            // Make sure coherence of inherent impls ran already.
-            ty::queries::coherent_inherent_impls::force(self, span, LOCAL_CRATE);
-            return
-        }
-
-        // The type is not local, hence we are reading this out of
-        // metadata and don't need to track edges.
-        let _ignore = self.dep_graph.in_ignore();
-
-        if self.populated_external_types.borrow().contains(&type_id) {
-            return
-        }
-
-        debug!("populate_inherent_implementations_for_type_if_necessary: searching for {:?}",
-               type_id);
-
-        let inherent_impls = self.sess.cstore.inherent_implementations_for_type(type_id);
-
-        self.maps.inherent_impls.borrow_mut().insert(type_id, inherent_impls);
-        self.populated_external_types.borrow_mut().insert(type_id);
-    }
-
     /// Populates the type context with all the implementations for the given
     /// trait if necessary.
     pub fn populate_implementations_for_trait_if_necessary(self, trait_id: DefId) {
@@ -2637,3 +2612,16 @@ pub fn provide(providers: &mut ty::maps::Providers) {
         ..*providers
     };
 }
+
+
+/// A map for the local crate mapping each type to a vector of its
+/// inherent impls. This is not meant to be used outside of coherence;
+/// rather, you should request the vector for a specific type via
+/// `ty::queries::inherent_impls::get(def_id)` so as to minimize your
+/// dependencies (constructing this map requires touching the entire
+/// crate).
+#[derive(Clone, Debug)]
+pub struct CrateInherentImpls {
+    pub inherent_impls: DefIdMap<Rc<Vec<DefId>>>,
+}
+
index 85e861b405a9f8620b9480902e0208ce33666abd..730abc54568e1364e12747db4329b8caea0acd1d 100644 (file)
@@ -11,7 +11,7 @@ crate-type = ["dylib"]
 [dependencies]
 syntax = { path = "../libsyntax" }
 serialize = { path = "../libserialize" }
-log = { path = "../liblog" }
+log = "0.3"
 
 [features]
 jemalloc = []
index d53318f1768480674f384b513b06e281fda7ff82..af99c0e938724d95023ff8c3831fdbb6e3d44670 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 test = false
 
 [dependencies]
-log = { path = "../liblog" }
+log = "0.3"
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
 graphviz = { path = "../libgraphviz" }
index c0f681680a967b0350e27146d28486ae89a6c48b..b728d4d534516672e436312a336f40f627017943 100644 (file)
@@ -267,11 +267,11 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx
     // First, filter out duplicates
     moved.sort();
     moved.dedup();
-    debug!("fragments 1 moved: {:?}", path_lps(&moved[..]));
+    debug!("fragments 1 moved: {:?}", path_lps(&moved));
 
     assigned.sort();
     assigned.dedup();
-    debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..]));
+    debug!("fragments 1 assigned: {:?}", path_lps(&assigned));
 
     // Second, build parents from the moved and assigned.
     for m in &moved {
@@ -291,14 +291,14 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx
 
     parents.sort();
     parents.dedup();
-    debug!("fragments 2 parents: {:?}", path_lps(&parents[..]));
+    debug!("fragments 2 parents: {:?}", path_lps(&parents));
 
     // Third, filter the moved and assigned fragments down to just the non-parents
-    moved.retain(|f| non_member(*f, &parents[..]));
-    debug!("fragments 3 moved: {:?}", path_lps(&moved[..]));
+    moved.retain(|f| non_member(*f, &parents));
+    debug!("fragments 3 moved: {:?}", path_lps(&moved));
 
-    assigned.retain(|f| non_member(*f, &parents[..]));
-    debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..]));
+    assigned.retain(|f| non_member(*f, &parents));
+    debug!("fragments 3 assigned: {:?}", path_lps(&assigned));
 
     // Fourth, build the leftover from the moved, assigned, and parents.
     for m in &moved {
@@ -316,16 +316,16 @@ pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx
 
     unmoved.sort();
     unmoved.dedup();
-    debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..]));
+    debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved));
 
     // Fifth, filter the leftover fragments down to its core.
     unmoved.retain(|f| match *f {
         AllButOneFrom(_) => true,
-        Just(mpi) => non_member(mpi, &parents[..]) &&
-            non_member(mpi, &moved[..]) &&
-            non_member(mpi, &assigned[..])
+        Just(mpi) => non_member(mpi, &parents) &&
+            non_member(mpi, &moved) &&
+            non_member(mpi, &assigned)
     });
-    debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..]));
+    debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved));
 
     // Swap contents back in.
     fragments.unmoved_fragments = unmoved;
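
Most of the remaining hunks in the borrowck, const-eval and data-structure files are one mechanical cleanup: the explicit full-range reslice `&v[..]` is dropped wherever a `&Vec<T>` (or `&String`) already coerces to the `&[T]` (or `&str`) the callee expects. A small self-contained illustration of why the two spellings are interchangeable at these call sites:

```
// Deref coercion: &Vec<T> coerces to &[T] (and &String to &str) at coercion
// sites such as function arguments, so the explicit `[..]` slice is redundant.
fn sum(xs: &[u32]) -> u32 {
    xs.iter().sum()
}

fn main() {
    let v = vec![1, 2, 3];
    assert_eq!(sum(&v[..]), 6); // explicit reslice (the old spelling)
    assert_eq!(sum(&v), 6);     // deref coercion (the new spelling)
}
```
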
index 20d495976b05f47a8c1ece2b23566775f8fee3bd..59c3e68aadab2b4f2b9182f76334af3b30e92c19 100644 (file)
@@ -112,7 +112,7 @@ fn borrowck_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, body_id: hir::BodyId) {
                                                  &flowed_moves.move_data,
                                                  owner_id);
 
-    check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans[..], body);
+    check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
 }
 
 fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>,
index 0da9525efd8566e53520961e8b4addd8e9231a94..e3a2bfa392738586fe06cd374082d3222c6bf50d 100644 (file)
@@ -88,7 +88,7 @@ fn build_set<O:DataFlowOperator, F>(&self,
                 set.push_str(", ");
             }
             let loan_str = self.borrowck_ctxt.loan_path_to_string(&lp);
-            set.push_str(&loan_str[..]);
+            set.push_str(&loan_str);
             saw_some = true;
             true
         });
index 780b2c16a32ecefc5bd556d2a88b6409b9246554..907410f74dca4f268697e32d036a606894924996 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 
 [dependencies]
 arena = { path = "../libarena" }
-log = { path = "../liblog" }
+log = "0.3"
 rustc = { path = "../librustc" }
 rustc_back = { path = "../librustc_back" }
 rustc_const_math = { path = "../librustc_const_math" }
index 53a7e87292818d773a7af3dfcd859d0b8467adcd..c1dc5f5f7a2b8d10e0f5a99565480dfab83f9993 100644 (file)
@@ -680,10 +680,10 @@ fn is_useful_specialized<'p, 'a:'p, 'tcx: 'a>(
     }).collect();
     let wild_patterns: Vec<_> = wild_patterns_owned.iter().collect();
     let matrix = Matrix(m.iter().flat_map(|r| {
-        specialize(cx, &r[..], &ctor, &wild_patterns)
+        specialize(cx, &r, &ctor, &wild_patterns)
     }).collect());
     match specialize(cx, v, &ctor, &wild_patterns) {
-        Some(v) => match is_useful(cx, &matrix, &v[..], witness) {
+        Some(v) => match is_useful(cx, &matrix, &v, witness) {
             UsefulWithWitness(witnesses) => UsefulWithWitness(
                 witnesses.into_iter()
                     .map(|witness| witness.apply_constructor(cx, &ctor, lty))
index e2b9f174ff0c200a6e174347d0e85cf5c7239fff..9d55281d019d9ee579a4d755e47b6303d674ec04 100644 (file)
@@ -311,7 +311,7 @@ fn check_arms<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
         for &(pat, hir_pat) in pats {
             let v = vec![pat];
 
-            match is_useful(cx, &seen, &v[..], LeaveOutWitness) {
+            match is_useful(cx, &seen, &v, LeaveOutWitness) {
                 NotUseful => {
                     match source {
                         hir::MatchSource::IfLetDesugar { .. } => {
index e2e16059d98718c534335bf277927d4cb35c7853..343b1ed68b80431cb3e734dff8adc4764073cd87 100644 (file)
@@ -9,5 +9,5 @@ path = "lib.rs"
 crate-type = ["dylib"]
 
 [dependencies]
-log = { path = "../liblog" }
+log = "0.3"
 serialize = { path = "../libserialize" }
index d4bd9e707fdcb9780a618a6ba30151061b479109..c03c2890ba34c6cb4723c3eb00ed514a8bf2e5a5 100644 (file)
@@ -91,8 +91,8 @@ impl<A: Array> Deref for AccumulateVec<A> {
     type Target = [A::Element];
     fn deref(&self) -> &Self::Target {
         match *self {
-            AccumulateVec::Array(ref v) => &v[..],
-            AccumulateVec::Heap(ref v) => &v[..],
+            AccumulateVec::Array(ref v) => v,
+            AccumulateVec::Heap(ref v) => v,
         }
     }
 }
@@ -100,8 +100,8 @@ fn deref(&self) -> &Self::Target {
 impl<A: Array> DerefMut for AccumulateVec<A> {
     fn deref_mut(&mut self) -> &mut [A::Element] {
         match *self {
-            AccumulateVec::Array(ref mut v) => &mut v[..],
-            AccumulateVec::Heap(ref mut v) => &mut v[..],
+            AccumulateVec::Array(ref mut v) => v,
+            AccumulateVec::Heap(ref mut v) => v,
         }
     }
 }
index 4359581a897f57153df7e6179f8bb9f4ba553548..cf54229fa7f52e59130cbd39c43ee7656c90d1f4 100644 (file)
@@ -48,7 +48,7 @@ pub fn encode(n: u64, base: u64) -> String {
 #[test]
 fn test_encode() {
     fn test(n: u64, base: u64) {
-        assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base)[..], base as u32));
+        assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base), base as u32));
     }
 
     for base in 2..37 {
index 31492e2621945dd292d556f556fd61d4a3d36fb6..9d97a83f693c32f7cb528def5b49ff3ac47f1178 100644 (file)
@@ -35,7 +35,7 @@ pub struct Blake2bCtx {
 impl ::std::fmt::Debug for Blake2bCtx {
     fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
         try!(write!(fmt, "hash: "));
-        for v in &self.h[..] {
+        for v in &self.h {
             try!(write!(fmt, "{:x}", v));
         }
         Ok(())
index 2e9e054e97eafe7dd0172d5f116710d65f1c7669..572ce98d3ae8e30a0f19a8b9a69b887c9873eb3e 100644 (file)
@@ -91,13 +91,13 @@ unsafe fn from_slice_mut(s: &mut [Word]) -> &mut Self {
 impl<T: Idx> Deref for IdxSetBuf<T> {
     type Target = IdxSet<T>;
     fn deref(&self) -> &IdxSet<T> {
-        unsafe { IdxSet::from_slice(&self.bits[..]) }
+        unsafe { IdxSet::from_slice(&self.bits) }
     }
 }
 
 impl<T: Idx> DerefMut for IdxSetBuf<T> {
     fn deref_mut(&mut self) -> &mut IdxSet<T> {
-        unsafe { IdxSet::from_slice_mut(&mut self.bits[..]) }
+        unsafe { IdxSet::from_slice_mut(&mut self.bits) }
     }
 }
 
@@ -135,11 +135,11 @@ pub fn contains(&self, elem: &T) -> bool {
     }
 
     pub fn words(&self) -> &[Word] {
-        &self.bits[..]
+        &self.bits
     }
 
     pub fn words_mut(&mut self) -> &mut [Word] {
-        &mut self.bits[..]
+        &mut self.bits
     }
 
     pub fn clone_from(&mut self, other: &IdxSet<T>) {
index caa5c8b7e00581562638ea76af04f39571305455..5b5113caa8e8c0dd9d68e7e8078b71991efa107e 100644 (file)
@@ -11,7 +11,8 @@ crate-type = ["dylib"]
 [dependencies]
 arena = { path = "../libarena" }
 graphviz = { path = "../libgraphviz" }
-log = { path = "../liblog" }
+log = { version = "0.3", features = ["release_max_level_info"] }
+env_logger = { version = "0.4", default-features = false }
 proc_macro_plugin = { path = "../libproc_macro_plugin" }
 rustc = { path = "../librustc" }
 rustc_back = { path = "../librustc_back" }
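
The driver gains `env_logger` (0.4, default features off) next to the `log` facade: `log = "0.3"` only supplies the macros, so a backend must be installed at startup for `RUST_LOG` to keep driving output. The `extern crate env_logger;` hunk below is the in-tree side of that; as a standalone, illustrative sketch of the same wiring against env_logger 0.4's documented API (not the driver's exact initialization code):

```
// Minimal sketch of installing env_logger 0.4 as the `log` 0.3 backend.
// With this in place, RUST_LOG=debug (or module=level filters) controls
// which of the log crate's macros actually emit output.
#[macro_use]
extern crate log;
extern crate env_logger;

fn main() {
    // In env_logger 0.4, init() returns a Result and may only be called once.
    env_logger::init().unwrap();

    info!("logging initialized");
    debug!("only shown when RUST_LOG enables debug for this module");
}
```
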
index d37553d7d660ea8b3374df940bc229ea9b2137b7..4873b21c548740f98480346794ea5abc76309c64 100644 (file)
@@ -48,6 +48,7 @@
 use std::io::{self, Write};
 use std::iter;
 use std::path::{Path, PathBuf};
+use std::rc::Rc;
 use syntax::{ast, diagnostics, visit};
 use syntax::attr;
 use syntax::ext::base::ExtCtxt;
@@ -198,13 +199,13 @@ macro_rules! controller_entry_point {
 
             result?;
 
-            if log_enabled!(::log::INFO) {
+            if log_enabled!(::log::LogLevel::Info) {
                 println!("Pre-trans");
                 tcx.print_debug_stats();
             }
             let trans = phase_4_translate_to_llvm(tcx, analysis, &incremental_hashes_map);
 
-            if log_enabled!(::log::INFO) {
+            if log_enabled!(::log::LogLevel::Info) {
                 println!("Post-trans");
                 tcx.print_debug_stats();
             }
@@ -257,10 +258,7 @@ fn keep_hygiene_data(sess: &Session) -> bool {
 }
 
 fn keep_ast(sess: &Session) -> bool {
-    sess.opts.debugging_opts.keep_ast ||
-    sess.opts.debugging_opts.save_analysis ||
-    sess.opts.debugging_opts.save_analysis_csv ||
-    sess.opts.debugging_opts.save_analysis_api
+    sess.opts.debugging_opts.keep_ast || ::save_analysis(sess)
 }
 
 /// The name used for source code that doesn't originate in a file
@@ -807,18 +805,18 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
         expanded_crate: krate,
         defs: resolver.definitions,
         analysis: ty::CrateAnalysis {
-            export_map: resolver.export_map,
-            access_levels: AccessLevels::default(),
+            access_levels: Rc::new(AccessLevels::default()),
             reachable: NodeSet(),
             name: crate_name.to_string(),
             glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None },
         },
         resolutions: Resolutions {
             freevars: resolver.freevars,
+            export_map: resolver.export_map,
             trait_map: resolver.trait_map,
             maybe_unused_trait_imports: resolver.maybe_unused_trait_imports,
         },
-        hir_forest: hir_forest
+        hir_forest: hir_forest,
     })
 }
 
@@ -888,6 +886,7 @@ macro_rules! try_with_f {
 
     let mut local_providers = ty::maps::Providers::default();
     mir::provide(&mut local_providers);
+    rustc_privacy::provide(&mut local_providers);
     typeck::provide(&mut local_providers);
     ty::provide(&mut local_providers);
 
@@ -931,9 +930,7 @@ macro_rules! try_with_f {
              || consts::check_crate(tcx));
 
         analysis.access_levels =
-            time(time_passes, "privacy checking", || {
-                rustc_privacy::check_crate(tcx, &analysis.export_map)
-            });
+            time(time_passes, "privacy checking", || rustc_privacy::check_crate(tcx));
 
         time(time_passes,
              "intrinsic checking",
@@ -1000,19 +997,15 @@ macro_rules! try_with_f {
         analysis.reachable =
             time(time_passes,
                  "reachability checking",
-                 || reachable::find_reachable(tcx, &analysis.access_levels));
+                 || reachable::find_reachable(tcx));
 
-        time(time_passes, "death checking", || {
-            middle::dead::check_crate(tcx, &analysis.access_levels);
-        });
+        time(time_passes, "death checking", || middle::dead::check_crate(tcx));
 
         time(time_passes, "unused lib feature checking", || {
-            stability::check_unused_or_stable_features(tcx, &analysis.access_levels)
+            stability::check_unused_or_stable_features(tcx)
         });
 
-        time(time_passes,
-             "lint checking",
-             || lint::check_crate(tcx, &analysis.access_levels));
+        time(time_passes, "lint checking", || lint::check_crate(tcx));
 
         // The above three passes generate errors w/o aborting
         if sess.err_count() > 0 {
index 62d75126557284b806dcdf173c991fedf7c14bbb..c90dde3a5f6e0232694ae37dec7e11e70c91b446 100644 (file)
@@ -35,6 +35,7 @@
 extern crate arena;
 extern crate getopts;
 extern crate graphviz;
+extern crate env_logger;
 extern crate libc;
 extern crate rustc;
 extern crate rustc_back;
@@ -66,6 +67,7 @@
 
 use rustc_resolve as resolve;
 use rustc_save_analysis as save;
+use rustc_save_analysis::DumpHandler;
 use rustc_trans::back::link;
 use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
 use rustc::dep_graph::DepGraph;
@@ -232,7 +234,7 @@ fn make_output(matches: &getopts::Matches) -> (Option<PathBuf>, Option<PathBuf>)
 // Extract input (string or file and optional path) from matches.
 fn make_input(free_matches: &[String]) -> Option<(Input, Option<PathBuf>)> {
     if free_matches.len() == 1 {
-        let ifile = &free_matches[0][..];
+        let ifile = &free_matches[0];
         if ifile == "-" {
             let mut src = String::new();
             io::stdin().read_to_string(&mut src).unwrap();
@@ -506,8 +508,9 @@ fn build_controller(&mut self,
                                         state.expanded_crate.unwrap(),
                                         state.analysis.unwrap(),
                                         state.crate_name.unwrap(),
-                                        state.out_dir,
-                                        save_analysis_format(state.session))
+                                        DumpHandler::new(save_analysis_format(state.session),
+                                                         state.out_dir,
+                                                         state.crate_name.unwrap()))
                 });
             };
             control.after_analysis.run_callback_on_error = true;
@@ -799,7 +802,7 @@ fn sort_lint_groups(lints: Vec<(&'static str, Vec<lint::LintId>, bool)>)
         for lint in lints {
             let name = lint.name_lower().replace("_", "-");
             println!("    {}  {:7.7}  {}",
-                     padded(&name[..]),
+                     padded(&name),
                      lint.default_level.as_str(),
                      lint.desc);
         }
@@ -837,7 +840,7 @@ fn sort_lint_groups(lints: Vec<(&'static str, Vec<lint::LintId>, bool)>)
                          .map(|x| x.to_string().replace("_", "-"))
                          .collect::<Vec<String>>()
                          .join(", ");
-            println!("    {}  {}", padded(&name[..]), desc);
+            println!("    {}  {}", padded(&name), desc);
         }
         println!("\n");
     };
@@ -944,7 +947,7 @@ pub fn handle_options(args: &[String]) -> Option<getopts::Matches> {
                                                  .into_iter()
                                                  .map(|x| x.opt_group)
                                                  .collect();
-    let matches = match getopts::getopts(&args[..], &all_groups) {
+    let matches = match getopts::getopts(&args, &all_groups) {
         Ok(m) => m,
         Err(f) => early_error(ErrorOutputType::default(), &f.to_string()),
     };
@@ -1083,7 +1086,7 @@ fn flush(&mut self) -> io::Result<()> {
                       format!("we would appreciate a bug report: {}", BUG_REPORT_URL)];
             for note in &xs {
                 handler.emit(&MultiSpan::new(),
-                             &note[..],
+                             &note,
                              errors::Level::Note);
             }
             if match env::var_os("RUST_BACKTRACE") {
@@ -1127,6 +1130,7 @@ pub fn diagnostics_registry() -> errors::registry::Registry {
 }
 
 pub fn main() {
+    env_logger::init().unwrap();
     let result = run(|| run_compiler(&env::args().collect::<Vec<_>>(),
                                      &mut RustcDefaultCalls,
                                      None,
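Aside: with the in-tree `liblog` removed, the driver now depends on crates.io `log` 0.3 plus `env_logger` 0.4 and spells levels as `::log::LogLevel::Info` instead of the old `::log::INFO` constant. A minimal sketch (not from the patch, assuming the same two dependencies shown in the Cargo.toml hunk above) of how that pairing is wired up:

```rust
// Cargo.toml (as in the hunk above):
//   log = { version = "0.3", features = ["release_max_level_info"] }
//   env_logger = { version = "0.4", default-features = false }
#[macro_use]
extern crate log;
extern crate env_logger;

fn main() {
    // Mirrors the `env_logger::init().unwrap()` now called in rustc_driver's
    // main(): install the RUST_LOG-driven logger once at startup.
    env_logger::init().unwrap();

    // The crates.io log 0.3 API spells the level as an enum variant.
    if log_enabled!(log::LogLevel::Info) {
        info!("expensive diagnostics only computed when info logging is on");
    }
}
```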
index 6cd97e9559885048ac4f6905ecc257b5bf45e545..18dc504ca8aa913568bbf5fd48fae2c8ff5728db 100644 (file)
@@ -589,7 +589,7 @@ fn all_matching_node_ids<'a, 'hir>(&'a self,
                                        -> NodesMatchingUII<'a, 'hir> {
         match *self {
             ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()),
-            ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])),
+            ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts)),
         }
     }
 
@@ -600,7 +600,7 @@ fn to_one_node_id(self, user_option: &str, sess: &Session, map: &hir_map::Map) -
                                   user_option,
                                   self.reconstructed_input(),
                                   is_wrong_because);
-            sess.fatal(&message[..])
+            sess.fatal(&message)
         };
 
         let mut saw_node = ast::DUMMY_NODE_ID;
@@ -771,7 +771,7 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec<borrowck_dot::Variant>,
     fn expand_err_details(r: io::Result<()>) -> io::Result<()> {
         r.map_err(|ioerr| {
             io::Error::new(io::ErrorKind::Other,
-                           &format!("graphviz::render failed: {}", ioerr)[..])
+                           format!("graphviz::render failed: {}", ioerr))
         })
     }
 }
index 9568cc3d6de0e35c8cc00fce917dc91b73e344c0..af2416f787ea4669e693fa0162bca977c6ac7ece 100644 (file)
@@ -289,7 +289,7 @@ pub fn t_pair(&self, ty1: Ty<'tcx>, ty2: Ty<'tcx>) -> Ty<'tcx> {
 
     pub fn t_param(&self, index: u32) -> Ty<'tcx> {
         let name = format!("T{}", index);
-        self.infcx.tcx.mk_param(index, Symbol::intern(&name[..]))
+        self.infcx.tcx.mk_param(index, Symbol::intern(&name))
     }
 
     pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region {
index e3ee7527545042211481ecf9d36f6e228c23f0bc..7bf2efa4b885f8eef332581957e4a84f795f75c8 100644 (file)
@@ -13,6 +13,6 @@ graphviz = { path = "../libgraphviz" }
 rustc = { path = "../librustc" }
 rustc_data_structures = { path = "../librustc_data_structures" }
 serialize = { path = "../libserialize" }
-log = { path = "../liblog" }
+log = "0.3"
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
index b67caa6750a81b59de426b5923fb98199345a6e7..5c20f65274f54ffbcd86a5323b3bb704dbe328fb 100644 (file)
@@ -99,9 +99,9 @@ pub fn read_file(sess: &Session, path: &Path) -> io::Result<Option<Vec<u8>>> {
         let rustc_version_str_len = rustc_version_str_len[0] as usize;
         let mut buffer = Vec::with_capacity(rustc_version_str_len);
         buffer.resize(rustc_version_str_len, 0);
-        file.read_exact(&mut buffer[..])?;
+        file.read_exact(&mut buffer)?;
 
-        if &buffer[..] != rustc_version().as_bytes() {
+        if buffer != rustc_version().as_bytes() {
             report_format_mismatch(sess, path, "Different compiler version");
             return Ok(None);
         }
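Aside: the `read_file` hunk also compares the buffer directly against the version bytes. A small standalone sketch (not part of the patch; the version string is a stand-in) of the `PartialEq` impl that lets `Vec<u8>` be compared with `&[u8]` without slicing:

```rust
fn rustc_version() -> String {
    "rustc 1.17.0-nightly".to_string()
}

fn main() {
    let buffer: Vec<u8> = rustc_version().into_bytes();

    // Vec<A> implements PartialEq against slice references, so `&buffer[..]`
    // is not needed on the left-hand side of the comparison.
    assert!(buffer == rustc_version().as_bytes());

    let other: &[u8] = b"some other version";
    assert!(buffer != other);
}
```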
index 4d5c0d7ba0ae1130f253a8e05113c9078ed202e3..c3c5461ff7c506a612d45564e01aa6b8a8a41938 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 test = false
 
 [dependencies]
-log = { path = "../liblog" }
+log = "0.3"
 rustc = { path = "../librustc" }
 rustc_back = { path = "../librustc_back" }
 rustc_const_eval = { path = "../librustc_const_eval" }
index 353b86820c405e3811d9f438cc5a55cd797fdeea..c4220e9a0d3dc1420022e6a7b5b2a3c1ff9c8c98 100644 (file)
@@ -88,7 +88,7 @@ fn to_camel_case(s: &str) -> String {
             } else {
                 format!("{} `{}` should have a camel case name such as `{}`", sort, name, c)
             };
-            cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]);
+            cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m);
         }
     }
 }
index f0276f90f274d6d99ffa2f37b88512c1481a2b37..0ee9d4a42c7f81a26c15efdb0a37ab3eb5af6b45 100644 (file)
@@ -334,7 +334,7 @@ fn enter_lint_attrs(&mut self, _: &LateContext, attrs: &[ast::Attribute]) {
             attr.check_name("doc") &&
             match attr.meta_item_list() {
                 None => false,
-                Some(l) => attr::list_contains_name(&l[..], "hidden"),
+                Some(l) => attr::list_contains_name(&l, "hidden"),
             }
         });
         self.doc_hidden_stack.push(doc_hidden);
index abba8afd9da8699f33f5a4691fd4ede7a5477619..86bf209ccf8c8d92c5c841f684f356f869293b20 100644 (file)
@@ -146,7 +146,7 @@ fn check_stmt(&mut self, cx: &LateContext, s: &hir::Stmt) {
             ty::TyBool => return,
             ty::TyAdt(def, _) => {
                 let attrs = cx.tcx.get_attrs(def.did);
-                check_must_use(cx, &attrs[..], s.span)
+                check_must_use(cx, &attrs, s.span)
             }
             _ => false,
         };
index 42717ec289c34d019bf2b6f1a462b214f39fab59..2b945e0a3afaff2f8e7d8ffe10335be2e719bbea 100644 (file)
@@ -140,7 +140,7 @@ fn main() {
         cfg.flag(flag);
     }
 
-    for component in &components[..] {
+    for component in &components {
         let mut flag = String::from("-DLLVM_COMPONENT_");
         flag.push_str(&component.to_uppercase());
         cfg.flag(&flag);
@@ -173,7 +173,7 @@ fn main() {
     if !is_crossed {
         cmd.arg("--system-libs");
     }
-    cmd.args(&components[..]);
+    cmd.args(&components);
 
     for lib in output(&mut cmd).split_whitespace() {
         let name = if lib.starts_with("-l") {
index 6f7f03ca216b9a3c83543c1448358e278f962285..e8b906092730e5cbee6f873daa302fd57eaf46a6 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 
 [dependencies]
 flate = { path = "../libflate" }
-log = { path = "../liblog" }
+log = "0.3"
 proc_macro = { path = "../libproc_macro" }
 rustc = { path = "../librustc" }
 rustc_back = { path = "../librustc_back" }
index fcdb968dc06152ceca6c81d41140876336ab79f8..04a8b88f8a594a694537722cfda51677330f75f6 100644 (file)
@@ -669,7 +669,7 @@ pub fn find_plugin_registrar(&mut self, span: Span, name: &str)
                                   name,
                                   config::host_triple(),
                                   self.sess.opts.target_triple);
-            span_fatal!(self.sess, span, E0456, "{}", &message[..]);
+            span_fatal!(self.sess, span, E0456, "{}", &message);
         }
 
         let root = ekrate.metadata.get_root();
@@ -1058,7 +1058,7 @@ fn postprocess(&mut self, krate: &ast::Crate) {
         self.inject_allocator_crate();
         self.inject_panic_runtime(krate);
 
-        if log_enabled!(log::INFO) {
+        if log_enabled!(log::LogLevel::Info) {
             dump_crates(&self.cstore);
         }
 
index 2a67b79eaa52e6022ef699ee90fcbad76e91380c..3e9b6a6226ab507d7f7087251b31f75fa6bdf9ea 100644 (file)
@@ -88,9 +88,9 @@ pub fn provide<$lt>(providers: &mut Providers<$lt>) {
     }
     associated_item => { cdata.get_associated_item(def_id.index) }
     impl_trait_ref => { cdata.get_impl_trait(def_id.index, tcx) }
-    custom_coerce_unsized_kind => {
-        cdata.get_custom_coerce_unsized_kind(def_id.index).unwrap_or_else(|| {
-            bug!("custom_coerce_unsized_kind: `{:?}` is missing its kind", def_id);
+    coerce_unsized_info => {
+        cdata.get_coerce_unsized_info(def_id.index).unwrap_or_else(|| {
+            bug!("coerce_unsized_info: `{:?}` is missing its info", def_id);
         })
     }
     mir => {
@@ -109,6 +109,7 @@ pub fn provide<$lt>(providers: &mut Providers<$lt>) {
     typeck_tables => { cdata.item_body_tables(def_id.index, tcx) }
     closure_kind => { cdata.closure_kind(def_id.index) }
     closure_type => { cdata.closure_ty(def_id.index, tcx) }
+    inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
 }
 
 impl CrateStore for cstore::CStore {
@@ -162,12 +163,6 @@ fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>
         self.get_crate_data(did.krate).get_fn_arg_names(did.index)
     }
 
-    fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>
-    {
-        self.dep_graph.read(DepNode::MetaData(def_id));
-        self.get_crate_data(def_id.krate).get_inherent_implementations_for_type(def_id.index)
-    }
-
     fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
     {
         if let Some(def_id) = filter {
@@ -496,12 +491,12 @@ fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>
         self.do_extern_mod_stmt_cnum(emod_id)
     }
 
-    fn encode_metadata<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                 reexports: &def::ExportMap,
+    fn encode_metadata<'a, 'tcx>(&self,
+                                 tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  link_meta: &LinkMeta,
                                  reachable: &NodeSet) -> Vec<u8>
     {
-        encoder::encode_metadata(tcx, self, reexports, link_meta, reachable)
+        encoder::encode_metadata(tcx, self, link_meta, reachable)
     }
 
     fn metadata_encoding_version(&self) -> &[u8]
index 6ccdf8092f210491c856774bb92ea632ccaf91bc..3de1e3442c69ddb6c0ecf63f8a31df8062bbad8d 100644 (file)
@@ -643,10 +643,10 @@ pub fn get_impl_polarity(&self, id: DefIndex) -> hir::ImplPolarity {
         self.get_impl_data(id).polarity
     }
 
-    pub fn get_custom_coerce_unsized_kind(&self,
-                                          id: DefIndex)
-                                          -> Option<ty::adjustment::CustomCoerceUnsized> {
-        self.get_impl_data(id).coerce_unsized_kind
+    pub fn get_coerce_unsized_info(&self,
+                                   id: DefIndex)
+                                   -> Option<ty::adjustment::CoerceUnsizedInfo> {
+        self.get_impl_data(id).coerce_unsized_info
     }
 
     pub fn get_impl_trait(&self,
index 044ed529ef74cfa50c96f36683172d5a04999174..38d774992a55111b8d901b75a5d192730f76ba38 100644 (file)
@@ -13,7 +13,6 @@
 use schema::*;
 
 use rustc::middle::cstore::{LinkMeta, LinkagePreference, NativeLibrary};
-use rustc::hir::def;
 use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId};
 use rustc::hir::map::definitions::DefPathTable;
 use rustc::middle::dependency_format::Linkage;
@@ -48,7 +47,6 @@
 pub struct EncodeContext<'a, 'tcx: 'a> {
     opaque: opaque::Encoder<'a>,
     pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    reexports: &'a def::ExportMap,
     link_meta: &'a LinkMeta,
     cstore: &'a cstore::CStore,
     exported_symbols: &'a NodeSet,
@@ -306,7 +304,7 @@ fn encode_info_for_mod(&mut self,
         let def_id = tcx.hir.local_def_id(id);
 
         let data = ModData {
-            reexports: match self.reexports.get(&id) {
+            reexports: match tcx.export_map.get(&id) {
                 Some(exports) if *vis == hir::Public => self.lazy_seq_ref(exports),
                 _ => LazySeq::empty(),
             },
@@ -695,7 +693,7 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
                 let data = ImplData {
                     polarity: hir::ImplPolarity::Positive,
                     parent_impl: None,
-                    coerce_unsized_kind: None,
+                    coerce_unsized_info: None,
                     trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)),
                 };
 
@@ -715,13 +713,21 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
                     None
                 };
 
+                // if this is an impl of `CoerceUnsized`, create its
+                // "unsized info", else just store None
+                let coerce_unsized_info =
+                    trait_ref.and_then(|t| {
+                        if Some(t.def_id) == tcx.lang_items.coerce_unsized_trait() {
+                            Some(ty::queries::coerce_unsized_info::get(tcx, item.span, def_id))
+                        } else {
+                            None
+                        }
+                    });
+
                 let data = ImplData {
                     polarity: polarity,
                     parent_impl: parent,
-                    coerce_unsized_kind: tcx.maps.custom_coerce_unsized_kind
-                        .borrow()
-                        .get(&def_id)
-                        .cloned(),
+                    coerce_unsized_info: coerce_unsized_info,
                     trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)),
                 };
 
@@ -920,14 +926,14 @@ fn encode_addl_info_for_item(&mut self, item: &hir::Item) {
                 self.encode_fields(def_id);
             }
             hir::ItemImpl(..) => {
-                for &trait_item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] {
+                for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
                     self.record(trait_item_def_id,
                                 EncodeContext::encode_info_for_impl_item,
                                 trait_item_def_id);
                 }
             }
             hir::ItemTrait(..) => {
-                for &item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] {
+                for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
                     self.record(item_def_id,
                                 EncodeContext::encode_info_for_trait_item,
                                 item_def_id);
@@ -1423,7 +1429,6 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
 
 pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  cstore: &cstore::CStore,
-                                 reexports: &def::ExportMap,
                                  link_meta: &LinkMeta,
                                  exported_symbols: &NodeSet)
                                  -> Vec<u8> {
@@ -1437,7 +1442,6 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let mut ecx = EncodeContext {
             opaque: opaque::Encoder::new(&mut cursor),
             tcx: tcx,
-            reexports: reexports,
             link_meta: link_meta,
             cstore: cstore,
             exported_symbols: exported_symbols,
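Aside: `coerce_unsized_info` is only ever `Some` for impls of `CoerceUnsized`, which is why the encoder checks `tcx.lang_items.coerce_unsized_trait()` before running the query. A nightly-only sketch adapted from the standard library docs (not code from this patch) showing the shape of impl whose info now gets recorded in metadata:

```rust
#![feature(coerce_unsized, unsize)]
#![allow(dead_code)]

use std::marker::Unsize;
use std::ops::CoerceUnsized;

// A user-defined smart pointer, following the std docs for CoerceUnsized.
struct MyPtr<T: ?Sized>(*const T);

// Impls of this shape are the only ones for which the encoder now records
// `coerce_unsized_info`; every other impl stores `None`.
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<MyPtr<U>> for MyPtr<T> {}

fn main() {
    let sized: MyPtr<[i32; 3]> = MyPtr(&[1, 2, 3]);
    // This unsizing coercion is what the recorded info describes.
    let _unsized: MyPtr<[i32]> = sized;
}
```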
index a6771083fc34eaf438313e920cbc78707a569ba2..e8bc8b01652a62a5c71161b6b8e489a21ea21a6f 100644 (file)
@@ -477,15 +477,15 @@ fn find_library_crate(&mut self) -> Option<Library> {
                 Some(file) => file,
             };
             let (hash, found_kind) =
-                if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") {
+                if file.starts_with(&rlib_prefix) && file.ends_with(".rlib") {
                     (&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], CrateFlavor::Rlib)
-                } else if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rmeta") {
+                } else if file.starts_with(&rlib_prefix) && file.ends_with(".rmeta") {
                     (&file[(rlib_prefix.len())..(file.len() - ".rmeta".len())], CrateFlavor::Rmeta)
                 } else if file.starts_with(&dylib_prefix) &&
                                              file.ends_with(&dypair.1) {
                     (&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], CrateFlavor::Dylib)
                 } else {
-                    if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) {
+                    if file.starts_with(&staticlib_prefix) && file.ends_with(&staticpair.1) {
                         staticlibs.push(CrateMismatch {
                             path: path.to_path_buf(),
                             got: "static".to_string(),
index 4a20913d0b3fd19e16cef779f1817c42880f1a81..abb482a50ebc2621487e53fb43c3af4c5e8f7b73 100644 (file)
@@ -285,7 +285,9 @@ pub struct TraitData<'tcx> {
 pub struct ImplData<'tcx> {
     pub polarity: hir::ImplPolarity,
     pub parent_impl: Option<DefId>,
-    pub coerce_unsized_kind: Option<ty::adjustment::CustomCoerceUnsized>,
+
+    /// This is `Some` only for impls of `CoerceUnsized`.
+    pub coerce_unsized_info: Option<ty::adjustment::CoerceUnsizedInfo>,
     pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>,
 }
 
index 531be0b6ae9f5f620528982f7edf7f829b7f7bb0..6e42e02d5109b6c5e1fd1ea83ec8d55c244ea379 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 
 [dependencies]
 graphviz = { path = "../libgraphviz" }
-log = { path = "../liblog" }
+log = "0.3"
 rustc = { path = "../librustc" }
 rustc_const_eval = { path = "../librustc_const_eval" }
 rustc_const_math = { path = "../librustc_const_math" }
index cc710e0ac3563772aa88214d498e9d12488f992d..d2560c2f8203f0557957ea0bbb968fbaa127bba3 100644 (file)
@@ -9,10 +9,10 @@ path = "lib.rs"
 crate-type = ["dylib"]
 
 [dependencies]
-log = { path = "../liblog" }
+log = "0.3"
 rustc = { path = "../librustc" }
 rustc_const_eval = { path = "../librustc_const_eval" }
 rustc_const_math = { path = "../librustc_const_math" }
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
-rustc_errors = { path = "../librustc_errors" }
\ No newline at end of file
+rustc_errors = { path = "../librustc_errors" }
index 1bfc445fca98d72ca4f7c8f03a92b63dda875fc1..efe9963cecc7300a955394726544c1af4130cc4a 100644 (file)
@@ -126,19 +126,19 @@ fn dylink_registrar(&mut self,
             // inside this crate, so continue would spew "macro undefined"
             // errors
             Err(err) => {
-                self.sess.span_fatal(span, &err[..])
+                self.sess.span_fatal(span, &err)
             }
         };
 
         unsafe {
             let registrar =
-                match lib.symbol(&symbol[..]) {
+                match lib.symbol(&symbol) {
                     Ok(registrar) => {
                         mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
                     }
                     // again fatal if we can't register macros
                     Err(err) => {
-                        self.sess.span_fatal(span, &err[..])
+                        self.sess.span_fatal(span, &err)
                     }
                 };
 
index e32ec25a7e8f77857f40bf5242a5529ad17096e2..64821f5d44bf495d4126bc25516121db4720d933 100644 (file)
@@ -17,6 +17,7 @@
       html_root_url = "https://doc.rust-lang.org/nightly/")]
 #![deny(warnings)]
 
+#![cfg_attr(stage0, feature(field_init_shorthand))]
 #![feature(rustc_diagnostic_macros)]
 #![feature(rustc_private)]
 #![feature(staged_api)]
 #[macro_use] extern crate syntax;
 extern crate syntax_pos;
 
-use rustc::dep_graph::DepNode;
 use rustc::hir::{self, PatKind};
-use rustc::hir::def::{self, Def};
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::def::Def;
+use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefId};
 use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
 use rustc::hir::itemlikevisit::DeepVisitor;
 use rustc::hir::pat_util::EnumerateAndAdjustIterator;
 use rustc::middle::privacy::{AccessLevel, AccessLevels};
 use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
 use rustc::ty::fold::TypeVisitor;
+use rustc::ty::maps::Providers;
 use rustc::util::nodemap::NodeSet;
 use syntax::ast;
-use syntax_pos::Span;
+use syntax_pos::{DUMMY_SP, Span};
 
 use std::cmp;
 use std::mem::replace;
+use std::rc::Rc;
 
 pub mod diagnostics;
 
@@ -71,7 +73,6 @@ fn visit_vis(&mut self, vis: &'tcx hir::Visibility) {
 
 struct EmbargoVisitor<'a, 'tcx: 'a> {
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    export_map: &'a def::ExportMap,
 
     // Accessibility levels for reachable nodes
     access_levels: AccessLevels,
@@ -324,7 +325,7 @@ fn visit_mod(&mut self, m: &'tcx hir::Mod, _sp: Span, id: ast::NodeId) {
         // This code is here instead of in visit_item so that the
         // crate module gets processed as well.
         if self.prev_level.is_some() {
-            if let Some(exports) = self.export_map.get(&id) {
+            if let Some(exports) = self.tcx.export_map.get(&id) {
                 for export in exports {
                     if let Some(node_id) = self.tcx.hir.as_local_node_id(export.def.def_id()) {
                         self.update(node_id, Some(AccessLevel::Exported));
@@ -1204,10 +1205,23 @@ fn visit_expr(&mut self, _: &'tcx hir::Expr) {}
     fn visit_pat(&mut self, _: &'tcx hir::Pat) {}
 }
 
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                             export_map: &def::ExportMap)
-                             -> AccessLevels {
-    let _task = tcx.dep_graph.in_task(DepNode::Privacy);
+pub fn provide(providers: &mut Providers) {
+    *providers = Providers {
+        privacy_access_levels,
+        ..*providers
+    };
+}
+
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Rc<AccessLevels> {
+    tcx.dep_graph.with_ignore(|| { // FIXME
+        ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE)
+    })
+}
+
+fn privacy_access_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                   krate: CrateNum)
+                                   -> Rc<AccessLevels> {
+    assert_eq!(krate, LOCAL_CRATE);
 
     let krate = tcx.hir.krate();
 
@@ -1226,7 +1240,6 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // items which are reachable from external crates based on visibility.
     let mut visitor = EmbargoVisitor {
         tcx: tcx,
-        export_map: export_map,
         access_levels: Default::default(),
         prev_level: Some(AccessLevel::Public),
         changed: false,
@@ -1270,7 +1283,7 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         krate.visit_all_item_likes(&mut DeepVisitor::new(&mut visitor));
     }
 
-    visitor.access_levels
+    Rc::new(visitor.access_levels)
 }
 
 __build_diagnostic_array! { librustc_privacy, DIAGNOSTICS }
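Aside: privacy checking now goes through the query system: the crate registers a `privacy_access_levels` provider and callers fetch an `Rc<AccessLevels>` instead of invoking the pass directly. A heavily simplified, hypothetical sketch of that provider-registration shape (every type here is a stand-in, not rustc's):

```rust
use std::rc::Rc;

// Hypothetical stand-ins; none of these are rustc's real types.
type CrateNum = u32;
const LOCAL_CRATE: CrateNum = 0;

#[derive(Debug, Default)]
struct AccessLevels;

#[derive(Copy, Clone)]
struct Providers {
    privacy_access_levels: fn(CrateNum) -> Rc<AccessLevels>,
}

fn missing_provider(_: CrateNum) -> Rc<AccessLevels> {
    panic!("no provider registered")
}

// The pass computes its result inside a provider function...
fn privacy_access_levels(krate: CrateNum) -> Rc<AccessLevels> {
    assert_eq!(krate, LOCAL_CRATE);
    Rc::new(AccessLevels::default())
}

// ...and `provide` registers it with struct-update syntax and field-init
// shorthand, the same shape as the hunk above.
fn provide(providers: &mut Providers) {
    *providers = Providers { privacy_access_levels, ..*providers };
}

fn main() {
    let mut providers = Providers { privacy_access_levels: missing_provider };
    provide(&mut providers);
    // Callers now go through the table instead of calling the pass directly.
    let levels = (providers.privacy_access_levels)(LOCAL_CRATE);
    println!("{:?}", levels);
}
```

The driver-side counterpart is the `rustc_privacy::provide(&mut local_providers)` call added in driver.rs above.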
index 5ce4c74e735fd6fb11015095554c5777730d2878..0968ea31b754f9e7b2d0e47f5ccd871156483204 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 test = false
 
 [dependencies]
-log = { path = "../liblog" }
+log = "0.3"
 syntax = { path = "../libsyntax" }
 rustc = { path = "../librustc" }
 arena = { path = "../libarena" }
index c33d5b9b6e16bb14bb4980039c00deac67c22c5c..86e0d0039d1a7c4a517308a66177409be5516995 100644 (file)
@@ -539,7 +539,7 @@ fn legacy_import_macro(&mut self,
                            binding: &'a NameBinding<'a>,
                            span: Span,
                            allow_shadowing: bool) {
-        if self.builtin_macros.insert(name, binding).is_some() && !allow_shadowing {
+        if self.global_macros.insert(name, binding).is_some() && !allow_shadowing {
             let msg = format!("`{}` is already in scope", name);
             let note =
                 "macro-expanded `#[macro_use]`s may not shadow existing macros (see RFC 1560)";
index f832e0f9a4811146e7f83cb2e57b1396b29b2de2..879d8816488b20e8a1e8fc420b4215ee59e986a2 100644 (file)
@@ -75,7 +75,7 @@
 use std::rc::Rc;
 
 use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver};
-use macros::{InvocationData, LegacyBinding, LegacyScope};
+use macros::{InvocationData, LegacyBinding, LegacyScope, MacroBinding};
 
 // NB: This module needs to be declared first so diagnostics are
 // registered before they are used.
@@ -1174,7 +1174,7 @@ pub struct Resolver<'a> {
 
     crate_loader: &'a mut CrateLoader,
     macro_names: FxHashSet<Name>,
-    builtin_macros: FxHashMap<Name, &'a NameBinding<'a>>,
+    global_macros: FxHashMap<Name, &'a NameBinding<'a>>,
     lexical_macro_resolutions: Vec<(Name, &'a Cell<LegacyScope<'a>>)>,
     macro_map: FxHashMap<DefId, Rc<SyntaxExtension>>,
     macro_defs: FxHashMap<Mark, DefId>,
@@ -1372,7 +1372,7 @@ pub fn new(session: &'a Session,
 
             crate_loader: crate_loader,
             macro_names: FxHashSet(),
-            builtin_macros: FxHashMap(),
+            global_macros: FxHashMap(),
             lexical_macro_resolutions: Vec::new(),
             macro_map: FxHashMap(),
             macro_exports: Vec::new(),
@@ -2429,9 +2429,9 @@ fn resolve_qpath_anywhere(&mut self,
                 };
             }
         }
-        let is_builtin = self.builtin_macros.get(&path[0].name).cloned()
+        let is_global = self.global_macros.get(&path[0].name).cloned()
             .map(|binding| binding.get_macro(self).kind() == MacroKind::Bang).unwrap_or(false);
-        if primary_ns != MacroNS && (is_builtin || self.macro_names.contains(&path[0].name)) {
+        if primary_ns != MacroNS && (is_global || self.macro_names.contains(&path[0].name)) {
             // Return some dummy definition, it's enough for error reporting.
             return Some(
                 PathResolution::new(Def::Macro(DefId::local(CRATE_DEF_INDEX), MacroKind::Bang))
@@ -2566,6 +2566,7 @@ fn resolve_path(&mut self,
                 self.resolve_ident_in_module(module, ident, ns, false, record_used)
             } else if opt_ns == Some(MacroNS) {
                 self.resolve_lexical_macro_path_segment(ident, ns, record_used)
+                    .map(MacroBinding::binding)
             } else {
                 match self.resolve_ident_in_lexical_scope(ident, ns, record_used) {
                     Some(LexicalScopeBinding::Item(binding)) => Ok(binding),
@@ -3223,7 +3224,7 @@ fn report_errors(&mut self) {
             };
             let msg1 = format!("`{}` could refer to the name {} here", name, participle(b1));
             let msg2 = format!("`{}` could also refer to the name {} here", name, participle(b2));
-            let note = if !lexical && b1.is_glob_import() {
+            let note = if b1.expansion == Mark::root() || !lexical && b1.is_glob_import() {
                 format!("consider adding an explicit import of `{}` to disambiguate", name)
             } else if let Def::Macro(..) = b1.def() {
                 format!("macro-expanded {} do not shadow",
@@ -3243,11 +3244,15 @@ fn report_errors(&mut self) {
                 let msg = format!("`{}` is ambiguous", name);
                 self.session.add_lint(lint::builtin::LEGACY_IMPORTS, id, span, msg);
             } else {
-                self.session.struct_span_err(span, &format!("`{}` is ambiguous", name))
-                    .span_note(b1.span, &msg1)
-                    .span_note(b2.span, &msg2)
-                    .note(&note)
-                    .emit();
+                let mut err =
+                    self.session.struct_span_err(span, &format!("`{}` is ambiguous", name));
+                err.span_note(b1.span, &msg1);
+                match b2.def() {
+                    Def::Macro(..) if b2.span == DUMMY_SP =>
+                        err.note(&format!("`{}` is also a builtin macro", name)),
+                    _ => err.span_note(b2.span, &msg2),
+                };
+                err.note(&note).emit();
             }
         }
 
@@ -3361,14 +3366,13 @@ fn check_proc_macro_attrs(&mut self, attrs: &[ast::Attribute]) {
         if self.proc_macro_enabled { return; }
 
         for attr in attrs {
-            let name = unwrap_or!(attr.name(), continue);
-            let maybe_binding = self.builtin_macros.get(&name).cloned().or_else(|| {
-                let ident = Ident::with_empty_ctxt(name);
-                self.resolve_lexical_macro_path_segment(ident, MacroNS, None).ok()
-            });
-
-            if let Some(binding) = maybe_binding {
-                if let SyntaxExtension::AttrProcMacro(..) = *binding.get_macro(self) {
+            if attr.path.segments.len() > 1 {
+                continue
+            }
+            let ident = attr.path.segments[0].identifier;
+            let result = self.resolve_lexical_macro_path_segment(ident, MacroNS, None);
+            if let Ok(binding) = result {
+                if let SyntaxExtension::AttrProcMacro(..) = *binding.binding().get_macro(self) {
                     attr::mark_known(attr);
 
                     let msg = "attribute procedural macros are experimental";
@@ -3376,7 +3380,7 @@ fn check_proc_macro_attrs(&mut self, attrs: &[ast::Attribute]) {
 
                     feature_err(&self.session.parse_sess, feature,
                                 attr.span, GateIssue::Language, msg)
-                        .span_note(binding.span, "procedural macro imported here")
+                        .span_note(binding.span(), "procedural macro imported here")
                         .emit();
                 }
             }
index 99fc1c142f6815fd2cc097699e3fd0be0e02cd2b..3d6c6896549a4bb8f5996bc7615526b73218f41e 100644 (file)
@@ -81,11 +81,29 @@ pub struct LegacyBinding<'a> {
     pub span: Span,
 }
 
+#[derive(Copy, Clone)]
 pub enum MacroBinding<'a> {
     Legacy(&'a LegacyBinding<'a>),
+    Global(&'a NameBinding<'a>),
     Modern(&'a NameBinding<'a>),
 }
 
+impl<'a> MacroBinding<'a> {
+    pub fn span(self) -> Span {
+        match self {
+            MacroBinding::Legacy(binding) => binding.span,
+            MacroBinding::Global(binding) | MacroBinding::Modern(binding) => binding.span,
+        }
+    }
+
+    pub fn binding(self) -> &'a NameBinding<'a> {
+        match self {
+            MacroBinding::Global(binding) | MacroBinding::Modern(binding) => binding,
+            MacroBinding::Legacy(_) => panic!("unexpected MacroBinding::Legacy"),
+        }
+    }
+}
+
 impl<'a> base::Resolver for Resolver<'a> {
     fn next_node_id(&mut self) -> ast::NodeId {
         self.session.next_node_id()
@@ -171,7 +189,7 @@ fn add_builtin(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
             vis: ty::Visibility::Invisible,
             expansion: Mark::root(),
         });
-        self.builtin_macros.insert(ident.name, binding);
+        self.global_macros.insert(ident.name, binding);
     }
 
     fn resolve_imports(&mut self) {
@@ -189,7 +207,7 @@ fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<ast::Attribute>)
                 attr::mark_known(&attrs[i]);
             }
 
-            match self.builtin_macros.get(&name).cloned() {
+            match self.global_macros.get(&name).cloned() {
                 Some(binding) => match *binding.get_macro(self) {
                     MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => {
                         return Some(attrs.remove(i))
@@ -221,7 +239,7 @@ fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<ast::Attribute>)
                     }
                     let trait_name = traits[j].segments[0].identifier.name;
                     let legacy_name = Symbol::intern(&format!("derive_{}", trait_name));
-                    if !self.builtin_macros.contains_key(&legacy_name) {
+                    if !self.global_macros.contains_key(&legacy_name) {
                         continue
                     }
                     let span = traits.remove(j).span;
@@ -378,18 +396,18 @@ fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKin
         }
 
         let name = path[0].name;
-        let result = match self.resolve_legacy_scope(&invocation.legacy_scope, name, false) {
-            Some(MacroBinding::Legacy(binding)) => Ok(Def::Macro(binding.def_id, MacroKind::Bang)),
-            Some(MacroBinding::Modern(binding)) => Ok(binding.def_ignoring_ambiguity()),
-            None => match self.resolve_lexical_macro_path_segment(path[0], MacroNS, None) {
-                Ok(binding) => Ok(binding.def_ignoring_ambiguity()),
-                Err(Determinacy::Undetermined) if !force =>
-                    return Err(Determinacy::Undetermined),
+        let legacy_resolution = self.resolve_legacy_scope(&invocation.legacy_scope, name, false);
+        let result = if let Some(MacroBinding::Legacy(binding)) = legacy_resolution {
+            Ok(Def::Macro(binding.def_id, MacroKind::Bang))
+        } else {
+            match self.resolve_lexical_macro_path_segment(path[0], MacroNS, None) {
+                Ok(binding) => Ok(binding.binding().def_ignoring_ambiguity()),
+                Err(Determinacy::Undetermined) if !force => return Err(Determinacy::Undetermined),
                 Err(_) => {
                     self.found_unresolved_macro = true;
                     Err(Determinacy::Determined)
                 }
-            },
+            }
         };
 
         self.current_module.legacy_macro_resolutions.borrow_mut()
@@ -403,42 +421,56 @@ pub fn resolve_lexical_macro_path_segment(&mut self,
                                               ident: Ident,
                                               ns: Namespace,
                                               record_used: Option<Span>)
-                                              -> Result<&'a NameBinding<'a>, Determinacy> {
-        let mut module = self.current_module;
-        let mut potential_expanded_shadower: Option<&NameBinding> = None;
+                                              -> Result<MacroBinding<'a>, Determinacy> {
+        let mut module = Some(self.current_module);
+        let mut potential_illegal_shadower = Err(Determinacy::Determined);
+        let determinacy =
+            if record_used.is_some() { Determinacy::Determined } else { Determinacy::Undetermined };
         loop {
-            // Since expanded macros may not shadow the lexical scope (enforced below),
-            // we can ignore unresolved invocations (indicated by the penultimate argument).
-            match self.resolve_ident_in_module(module, ident, ns, true, record_used) {
+            let result = if let Some(module) = module {
+                // Since expanded macros may not shadow the lexical scope and
+                // globs may not shadow global macros (both enforced below),
+                // we resolve with restricted shadowing (indicated by the penultimate argument).
+                self.resolve_ident_in_module(module, ident, ns, true, record_used)
+                    .map(MacroBinding::Modern)
+            } else {
+                self.global_macros.get(&ident.name).cloned().ok_or(determinacy)
+                    .map(MacroBinding::Global)
+            };
+
+            match result.map(MacroBinding::binding) {
                 Ok(binding) => {
                     let span = match record_used {
                         Some(span) => span,
-                        None => return Ok(binding),
+                        None => return result,
                     };
-                    match potential_expanded_shadower {
-                        Some(shadower) if shadower.def() != binding.def() => {
+                    if let Ok(MacroBinding::Modern(shadower)) = potential_illegal_shadower {
+                        if shadower.def() != binding.def() {
                             let name = ident.name;
                             self.ambiguity_errors.push(AmbiguityError {
                                 span: span, name: name, b1: shadower, b2: binding, lexical: true,
                                 legacy: false,
                             });
-                            return Ok(shadower);
+                            return potential_illegal_shadower;
                         }
-                        _ if binding.expansion == Mark::root() => return Ok(binding),
-                        _ => potential_expanded_shadower = Some(binding),
+                    }
+                    if binding.expansion != Mark::root() ||
+                       (binding.is_glob_import() && module.unwrap().def().is_some()) {
+                        potential_illegal_shadower = result;
+                    } else {
+                        return result;
                     }
                 },
                 Err(Determinacy::Undetermined) => return Err(Determinacy::Undetermined),
                 Err(Determinacy::Determined) => {}
             }
 
-            match module.kind {
-                ModuleKind::Block(..) => module = module.parent.unwrap(),
-                ModuleKind::Def(..) => return match potential_expanded_shadower {
-                    Some(binding) => Ok(binding),
-                    None if record_used.is_some() => Err(Determinacy::Determined),
-                    None => Err(Determinacy::Undetermined),
+            module = match module {
+                Some(module) => match module.kind {
+                    ModuleKind::Block(..) => module.parent,
+                    ModuleKind::Def(..) => None,
                 },
+                None => return potential_illegal_shadower,
             }
         }
     }
@@ -488,11 +520,11 @@ pub fn resolve_legacy_scope(&mut self,
 
         let binding = if let Some(binding) = binding {
             MacroBinding::Legacy(binding)
-        } else if let Some(binding) = self.builtin_macros.get(&name).cloned() {
+        } else if let Some(binding) = self.global_macros.get(&name).cloned() {
             if !self.use_extern_macros {
                 self.record_use(Ident::with_empty_ctxt(name), MacroNS, binding, DUMMY_SP);
             }
-            MacroBinding::Modern(binding)
+            MacroBinding::Global(binding)
         } else {
             return None;
         };
@@ -524,21 +556,15 @@ pub fn finalize_current_module_macro_resolutions(&mut self) {
             let legacy_resolution = self.resolve_legacy_scope(legacy_scope, ident.name, true);
             let resolution = self.resolve_lexical_macro_path_segment(ident, MacroNS, Some(span));
             match (legacy_resolution, resolution) {
-                (Some(legacy_resolution), Ok(resolution)) => {
-                    let (legacy_span, participle) = match legacy_resolution {
-                        MacroBinding::Modern(binding)
-                            if binding.def() == resolution.def() => continue,
-                        MacroBinding::Modern(binding) => (binding.span, "imported"),
-                        MacroBinding::Legacy(binding) => (binding.span, "defined"),
-                    };
-                    let msg1 = format!("`{}` could refer to the macro {} here", ident, participle);
+                (Some(MacroBinding::Legacy(legacy_binding)), Ok(MacroBinding::Modern(binding))) => {
+                    let msg1 = format!("`{}` could refer to the macro defined here", ident);
                     let msg2 = format!("`{}` could also refer to the macro imported here", ident);
                     self.session.struct_span_err(span, &format!("`{}` is ambiguous", ident))
-                        .span_note(legacy_span, &msg1)
-                        .span_note(resolution.span, &msg2)
+                        .span_note(legacy_binding.span, &msg1)
+                        .span_note(binding.span, &msg2)
                         .emit();
                 },
-                (Some(MacroBinding::Modern(binding)), Err(_)) => {
+                (Some(MacroBinding::Global(binding)), Ok(MacroBinding::Global(_))) => {
                     self.record_use(ident, MacroNS, binding, span);
                     self.err_if_macro_use_proc_macro(ident.name, span, binding);
                 },
@@ -567,11 +593,11 @@ fn suggest_macro_name(&mut self, name: &str, kind: MacroKind,
             find_best_match_for_name(self.macro_names.iter(), name, None)
         } else {
             None
-        // Then check builtin macros.
+        // Then check global macros.
         }.or_else(|| {
             // FIXME: get_macro needs an &mut Resolver, can we do it without cloning?
-            let builtin_macros = self.builtin_macros.clone();
-            let names = builtin_macros.iter().filter_map(|(name, binding)| {
+            let global_macros = self.global_macros.clone();
+            let names = global_macros.iter().filter_map(|(name, binding)| {
                 if binding.get_macro(self).kind() == kind {
                     Some(name)
                 } else {
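Aside: the rename from `builtin_macros` to `global_macros` comes with a new lookup order in `resolve_lexical_macro_path_segment`: walk the enclosing modules first, then fall back to the global table (`module == None` means "global scope"). A hypothetical miniature of just that walk (it deliberately omits the shadowing and ambiguity checks the real resolver performs, and none of these types are rustc's):

```rust
use std::collections::HashMap;

struct Scope<'a> {
    macros: HashMap<&'a str, &'a str>, // name -> definition site
    parent: Option<&'a Scope<'a>>,
}

fn resolve<'a>(mut scope: Option<&'a Scope<'a>>,
               globals: &'a HashMap<&'a str, &'a str>,
               name: &str)
               -> Option<&'a str> {
    loop {
        match scope {
            Some(s) => {
                if let Some(&def) = s.macros.get(name) {
                    return Some(def);
                }
                scope = s.parent;
            }
            // Out of lexical scopes: consult the global (built-in or
            // `#[macro_use]`-imported) macros.
            None => return globals.get(name).cloned(),
        }
    }
}

fn main() {
    let mut globals = HashMap::new();
    globals.insert("vec", "built-in");

    let root = Scope { macros: HashMap::new(), parent: None };
    let mut inner = Scope { macros: HashMap::new(), parent: Some(&root) };
    inner.macros.insert("local", "defined in block");

    assert_eq!(resolve(Some(&inner), &globals, "local"), Some("defined in block"));
    assert_eq!(resolve(Some(&inner), &globals, "vec"), Some("built-in"));
    assert_eq!(resolve(Some(&inner), &globals, "missing"), None);
}
```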
index 2f4ac12cd7363bf2604c8d738206947802300f27..43654c8ce6f6875b13aa5009fabec257024ae784 100644 (file)
@@ -145,7 +145,7 @@ pub fn resolve_ident_in_module(&mut self,
                                    module: Module<'a>,
                                    ident: Ident,
                                    ns: Namespace,
-                                   ignore_unresolved_invocations: bool,
+                                   restricted_shadowing: bool,
                                    record_used: Option<Span>)
                                    -> Result<&'a NameBinding<'a>, Determinacy> {
         self.populate_module_if_necessary(module);
@@ -158,9 +158,8 @@ pub fn resolve_ident_in_module(&mut self,
             if let Some(binding) = resolution.binding {
                 if let Some(shadowed_glob) = resolution.shadows_glob {
                     let name = ident.name;
-                    // If we ignore unresolved invocations, we must forbid
-                    // expanded shadowing to avoid time travel.
-                    if ignore_unresolved_invocations &&
+                    // Forbid expanded shadowing to avoid time travel.
+                    if restricted_shadowing &&
                        binding.expansion != Mark::root() &&
                        ns != MacroNS && // In MacroNS, `try_define` always forbids this shadowing
                        binding.def() != shadowed_glob.def() {
@@ -215,7 +214,7 @@ pub fn resolve_ident_in_module(&mut self,
         }
 
         let no_unresolved_invocations =
-            ignore_unresolved_invocations || module.unresolved_invocations.borrow().is_empty();
+            restricted_shadowing || module.unresolved_invocations.borrow().is_empty();
         match resolution.binding {
             // In `MacroNS`, expanded bindings do not shadow (enforced in `try_define`).
             Some(binding) if no_unresolved_invocations || ns == MacroNS =>
@@ -225,6 +224,9 @@ pub fn resolve_ident_in_module(&mut self,
         }
 
         // Check if the globs are determined
+        if restricted_shadowing && module.def().is_some() {
+            return Err(Determined);
+        }
         for directive in module.globs.borrow().iter() {
             if self.is_accessible(directive.vis.get()) {
                 if let Some(module) = directive.imported_module.get() {
index 06c5150fd13ade909458604075f3dcacd69a2b9c..07a5c266fc026798a23251015f1ed319788da744 100644 (file)
@@ -9,7 +9,7 @@ path = "lib.rs"
 crate-type = ["dylib"]
 
 [dependencies]
-log = { path = "../liblog" }
+log = "0.3"
 rustc = { path = "../librustc" }
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
index 59340ae87ee5da6e607b7e48ce976efd988b9228..4bab135ff12f70db54b2a65ef5007de61d07dcea 100644 (file)
@@ -423,7 +423,7 @@ fn make_values_str(pairs: &[(&'static str, &str)]) -> String {
 
     let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from(v))));
     strs.fold(String::new(), |mut s, ss| {
-        s.push_str(&ss[..]);
+        s.push_str(&ss);
         s
     })
 }
index acc877d3947758fb20a222e51c2f6a896fe5c3e4..2d1e12bf0a10dfd379e4d90488a6f915a9059d9e 100644 (file)
 use data::{self, VariableKind};
 use dump::Dump;
 
-pub struct JsonDumper<'b, W: Write + 'b> {
-    output: &'b mut W,
+pub struct JsonDumper<O: DumpOutput> {
     result: Analysis,
+    output: O,
 }
 
-impl<'b, W: Write> JsonDumper<'b, W> {
-    pub fn new(writer: &'b mut W) -> JsonDumper<'b, W> {
-        JsonDumper { output: writer, result: Analysis::new() }
-    }
+pub trait DumpOutput {
+    fn dump(&mut self, result: &Analysis);
 }
 
-impl<'b, W: Write> Drop for JsonDumper<'b, W> {
-    fn drop(&mut self) {
-        if let Err(_) = write!(self.output, "{}", as_json(&self.result)) {
+pub struct WriteOutput<'b, W: Write + 'b> {
+    output: &'b mut W,
+}
+
+impl<'b, W: Write> DumpOutput for WriteOutput<'b, W> {
+    fn dump(&mut self, result: &Analysis) {
+        if let Err(_) = write!(self.output, "{}", as_json(&result)) {
             error!("Error writing output");
         }
     }
 }
 
+pub struct CallbackOutput<'b> {
+    callback: &'b mut FnMut(&Analysis),
+}
+
+impl<'b> DumpOutput for CallbackOutput<'b> {
+    fn dump(&mut self, result: &Analysis) {
+        (self.callback)(result)
+    }
+}
+
+impl<'b, W: Write> JsonDumper<WriteOutput<'b, W>> {
+    pub fn new(writer: &'b mut W) -> JsonDumper<WriteOutput<'b, W>> {
+        JsonDumper { output: WriteOutput { output: writer }, result: Analysis::new() }
+    }
+}
+
+impl<'b> JsonDumper<CallbackOutput<'b>> {
+    pub fn with_callback(callback: &'b mut FnMut(&Analysis)) -> JsonDumper<CallbackOutput<'b>> {
+        JsonDumper { output: CallbackOutput { callback: callback }, result: Analysis::new() }
+    }
+}
+
+impl<O: DumpOutput> Drop for JsonDumper<O> {
+    fn drop(&mut self) {
+        self.output.dump(&self.result);
+    }
+}
+
 macro_rules! impl_fn {
     ($fn_name: ident, $data_type: ident, $bucket: ident) => {
         fn $fn_name(&mut self, data: $data_type) {
@@ -49,7 +79,7 @@ fn $fn_name(&mut self, data: $data_type) {
     }
 }
 
-impl<'b, W: Write + 'b> Dump for JsonDumper<'b, W> {
+impl<'b, O: DumpOutput + 'b> Dump for JsonDumper<O> {
     fn crate_prelude(&mut self, data: CratePreludeData) {
         self.result.prelude = Some(data)
     }
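Aside: the dumper is now generic over where the finished analysis goes. A self-contained sketch of that `DumpOutput` split (the `Analysis` type is a stand-in for `rls_data::Analysis`, and where the patch stores a `&mut FnMut(&Analysis)` trait object this uses a generic closure to stay edition-agnostic):

```rust
use std::io::Write;

// A simplified stand-in for the real rls_data::Analysis.
#[derive(Debug, Default)]
struct Analysis {
    defs: usize,
}

// The dumper no longer writes JSON itself; it hands the finished result to a
// DumpOutput when it is dropped.
trait DumpOutput {
    fn dump(&mut self, result: &Analysis);
}

struct WriteOutput<'b, W: Write + 'b> {
    output: &'b mut W,
}

impl<'b, W: Write> DumpOutput for WriteOutput<'b, W> {
    fn dump(&mut self, result: &Analysis) {
        let _ = write!(self.output, "{:?}", result);
    }
}

struct CallbackOutput<F: FnMut(&Analysis)> {
    callback: F,
}

impl<F: FnMut(&Analysis)> DumpOutput for CallbackOutput<F> {
    fn dump(&mut self, result: &Analysis) {
        (self.callback)(result)
    }
}

struct JsonDumper<O: DumpOutput> {
    result: Analysis,
    output: O,
}

impl<O: DumpOutput> Drop for JsonDumper<O> {
    fn drop(&mut self) {
        self.output.dump(&self.result);
    }
}

fn main() {
    // Writer-backed output: the normal save-analysis dump to a file.
    let mut buf = Vec::new();
    {
        let mut dumper = JsonDumper {
            result: Analysis::default(),
            output: WriteOutput { output: &mut buf },
        };
        dumper.result.defs = 3;
    }
    assert_eq!(String::from_utf8(buf).unwrap(), "Analysis { defs: 3 }");

    // Callback-backed output: an embedding tool receives the result in memory.
    let mut seen = 0;
    {
        let _dumper = JsonDumper {
            result: Analysis { defs: 7 },
            output: CallbackOutput { callback: |a: &Analysis| seen = a.defs },
        };
    }
    assert_eq!(seen, 7);
}
```

In the driver, the writer path stays behind `DumpHandler`, while `JsonDumper::with_callback` backs the new `CallbackHandler` so tools can consume the analysis without touching the filesystem.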
index 5e2b1df9d34f89451f28737760f2f4648dc487bc..e5c04f6b61ec297643bb9cd1726fb33bcff8b115 100644 (file)
@@ -48,6 +48,7 @@
 use rustc::hir::map::Node;
 use rustc::hir::def_id::DefId;
 use rustc::session::config::CrateType::CrateTypeExecutable;
+use rustc::session::Session;
 use rustc::ty::{self, TyCtxt};
 
 use std::env;
@@ -866,55 +867,131 @@ fn extension(&self) -> &'static str {
     }
 }
 
-pub fn process_crate<'l, 'tcx>(tcx: TyCtxt<'l, 'tcx, 'tcx>,
-                               krate: &ast::Crate,
-                               analysis: &'l ty::CrateAnalysis,
-                               cratename: &str,
-                               odir: Option<&Path>,
-                               format: Format) {
-    let _ignore = tcx.dep_graph.in_ignore();
+/// Defines what to do with the results of saving the analysis.
+pub trait SaveHandler {
+    fn save<'l, 'tcx>(&mut self,
+                      save_ctxt: SaveContext<'l, 'tcx>,
+                      krate: &ast::Crate,
+                      cratename: &str);
+}
 
-    assert!(analysis.glob_map.is_some());
+/// Dump the save-analysis results to a file.
+pub struct DumpHandler<'a> {
+    format: Format,
+    odir: Option<&'a Path>,
+    cratename: String
+}
 
-    info!("Dumping crate {}", cratename);
+impl<'a> DumpHandler<'a> {
+    pub fn new(format: Format, odir: Option<&'a Path>, cratename: &str) -> DumpHandler<'a> {
+        DumpHandler {
+            format: format,
+            odir: odir,
+            cratename: cratename.to_owned()
+        }
+    }
 
-    // find a path to dump our data to
-    let mut root_path = match env::var_os("RUST_SAVE_ANALYSIS_FOLDER") {
-        Some(val) => PathBuf::from(val),
-        None => match odir {
-            Some(val) => val.join("save-analysis"),
-            None => PathBuf::from("save-analysis-temp"),
-        },
-    };
+    fn output_file(&self, sess: &Session) -> File {
+        let mut root_path = match env::var_os("RUST_SAVE_ANALYSIS_FOLDER") {
+            Some(val) => PathBuf::from(val),
+            None => match self.odir {
+                Some(val) => val.join("save-analysis"),
+                None => PathBuf::from("save-analysis-temp"),
+            },
+        };
 
-    if let Err(e) = std::fs::create_dir_all(&root_path) {
-        tcx.sess.err(&format!("Could not create directory {}: {}",
-                              root_path.display(),
-                              e));
+        if let Err(e) = std::fs::create_dir_all(&root_path) {
+            error!("Could not create directory {}: {}", root_path.display(), e);
+        }
+
+        {
+            let disp = root_path.display();
+            info!("Writing output to {}", disp);
+        }
+
+        let executable = sess.crate_types.borrow().iter().any(|ct| *ct == CrateTypeExecutable);
+        let mut out_name = if executable {
+            "".to_owned()
+        } else {
+            "lib".to_owned()
+        };
+        out_name.push_str(&self.cratename);
+        out_name.push_str(&sess.opts.cg.extra_filename);
+        out_name.push_str(self.format.extension());
+        root_path.push(&out_name);
+        let output_file = File::create(&root_path).unwrap_or_else(|e| {
+            let disp = root_path.display();
+            sess.fatal(&format!("Could not open {}: {}", disp, e));
+        });
+        root_path.pop();
+        output_file
     }
+}
+
+impl<'a> SaveHandler for DumpHandler<'a> {
+    fn save<'l, 'tcx>(&mut self,
+                      save_ctxt: SaveContext<'l, 'tcx>,
+                      krate: &ast::Crate,
+                      cratename: &str) {
+        macro_rules! dump {
+            ($new_dumper: expr) => {{
+                let mut dumper = $new_dumper;
+                let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
+
+                visitor.dump_crate_info(cratename, krate);
+                visit::walk_crate(&mut visitor, krate);
+            }}
+        }
+
+        let output = &mut self.output_file(&save_ctxt.tcx.sess);
 
-    {
-        let disp = root_path.display();
-        info!("Writing output to {}", disp);
+        match self.format {
+            Format::Csv => dump!(CsvDumper::new(output)),
+            Format::Json => dump!(JsonDumper::new(output)),
+            Format::JsonApi => dump!(JsonApiDumper::new(output)),
+        }
     }
+}
 
-    // Create output file.
-    let executable = tcx.sess.crate_types.borrow().iter().any(|ct| *ct == CrateTypeExecutable);
-    let mut out_name = if executable {
-        "".to_owned()
-    } else {
-        "lib".to_owned()
-    };
-    out_name.push_str(&cratename);
-    out_name.push_str(&tcx.sess.opts.cg.extra_filename);
-    out_name.push_str(format.extension());
-    root_path.push(&out_name);
-    let mut output_file = File::create(&root_path).unwrap_or_else(|e| {
-        let disp = root_path.display();
-        tcx.sess.fatal(&format!("Could not open {}: {}", disp, e));
-    });
-    root_path.pop();
-    let output = &mut output_file;
+/// Call a callback with the results of save-analysis.
+pub struct CallbackHandler<'b> {
+    pub callback: &'b mut FnMut(&rls_data::Analysis),
+}
+
+impl<'b> SaveHandler for CallbackHandler<'b> {
+    fn save<'l, 'tcx>(&mut self,
+                      save_ctxt: SaveContext<'l, 'tcx>,
+                      krate: &ast::Crate,
+                      cratename: &str) {
+        macro_rules! dump {
+            ($new_dumper: expr) => {{
+                let mut dumper = $new_dumper;
+                let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
+
+                visitor.dump_crate_info(cratename, krate);
+                visit::walk_crate(&mut visitor, krate);
+            }}
+        }
+
+        // We're using the JsonDumper here because it has the format of the
+        // save-analysis results that we will pass to the callback. IOW, we are
+        // using the JsonDumper to collect the save-analysis results, but not
+        // actually to dump them to a file. This is all a bit convoluted and
+        // there is certainly a simpler design here trying to get out (FIXME).
+        dump!(JsonDumper::with_callback(self.callback))
+    }
+}
+
+pub fn process_crate<'l, 'tcx, H: SaveHandler>(tcx: TyCtxt<'l, 'tcx, 'tcx>,
+                                               krate: &ast::Crate,
+                                               analysis: &'l ty::CrateAnalysis,
+                                               cratename: &str,
+                                               mut handler: H) {
+    let _ignore = tcx.dep_graph.in_ignore();
+
+    assert!(analysis.glob_map.is_some());
+
+    info!("Dumping crate {}", cratename);
 
     let save_ctxt = SaveContext {
         tcx: tcx,
@@ -923,21 +1000,7 @@ pub fn process_crate<'l, 'tcx>(tcx: TyCtxt<'l, 'tcx, 'tcx>,
         span_utils: SpanUtils::new(&tcx.sess),
     };
 
-    macro_rules! dump {
-        ($new_dumper: expr) => {{
-            let mut dumper = $new_dumper;
-            let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
-
-            visitor.dump_crate_info(cratename, krate);
-            visit::walk_crate(&mut visitor, krate);
-        }}
-    }
-
-    match format {
-        Format::Csv => dump!(CsvDumper::new(output)),
-        Format::Json => dump!(JsonDumper::new(output)),
-        Format::JsonApi => dump!(JsonApiDumper::new(output)),
-    }
+    handler.save(save_ctxt, krate, cratename)
 }
 
 // Utility functions for the module.
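
Note on the hunk above: splitting save-analysis behind the SaveHandler trait lets a driver either dump results to disk (DumpHandler) or receive them in memory (CallbackHandler). A minimal sketch of the callback path, assuming tcx, krate, analysis and crate_name are already in scope from a normal compilation and that the items above are imported; the closure body is only a placeholder:

    // Collect the analysis in memory instead of writing a file.
    let mut consume = |_data: &rls_data::Analysis| {
        // hand the in-memory analysis to a tool such as the RLS here
    };
    process_crate(tcx,
                  krate,
                  analysis,
                  crate_name,
                  CallbackHandler { callback: &mut consume });
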
index b5c67ad998b69e4ea448eebe5cca0e99078b42f8..07dcb2fc29dc66d4f7db46140fdefd859a2238d6 100644 (file)
@@ -11,7 +11,7 @@ test = false
 
 [dependencies]
 flate = { path = "../libflate" }
-log = { path = "../liblog" }
+log = "0.3"
 rustc = { path = "../librustc" }
 rustc_back = { path = "../librustc_back" }
 rustc_bitflags = { path = "../librustc_bitflags" }
index 27a19d211c2908ef4f5910aa1f4915859d28cede..1530708b4b8882c6361608a1936496ae90769e93 100644 (file)
@@ -369,7 +369,7 @@ fn unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
             match sig.inputs().last().unwrap().sty {
                 ty::TyTuple(ref tupled_arguments, _) => {
                     inputs = &sig.inputs()[0..sig.inputs().len() - 1];
-                    &tupled_arguments[..]
+                    &tupled_arguments
                 }
                 _ => {
                     bug!("argument to function with \"rust-call\" ABI \
index 058f37f62dd82b70f87814f6ce567b2236d05c99..5c1ced573402e07d8314d0654ee458b6bbde3a18 100644 (file)
@@ -229,11 +229,11 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                  variant_fill].iter().cloned().collect();
             match name {
                 None => {
-                    Type::struct_(cx, &fields[..], false)
+                    Type::struct_(cx, &fields, false)
                 }
                 Some(name) => {
                     let mut llty = Type::named_struct(cx, name);
-                    llty.set_struct_body(&fields[..], false);
+                    llty.set_struct_body(&fields, false);
                     llty
                 }
             }
@@ -330,7 +330,7 @@ fn struct_wrapped_nullable_bitdiscr(
     alignment: Alignment,
 ) -> ValueRef {
     let llptrptr = bcx.gepi(scrutinee,
-        &discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>()[..]);
+        &discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>());
     let llptr = bcx.load(llptrptr, alignment.to_align());
     let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
     bcx.icmp(cmp, llptr, C_null(val_ty(llptr)))
@@ -402,7 +402,7 @@ pub fn trans_set_discr<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, val: Valu
                     base::call_memset(bcx, llptr, fill_byte, size, align, false);
                 } else {
                     let path = discrfield.iter().map(|&i| i as usize).collect::<Vec<_>>();
-                    let llptrptr = bcx.gepi(val, &path[..]);
+                    let llptrptr = bcx.gepi(val, &path);
                     let llptrty = val_ty(llptrptr).element_type();
                     bcx.store(C_null(llptrty), llptrptr, None);
                 }
index 12e4e57964f985e47c9ba31226693965c73b4dd7..b6195765b27c29711856314d9afc1524b0e012e2 100644 (file)
@@ -77,14 +77,14 @@ pub fn trans_inline_asm<'a, 'tcx>(
           .chain(arch_clobbers.iter().map(|s| s.to_string()))
           .collect::<Vec<String>>().join(",");
 
-    debug!("Asm Constraints: {}", &all_constraints[..]);
+    debug!("Asm Constraints: {}", &all_constraints);
 
     // Depending on how many outputs we have, the return type is different
     let num_outputs = output_types.len();
     let output_type = match num_outputs {
         0 => Type::void(bcx.ccx),
         1 => output_types[0],
-        _ => Type::struct_(bcx.ccx, &output_types[..], false)
+        _ => Type::struct_(bcx.ccx, &output_types, false)
     };
 
     let dialect = match ia.dialect {
index 11ab6dcaa87f99fa02a99fac8d9068d8c462d2c1..0f908b7d0698bc48e7bd1fd529d46212c5433a0f 100644 (file)
@@ -65,10 +65,10 @@ pub fn find_library(name: &str, search_paths: &[PathBuf], sess: &Session)
 
     for path in search_paths {
         debug!("looking for {} inside {:?}", name, path);
-        let test = path.join(&oslibname[..]);
+        let test = path.join(&oslibname);
         if test.exists() { return test }
         if oslibname != unixlibname {
-            let test = path.join(&unixlibname[..]);
+            let test = path.join(&unixlibname);
             if test.exists() { return test }
         }
     }
index cf1e10b317b1ed030a4acbdf60bc28131d477d61..6d17b2f0eeda3514c3b6079c2c0d92cc7ce315ff 100644 (file)
@@ -91,7 +91,7 @@ pub fn find_crate_name(sess: Option<&Session>,
                        attrs: &[ast::Attribute],
                        input: &Input) -> String {
     let validate = |s: String, span: Option<Span>| {
-        cstore::validate_crate_name(sess, &s[..], span);
+        cstore::validate_crate_name(sess, &s, span);
         s
     };
 
@@ -109,7 +109,7 @@ pub fn find_crate_name(sess: Option<&Session>,
                     let msg = format!("--crate-name and #[crate_name] are \
                                        required to match, but `{}` != `{}`",
                                       s, name);
-                    sess.span_err(attr.span, &msg[..]);
+                    sess.span_err(attr.span, &msg);
                 }
             }
             return validate(s.clone(), None);
@@ -417,7 +417,7 @@ fn object_filenames(trans: &CrateTranslation,
                     outputs: &OutputFilenames)
                     -> Vec<PathBuf> {
     trans.modules.iter().map(|module| {
-        outputs.temp_path(OutputType::Object, Some(&module.name[..]))
+        outputs.temp_path(OutputType::Object, Some(&module.name))
     }).collect()
 }
 
@@ -551,7 +551,7 @@ fn link_rlib<'a>(sess: &'a Session,
                                                  e))
                 }
 
-                let bc_data_deflated = flate::deflate_bytes(&bc_data[..]);
+                let bc_data_deflated = flate::deflate_bytes(&bc_data);
 
                 let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
                     Ok(file) => file,
@@ -819,12 +819,12 @@ fn escape_string(s: &[u8]) -> String {
                                          pname,
                                          prog.status))
                     .note(&format!("{:?}", &cmd))
-                    .note(&escape_string(&output[..]))
+                    .note(&escape_string(&output))
                     .emit();
                 sess.abort_if_errors();
             }
-            info!("linker stderr:\n{}", escape_string(&prog.stderr[..]));
-            info!("linker stdout:\n{}", escape_string(&prog.stdout[..]));
+            info!("linker stderr:\n{}", escape_string(&prog.stderr));
+            info!("linker stdout:\n{}", escape_string(&prog.stdout));
         },
         Err(e) => {
             sess.struct_err(&format!("could not exec the linker `{}`: {}", pname, e))
index 0ef3f351a2a4bffdb58292beb058e482a73416cc..e23ddd2542a808f8ae0b765c2c86a95ecafa3f45 100644 (file)
@@ -61,7 +61,7 @@ pub fn run(sess: &session::Session,
     }
 
     let export_threshold =
-        symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]);
+        symbol_export::crates_export_threshold(&sess.crate_types.borrow());
 
     let symbol_filter = &|&(ref name, level): &(String, _)| {
         if symbol_export::is_below_threshold(level, export_threshold) {
@@ -147,7 +147,7 @@ pub fn run(sess: &session::Session,
                                                         bc_decoded.len() as libc::size_t) {
                     write::llvm_err(sess.diagnostic(),
                                     format!("failed to load bc of `{}`",
-                                            &name[..]));
+                                            name));
                 }
             });
         }
index 9c982be3fa03ef7215009152e8ebd779bfe0c9dd..104e7bc6a52bde0cf04a013fe4ef0f4d47169a62 100644 (file)
@@ -37,8 +37,8 @@ pub fn get_rpath_flags(config: &mut RPathConfig) -> Vec<String> {
 
     let libs = config.used_crates.clone();
     let libs = libs.into_iter().filter_map(|(_, l)| l.option()).collect::<Vec<_>>();
-    let rpaths = get_rpaths(config, &libs[..]);
-    flags.extend_from_slice(&rpaths_to_flags(&rpaths[..]));
+    let rpaths = get_rpaths(config, &libs);
+    flags.extend_from_slice(&rpaths_to_flags(&rpaths));
 
     // Use DT_RUNPATH instead of DT_RPATH if available
     if config.linker_is_gnu {
@@ -84,14 +84,14 @@ fn log_rpaths(desc: &str, rpaths: &[String]) {
         }
     }
 
-    log_rpaths("relative", &rel_rpaths[..]);
-    log_rpaths("fallback", &fallback_rpaths[..]);
+    log_rpaths("relative", &rel_rpaths);
+    log_rpaths("fallback", &fallback_rpaths);
 
     let mut rpaths = rel_rpaths;
-    rpaths.extend_from_slice(&fallback_rpaths[..]);
+    rpaths.extend_from_slice(&fallback_rpaths);
 
     // Remove duplicates
-    let rpaths = minimize_rpaths(&rpaths[..]);
+    let rpaths = minimize_rpaths(&rpaths);
     return rpaths;
 }
 
@@ -177,7 +177,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
     let mut set = HashSet::new();
     let mut minimized = Vec::new();
     for rpath in rpaths {
-        if set.insert(&rpath[..]) {
+        if set.insert(rpath) {
             minimized.push(rpath.clone());
         }
     }
index 005fb3533ab0b793510bdec9fde95630c2a46744..23a67ef5046eebec0a527e24a0859eff3f359dce 100644 (file)
@@ -154,7 +154,7 @@ pub fn exported_symbols(&self,
                             cnum: CrateNum)
                             -> &[(String, SymbolExportLevel)] {
         match self.exports.get(&cnum) {
-            Some(exports) => &exports[..],
+            Some(exports) => exports,
             None => &[]
         }
     }
@@ -167,7 +167,7 @@ pub fn for_each_exported_symbol<F>(&self,
     {
         for &(ref name, export_level) in self.exported_symbols(cnum) {
             if is_below_threshold(export_level, export_threshold) {
-                f(&name[..], export_level)
+                f(&name, export_level)
             }
         }
     }
index 518995dfedcc282fac84b41ada9f0ebb288837be..3ad04e10cb027d20f4a555929eb43eff1a11dd62 100644 (file)
@@ -341,7 +341,7 @@ pub fn sanitize(s: &str) -> String {
     if !result.is_empty() &&
         result.as_bytes()[0] != '_' as u8 &&
         ! (result.as_bytes()[0] as char).is_xid_start() {
-        return format!("_{}", &result[..]);
+        return format!("_{}", result);
     }
 
     return result;
index 377ff34cb7e0db7a745dfc4e802b95137a302586..5a017e4fb8a9aa07e163887f28c443960278b1da 100644 (file)
@@ -105,7 +105,7 @@ fn dump(&mut self, handler: &Handler) {
                 Some(ref code) => {
                     handler.emit_with_code(&MultiSpan::new(),
                                            &diag.msg,
-                                           &code[..],
+                                           &code,
                                            diag.lvl);
                 },
                 None => {
@@ -189,8 +189,8 @@ pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
     let fdata_sections = ffunction_sections;
 
     let code_model_arg = match sess.opts.cg.code_model {
-        Some(ref s) => &s[..],
-        None => &sess.target.target.options.code_model[..],
+        Some(ref s) => &s,
+        None => &sess.target.target.options.code_model,
     };
 
     let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
@@ -397,7 +397,7 @@ struct HandlerFreeVars<'a> {
     let msg = llvm::build_string(|s| llvm::LLVMRustWriteSMDiagnosticToString(diag, s))
         .expect("non-UTF8 SMDiagnostic");
 
-    report_inline_asm(cgcx, &msg[..], cookie);
+    report_inline_asm(cgcx, &msg, cookie);
 }
 
 unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) {
@@ -823,7 +823,7 @@ pub fn run_passes(sess: &Session,
         if trans.modules.len() == 1 {
             // 1) Only one codegen unit.  In this case it's no difficulty
             //    to copy `foo.0.x` to `foo.x`.
-            let module_name = Some(&(trans.modules[0].name)[..]);
+            let module_name = Some(&trans.modules[0].name[..]);
             let path = crate_output.temp_path(output_type, module_name);
             copy_gracefully(&path,
                             &crate_output.path(output_type));
@@ -939,7 +939,7 @@ pub fn run_passes(sess: &Session,
 
         if metadata_config.emit_bc && !user_wants_bitcode {
             let path = crate_output.temp_path(OutputType::Bitcode,
-                                              Some(&trans.metadata_module.name[..]));
+                                              Some(&trans.metadata_module.name));
             remove(sess, &path);
         }
     }
index 80b563729f5ce3678a641d900e8559834d15037f..ec45c5593632ed3fddbc5bf18924ffdec73fc740 100644 (file)
@@ -514,7 +514,7 @@ pub fn call_memcpy<'a, 'tcx>(b: &Builder<'a, 'tcx>,
                                n_bytes: ValueRef,
                                align: u32) {
     let ccx = b.ccx;
-    let ptr_width = &ccx.sess().target.target.target_pointer_width[..];
+    let ptr_width = &ccx.sess().target.target.target_pointer_width;
     let key = format!("llvm.memcpy.p0i8.p0i8.i{}", ptr_width);
     let memcpy = ccx.get_intrinsic(&key);
     let src_ptr = b.pointercast(src, Type::i8p(ccx));
@@ -550,7 +550,7 @@ pub fn call_memset<'a, 'tcx>(b: &Builder<'a, 'tcx>,
                              size: ValueRef,
                              align: ValueRef,
                              volatile: bool) -> ValueRef {
-    let ptr_width = &b.ccx.sess().target.target.target_pointer_width[..];
+    let ptr_width = &b.ccx.sess().target.target.target_pointer_width;
     let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
     let llintrinsicfn = b.ccx.get_intrinsic(&intrinsic_key);
     let volatile = C_bool(b.ccx, volatile);
@@ -755,7 +755,6 @@ enum MetadataKind {
 
     let cstore = &cx.tcx().sess.cstore;
     let metadata = cstore.encode_metadata(cx.tcx(),
-                                          cx.export_map(),
                                           cx.link_meta(),
                                           exported_symbols);
     if kind == MetadataKind::Uncompressed {
@@ -766,7 +765,7 @@ enum MetadataKind {
     let mut compressed = cstore.metadata_encoding_version().to_vec();
     compressed.extend_from_slice(&flate::deflate_bytes(&metadata));
 
-    let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]);
+    let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed);
     let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
     let name = cx.metadata_symbol_name();
     let buf = CString::new(name).unwrap();
@@ -797,7 +796,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session,
                                  symbol_map: &SymbolMap<'tcx>,
                                  exported_symbols: &ExportedSymbols) {
     let export_threshold =
-        symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]);
+        symbol_export::crates_export_threshold(&sess.crate_types.borrow());
 
     let exported_symbols = exported_symbols
         .exported_symbols(LOCAL_CRATE)
@@ -1036,7 +1035,7 @@ pub fn find_exported_symbols(tcx: TyCtxt, reachable: NodeSet) -> NodeSet {
                 (generics.parent_types == 0 && generics.types.is_empty()) &&
                 // Functions marked with #[inline] are only ever translated
                 // with "internal" linkage and are never exported.
-                !attr::requests_inline(&attributes[..])
+                !attr::requests_inline(&attributes)
             }
 
             _ => false
@@ -1056,7 +1055,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     // particular items that will be processed.
     let krate = tcx.hir.krate();
 
-    let ty::CrateAnalysis { export_map, reachable, name, .. } = analysis;
+    let ty::CrateAnalysis { reachable, name, .. } = analysis;
     let exported_symbols = find_exported_symbols(tcx, reachable);
 
     let check_overflow = tcx.sess.overflow_checks();
@@ -1064,7 +1063,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let link_meta = link::build_link_meta(incremental_hashes_map, &name);
 
     let shared_ccx = SharedCrateContext::new(tcx,
-                                             export_map,
                                              link_meta.clone(),
                                              exported_symbols,
                                              check_overflow);
@@ -1576,7 +1574,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
                 cgus.dedup();
                 for &(ref cgu_name, linkage) in cgus.iter() {
                     output.push_str(" ");
-                    output.push_str(&cgu_name[..]);
+                    output.push_str(&cgu_name);
 
                     let linkage_abbrev = match linkage {
                         llvm::Linkage::ExternalLinkage => "External",
index a62f07042a7035749ba970c1d685e2ca6957fed1..8b1010d89fd9fd1c03c5f0e657de371ae090529c 100644 (file)
@@ -627,7 +627,7 @@ pub fn gepi(&self, base: ValueRef, ixs: &[usize]) -> ValueRef {
         } else {
             let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
             self.count_insn("gepi");
-            self.inbounds_gep(base, &v[..])
+            self.inbounds_gep(base, &v)
         }
     }
 
@@ -835,8 +835,8 @@ pub fn add_span_comment(&self, sp: Span, text: &str) {
             let s = format!("{} ({})",
                             text,
                             self.ccx.sess().codemap().span_to_string(sp));
-            debug!("{}", &s[..]);
-            self.add_comment(&s[..]);
+            debug!("{}", s);
+            self.add_comment(&s);
         }
     }
 
index 1c1395f1b7762604309616c59b07979824036bf8..73602dc420b3f9999be6534e41e5df674004cc91 100644 (file)
@@ -14,7 +14,6 @@
                        DepTrackingMapConfig, WorkProduct};
 use middle::cstore::LinkMeta;
 use rustc::hir;
-use rustc::hir::def::ExportMap;
 use rustc::hir::def_id::DefId;
 use rustc::traits;
 use debuginfo;
@@ -68,7 +67,6 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
     metadata_llmod: ModuleRef,
     metadata_llcx: ContextRef,
 
-    export_map: ExportMap,
     exported_symbols: NodeSet,
     link_meta: LinkMeta,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
@@ -402,7 +400,6 @@ unsafe fn create_context_and_module(sess: &Session, mod_name: &str) -> (ContextR
 
 impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
     pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
-               export_map: ExportMap,
                link_meta: LinkMeta,
                exported_symbols: NodeSet,
                check_overflow: bool)
@@ -459,7 +456,6 @@ pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
         SharedCrateContext {
             metadata_llmod: metadata_llmod,
             metadata_llcx: metadata_llcx,
-            export_map: export_map,
             exported_symbols: exported_symbols,
             link_meta: link_meta,
             empty_param_env: tcx.empty_parameter_environment(),
@@ -499,10 +495,6 @@ pub fn metadata_llcx(&self) -> ContextRef {
         self.metadata_llcx
     }
 
-    pub fn export_map<'a>(&'a self) -> &'a ExportMap {
-        &self.export_map
-    }
-
     pub fn exported_symbols<'a>(&'a self) -> &'a NodeSet {
         &self.exported_symbols
     }
@@ -702,10 +694,6 @@ pub fn td(&self) -> llvm::TargetDataRef {
         unsafe { llvm::LLVMRustGetModuleDataLayout(self.llmod()) }
     }
 
-    pub fn export_map<'a>(&'a self) -> &'a ExportMap {
-        &self.shared.export_map
-    }
-
     pub fn exported_symbols<'a>(&'a self) -> &'a NodeSet {
         &self.shared.exported_symbols
     }
index fcf6937d4b6d58670681f042e9bc6deedc71076b..382ca8ef01001ecfd31d922b869d143d5e3a04a0 100644 (file)
@@ -287,7 +287,7 @@ pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx
 
     match fulfill_obligation(scx, DUMMY_SP, trait_ref) {
         traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
-            scx.tcx().custom_coerce_unsized_kind(impl_def_id)
+            scx.tcx().coerce_unsized_info(impl_def_id).custom_kind.unwrap()
         }
         vtable => {
             bug!("invalid CoerceUnsized vtable: {:?}", vtable);
index f08d26373e50e979ef977f2c2346a77275eda918..07998aa4a30ea0aee8b6a633cd8ea96c529d2843 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 test = false
 
 [dependencies]
-log = { path = "../liblog" }
+log = "0.3"
 syntax = { path = "../libsyntax" }
 arena = { path = "../libarena" }
 fmt_macros = { path = "../libfmt_macros" }
index dfa7ababca0bbd8b97f143cbc9efcdca24d44ec6..5b0418921563a2dea56b4fa4bbf47897b57f3fc8 100644 (file)
@@ -479,14 +479,9 @@ fn assemble_inherent_impl_for_primitive(&mut self, lang_def_id: Option<DefId>) {
     }
 
     fn assemble_inherent_impl_candidates_for_type(&mut self, def_id: DefId) {
-        // Read the inherent implementation candidates for this type from the
-        // metadata if necessary.
-        self.tcx.populate_inherent_implementations_for_type_if_necessary(self.span, def_id);
-
-        if let Some(impl_infos) = self.tcx.maps.inherent_impls.borrow().get(&def_id) {
-            for &impl_def_id in impl_infos.iter() {
-                self.assemble_inherent_impl_probe(impl_def_id);
-            }
+        let impl_def_ids = ty::queries::inherent_impls::get(self.tcx, self.span, def_id);
+        for &impl_def_id in impl_def_ids.iter() {
+            self.assemble_inherent_impl_probe(impl_def_id);
         }
     }
 
index 3cdf9fc93ae600617a4c84aa935dfdb1adcaec07..47b41a75cf5316a9f041d1200ce362f260a45aea 100644 (file)
@@ -18,6 +18,7 @@
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::ParameterEnvironment;
 use rustc::ty::TypeFoldable;
+use rustc::ty::adjustment::CoerceUnsizedInfo;
 use rustc::ty::subst::Subst;
 use rustc::ty::util::CopyImplementationError;
 use rustc::infer;
@@ -159,11 +160,26 @@ fn visit_implementation_of_copy<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 }
 
 fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                                    coerce_unsized_trait: DefId,
+                                                    _: DefId,
                                                     impl_did: DefId) {
     debug!("visit_implementation_of_coerce_unsized: impl_did={:?}",
            impl_did);
 
+    // Just compute this for the side-effects, in particular reporting
+    // errors; other parts of the code may demand it for the info of
+    // course.
+    if impl_did.is_local() {
+        let span = tcx.def_span(impl_did);
+        ty::queries::coerce_unsized_info::get(tcx, span, impl_did);
+    }
+}
+
+pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                     impl_did: DefId)
+                                     -> CoerceUnsizedInfo {
+    debug!("compute_coerce_unsized_info(impl_did={:?})", impl_did);
+    let coerce_unsized_trait = tcx.lang_items.coerce_unsized_trait().unwrap();
+
     let unsize_trait = match tcx.lang_items.require(UnsizeTraitLangItem) {
         Ok(id) => id,
         Err(err) => {
@@ -171,16 +187,14 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         }
     };
 
-    let impl_node_id = if let Some(n) = tcx.hir.as_local_node_id(impl_did) {
-        n
-    } else {
-        debug!("visit_implementation_of_coerce_unsized(): impl not \
-                in this crate");
-        return;
-    };
+    // this provider should only get invoked for local def-ids
+    let impl_node_id = tcx.hir.as_local_node_id(impl_did).unwrap_or_else(|| {
+        bug!("coerce_unsized_info: invoked for non-local def-id {:?}", impl_did)
+    });
 
     let source = tcx.item_type(impl_did);
     let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
+    assert_eq!(trait_ref.def_id, coerce_unsized_trait);
     let target = trait_ref.substs.type_at(1);
     debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (bound)",
            source,
@@ -192,6 +206,8 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let target = target.subst(tcx, &param_env.free_substs);
     assert!(!source.has_escaping_regions());
 
+    let err_info = CoerceUnsizedInfo { custom_kind: None };
+
     debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (free)",
            source,
            target);
@@ -234,7 +250,7 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                definition; expected {}, found {}",
                               source_path,
                               target_path);
-                    return;
+                    return err_info;
                 }
 
                 let fields = &def_a.struct_variant().fields;
@@ -268,7 +284,7 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                               "the trait `CoerceUnsized` may only be implemented \
                                for a coercion between structures with one field \
                                being coerced, none found");
-                    return;
+                    return err_info;
                 } else if diff_fields.len() > 1 {
                     let item = tcx.hir.expect_item(impl_node_id);
                     let span = if let ItemImpl(.., Some(ref t), _, _) = item.node {
@@ -295,7 +311,7 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                           .join(", ")));
                     err.span_label(span, &format!("requires multiple coercions"));
                     err.emit();
-                    return;
+                    return err_info;
                 }
 
                 let (i, a, b) = diff_fields[0];
@@ -309,7 +325,7 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                           E0376,
                           "the trait `CoerceUnsized` may only be implemented \
                            for a coercion between structures");
-                return;
+                return err_info;
             }
         };
 
@@ -331,8 +347,8 @@ fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             .caller_bounds);
         infcx.resolve_regions_and_report_errors(&free_regions, impl_node_id);
 
-        if let Some(kind) = kind {
-            tcx.maps.custom_coerce_unsized_kind.borrow_mut().insert(impl_did, kind);
+        CoerceUnsizedInfo {
+            custom_kind: kind
         }
-    });
+    })
 }
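
With this hunk, the unsized-coercion data becomes an on-demand query result (CoerceUnsizedInfo) instead of a write into tcx.maps.custom_coerce_unsized_kind. A minimal sketch of the two access patterns the patch relies on, assuming tcx, span and impl_did are in scope; the surrounding code is hypothetical:

    // Force the check for a local impl; diagnostics are emitted as a
    // side-effect of computing the query result.
    let _ = ty::queries::coerce_unsized_info::get(tcx, span, impl_did);

    // Consumers (e.g. trans) read the cached result and inspect the
    // optional custom coercion kind.
    if let Some(kind) = tcx.coerce_unsized_info(impl_did).custom_kind {
        let _ = kind; // a custom CoerceUnsized impl between structs
    }
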
diff --git a/src/librustc_typeck/coherence/inherent.rs b/src/librustc_typeck/coherence/inherent.rs
deleted file mode 100644 (file)
index e3b4ba9..0000000
+++ /dev/null
@@ -1,356 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::dep_graph::DepNode;
-use rustc::hir::def_id::DefId;
-use rustc::hir;
-use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use rustc::lint;
-use rustc::traits::{self, Reveal};
-use rustc::ty::{self, TyCtxt};
-
-use syntax::ast;
-use syntax_pos::Span;
-
-struct InherentCollect<'a, 'tcx: 'a> {
-    tcx: TyCtxt<'a, 'tcx, 'tcx>
-}
-
-impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentCollect<'a, 'tcx> {
-    fn visit_item(&mut self, item: &hir::Item) {
-        let (unsafety, ty) = match item.node {
-            hir::ItemImpl(unsafety, .., None, ref ty, _) => (unsafety, ty),
-            _ => return
-        };
-
-        match unsafety {
-            hir::Unsafety::Normal => {
-                // OK
-            }
-            hir::Unsafety::Unsafe => {
-                span_err!(self.tcx.sess,
-                          item.span,
-                          E0197,
-                          "inherent impls cannot be declared as unsafe");
-            }
-        }
-
-        let def_id = self.tcx.hir.local_def_id(item.id);
-        let self_ty = self.tcx.item_type(def_id);
-        match self_ty.sty {
-            ty::TyAdt(def, _) => {
-                self.check_def_id(item, def.did);
-            }
-            ty::TyDynamic(ref data, ..) if data.principal().is_some() => {
-                self.check_def_id(item, data.principal().unwrap().def_id());
-            }
-            ty::TyChar => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.char_impl(),
-                                          "char",
-                                          "char",
-                                          item.span);
-            }
-            ty::TyStr => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.str_impl(),
-                                          "str",
-                                          "str",
-                                          item.span);
-            }
-            ty::TySlice(_) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.slice_impl(),
-                                          "slice",
-                                          "[T]",
-                                          item.span);
-            }
-            ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutImmutable }) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.const_ptr_impl(),
-                                          "const_ptr",
-                                          "*const T",
-                                          item.span);
-            }
-            ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.mut_ptr_impl(),
-                                          "mut_ptr",
-                                          "*mut T",
-                                          item.span);
-            }
-            ty::TyInt(ast::IntTy::I8) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.i8_impl(),
-                                          "i8",
-                                          "i8",
-                                          item.span);
-            }
-            ty::TyInt(ast::IntTy::I16) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.i16_impl(),
-                                          "i16",
-                                          "i16",
-                                          item.span);
-            }
-            ty::TyInt(ast::IntTy::I32) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.i32_impl(),
-                                          "i32",
-                                          "i32",
-                                          item.span);
-            }
-            ty::TyInt(ast::IntTy::I64) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.i64_impl(),
-                                          "i64",
-                                          "i64",
-                                          item.span);
-            }
-            ty::TyInt(ast::IntTy::I128) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.i128_impl(),
-                                          "i128",
-                                          "i128",
-                                          item.span);
-            }
-            ty::TyInt(ast::IntTy::Is) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.isize_impl(),
-                                          "isize",
-                                          "isize",
-                                          item.span);
-            }
-            ty::TyUint(ast::UintTy::U8) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.u8_impl(),
-                                          "u8",
-                                          "u8",
-                                          item.span);
-            }
-            ty::TyUint(ast::UintTy::U16) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.u16_impl(),
-                                          "u16",
-                                          "u16",
-                                          item.span);
-            }
-            ty::TyUint(ast::UintTy::U32) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.u32_impl(),
-                                          "u32",
-                                          "u32",
-                                          item.span);
-            }
-            ty::TyUint(ast::UintTy::U64) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.u64_impl(),
-                                          "u64",
-                                          "u64",
-                                          item.span);
-            }
-            ty::TyUint(ast::UintTy::U128) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.u128_impl(),
-                                          "u128",
-                                          "u128",
-                                          item.span);
-            }
-            ty::TyUint(ast::UintTy::Us) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.usize_impl(),
-                                          "usize",
-                                          "usize",
-                                          item.span);
-            }
-            ty::TyFloat(ast::FloatTy::F32) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.f32_impl(),
-                                          "f32",
-                                          "f32",
-                                          item.span);
-            }
-            ty::TyFloat(ast::FloatTy::F64) => {
-                self.check_primitive_impl(def_id,
-                                          self.tcx.lang_items.f64_impl(),
-                                          "f64",
-                                          "f64",
-                                          item.span);
-            }
-            ty::TyError => {
-                return;
-            }
-            _ => {
-                struct_span_err!(self.tcx.sess,
-                                 ty.span,
-                                 E0118,
-                                 "no base type found for inherent implementation")
-                    .span_label(ty.span, &format!("impl requires a base type"))
-                    .note(&format!("either implement a trait on it or create a newtype \
-                                    to wrap it instead"))
-                    .emit();
-                return;
-            }
-        }
-    }
-
-    fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
-    }
-
-    fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
-    }
-}
-
-impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
-    fn check_def_id(&self, item: &hir::Item, def_id: DefId) {
-        if def_id.is_local() {
-            // Add the implementation to the mapping from implementation to base
-            // type def ID, if there is a base type for this implementation and
-            // the implementation does not have any associated traits.
-            let impl_def_id = self.tcx.hir.local_def_id(item.id);
-
-            // Subtle: it'd be better to collect these into a local map
-            // and then write the vector only once all items are known,
-            // but that leads to degenerate dep-graphs. The problem is
-            // that the write of that big vector winds up having reads
-            // from *all* impls in the krate, since we've lost the
-            // precision basically.  This would be ok in the firewall
-            // model so once we've made progress towards that we can modify
-            // the strategy here. In the meantime, using `push` is ok
-            // because we are doing this as a pre-pass before anyone
-            // actually reads from `inherent_impls` -- and we know this is
-            // true because we hold the refcell lock.
-            self.tcx.maps.inherent_impls.borrow_mut().push(def_id, impl_def_id);
-        } else {
-            struct_span_err!(self.tcx.sess,
-                             item.span,
-                             E0116,
-                             "cannot define inherent `impl` for a type outside of the crate \
-                              where the type is defined")
-                .span_label(item.span,
-                            &format!("impl for type defined outside of crate."))
-                .note("define and implement a trait or new type instead")
-                .emit();
-        }
-    }
-
-    fn check_primitive_impl(&self,
-                            impl_def_id: DefId,
-                            lang_def_id: Option<DefId>,
-                            lang: &str,
-                            ty: &str,
-                            span: Span) {
-        match lang_def_id {
-            Some(lang_def_id) if lang_def_id == impl_def_id => {
-                // OK
-            }
-            _ => {
-                struct_span_err!(self.tcx.sess,
-                                 span,
-                                 E0390,
-                                 "only a single inherent implementation marked with `#[lang = \
-                                  \"{}\"]` is allowed for the `{}` primitive",
-                                 lang,
-                                 ty)
-                    .span_help(span, "consider using a trait to implement these methods")
-                    .emit();
-            }
-        }
-    }
-}
-
-struct InherentOverlapChecker<'a, 'tcx: 'a> {
-    tcx: TyCtxt<'a, 'tcx, 'tcx>
-}
-
-impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
-    fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) {
-        #[derive(Copy, Clone, PartialEq)]
-        enum Namespace {
-            Type,
-            Value,
-        }
-
-        let name_and_namespace = |def_id| {
-            let item = self.tcx.associated_item(def_id);
-            (item.name, match item.kind {
-                ty::AssociatedKind::Type => Namespace::Type,
-                ty::AssociatedKind::Const |
-                ty::AssociatedKind::Method => Namespace::Value,
-            })
-        };
-
-        let impl_items1 = self.tcx.associated_item_def_ids(impl1);
-        let impl_items2 = self.tcx.associated_item_def_ids(impl2);
-
-        for &item1 in &impl_items1[..] {
-            let (name, namespace) = name_and_namespace(item1);
-
-            for &item2 in &impl_items2[..] {
-                if (name, namespace) == name_and_namespace(item2) {
-                    let msg = format!("duplicate definitions with name `{}`", name);
-                    let node_id = self.tcx.hir.as_local_node_id(item1).unwrap();
-                    self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS,
-                                           node_id,
-                                           self.tcx.span_of_impl(item1).unwrap(),
-                                           msg);
-                }
-            }
-        }
-    }
-
-    fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
-        let _task = self.tcx.dep_graph.in_task(DepNode::CoherenceOverlapInherentCheck(ty_def_id));
-
-        let inherent_impls = self.tcx.maps.inherent_impls.borrow();
-        let impls = match inherent_impls.get(&ty_def_id) {
-            Some(impls) => impls,
-            None => return,
-        };
-
-        for (i, &impl1_def_id) in impls.iter().enumerate() {
-            for &impl2_def_id in &impls[(i + 1)..] {
-                self.tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
-                    if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() {
-                        self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id)
-                    }
-                });
-            }
-        }
-    }
-}
-
-impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
-    fn visit_item(&mut self, item: &'v hir::Item) {
-        match item.node {
-            hir::ItemEnum(..) |
-            hir::ItemStruct(..) |
-            hir::ItemTrait(..) |
-            hir::ItemUnion(..) => {
-                let type_def_id = self.tcx.hir.local_def_id(item.id);
-                self.check_for_overlapping_inherent_impls(type_def_id);
-            }
-            _ => {}
-        }
-    }
-
-    fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
-    }
-
-    fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
-    }
-}
-
-pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-    tcx.visit_all_item_likes_in_krate(DepNode::CoherenceCheckImpl,
-                                      &mut InherentCollect { tcx });
-    tcx.visit_all_item_likes_in_krate(DepNode::CoherenceOverlapCheckSpecial,
-                                      &mut InherentOverlapChecker { tcx });
-}
diff --git a/src/librustc_typeck/coherence/inherent_impls.rs b/src/librustc_typeck/coherence/inherent_impls.rs
new file mode 100644 (file)
index 0000000..3a39df5
--- /dev/null
@@ -0,0 +1,325 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! The code in this module gathers up all of the inherent impls in
+//! the current crate and organizes them in a map. It winds up
+//! touching the whole crate and thus must be recomputed completely
+//! for any change, but it is very cheap to compute. In practice, most
+//! code in the compiler never *directly* requests this map. Instead,
+//! it requests the inherent impls specific to some type (via
+//! `ty::queries::inherent_impls::get(def_id)`). That value, however,
+//! is computed by selecting an item from this table.
+
+use rustc::dep_graph::DepNode;
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use rustc::hir;
+use rustc::hir::itemlikevisit::ItemLikeVisitor;
+use rustc::ty::{self, CrateInherentImpls, TyCtxt};
+use rustc::util::nodemap::DefIdMap;
+
+use std::rc::Rc;
+use syntax::ast;
+use syntax_pos::{DUMMY_SP, Span};
+
+/// On-demand query: yields a map containing all types mapped to their inherent impls.
+pub fn crate_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                      crate_num: CrateNum)
+                                      -> CrateInherentImpls {
+    assert_eq!(crate_num, LOCAL_CRATE);
+
+    let krate = tcx.hir.krate();
+    let mut collect = InherentCollect {
+        tcx,
+        impls_map: CrateInherentImpls {
+            inherent_impls: DefIdMap()
+        }
+    };
+    krate.visit_all_item_likes(&mut collect);
+    collect.impls_map
+}
+
+/// On-demand query: yields a vector of the inherent impls for a specific type.
+pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                ty_def_id: DefId)
+                                -> Rc<Vec<DefId>> {
+    assert!(ty_def_id.is_local());
+
+    // NB. Until we adopt the red-green dep-tracking algorithm (see
+    // [the plan] for details on that), we do some hackery here to get
+    // the dependencies correct.  Basically, we use a `with_ignore` to
+    // read the result we want. If we didn't have the `with_ignore`,
+    // we would wind up with a dependency on the entire crate, which
+    // we don't want. Then we go and add dependencies on all the impls
+    // in the result (which is what we wanted).
+    //
+    // The result is a graph with an edge from `Hir(I)` for every impl
+    // `I` defined on some type `T` to `CoherentInherentImpls(T)`,
+    // thus ensuring that if any of those impls change, the set of
+    // inherent impls is considered dirty.
+    //
+    // [the plan]: https://github.com/rust-lang/rust-roadmap/issues/4
+
+    let result = tcx.dep_graph.with_ignore(|| {
+        let crate_map = ty::queries::crate_inherent_impls::get(tcx, DUMMY_SP, ty_def_id.krate);
+        match crate_map.inherent_impls.get(&ty_def_id) {
+            Some(v) => v.clone(),
+            None => Rc::new(vec![]),
+        }
+    });
+
+    for &impl_def_id in &result[..] {
+        tcx.dep_graph.read(DepNode::Hir(impl_def_id));
+    }
+
+    result
+}
+
+struct InherentCollect<'a, 'tcx: 'a> {
+    tcx: TyCtxt<'a, 'tcx, 'tcx>,
+    impls_map: CrateInherentImpls,
+}
+
+impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentCollect<'a, 'tcx> {
+    fn visit_item(&mut self, item: &hir::Item) {
+        let (unsafety, ty) = match item.node {
+            hir::ItemImpl(unsafety, .., None, ref ty, _) => (unsafety, ty),
+            _ => return
+        };
+
+        match unsafety {
+            hir::Unsafety::Normal => {
+                // OK
+            }
+            hir::Unsafety::Unsafe => {
+                span_err!(self.tcx.sess,
+                          item.span,
+                          E0197,
+                          "inherent impls cannot be declared as unsafe");
+            }
+        }
+
+        let def_id = self.tcx.hir.local_def_id(item.id);
+        let self_ty = self.tcx.item_type(def_id);
+        match self_ty.sty {
+            ty::TyAdt(def, _) => {
+                self.check_def_id(item, def.did);
+            }
+            ty::TyDynamic(ref data, ..) if data.principal().is_some() => {
+                self.check_def_id(item, data.principal().unwrap().def_id());
+            }
+            ty::TyChar => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.char_impl(),
+                                          "char",
+                                          "char",
+                                          item.span);
+            }
+            ty::TyStr => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.str_impl(),
+                                          "str",
+                                          "str",
+                                          item.span);
+            }
+            ty::TySlice(_) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.slice_impl(),
+                                          "slice",
+                                          "[T]",
+                                          item.span);
+            }
+            ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutImmutable }) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.const_ptr_impl(),
+                                          "const_ptr",
+                                          "*const T",
+                                          item.span);
+            }
+            ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.mut_ptr_impl(),
+                                          "mut_ptr",
+                                          "*mut T",
+                                          item.span);
+            }
+            ty::TyInt(ast::IntTy::I8) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.i8_impl(),
+                                          "i8",
+                                          "i8",
+                                          item.span);
+            }
+            ty::TyInt(ast::IntTy::I16) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.i16_impl(),
+                                          "i16",
+                                          "i16",
+                                          item.span);
+            }
+            ty::TyInt(ast::IntTy::I32) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.i32_impl(),
+                                          "i32",
+                                          "i32",
+                                          item.span);
+            }
+            ty::TyInt(ast::IntTy::I64) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.i64_impl(),
+                                          "i64",
+                                          "i64",
+                                          item.span);
+            }
+            ty::TyInt(ast::IntTy::I128) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.i128_impl(),
+                                          "i128",
+                                          "i128",
+                                          item.span);
+            }
+            ty::TyInt(ast::IntTy::Is) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.isize_impl(),
+                                          "isize",
+                                          "isize",
+                                          item.span);
+            }
+            ty::TyUint(ast::UintTy::U8) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.u8_impl(),
+                                          "u8",
+                                          "u8",
+                                          item.span);
+            }
+            ty::TyUint(ast::UintTy::U16) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.u16_impl(),
+                                          "u16",
+                                          "u16",
+                                          item.span);
+            }
+            ty::TyUint(ast::UintTy::U32) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.u32_impl(),
+                                          "u32",
+                                          "u32",
+                                          item.span);
+            }
+            ty::TyUint(ast::UintTy::U64) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.u64_impl(),
+                                          "u64",
+                                          "u64",
+                                          item.span);
+            }
+            ty::TyUint(ast::UintTy::U128) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.u128_impl(),
+                                          "u128",
+                                          "u128",
+                                          item.span);
+            }
+            ty::TyUint(ast::UintTy::Us) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.usize_impl(),
+                                          "usize",
+                                          "usize",
+                                          item.span);
+            }
+            ty::TyFloat(ast::FloatTy::F32) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.f32_impl(),
+                                          "f32",
+                                          "f32",
+                                          item.span);
+            }
+            ty::TyFloat(ast::FloatTy::F64) => {
+                self.check_primitive_impl(def_id,
+                                          self.tcx.lang_items.f64_impl(),
+                                          "f64",
+                                          "f64",
+                                          item.span);
+            }
+            ty::TyError => {
+                return;
+            }
+            _ => {
+                struct_span_err!(self.tcx.sess,
+                                 ty.span,
+                                 E0118,
+                                 "no base type found for inherent implementation")
+                    .span_label(ty.span, &format!("impl requires a base type"))
+                    .note(&format!("either implement a trait on it or create a newtype \
+                                    to wrap it instead"))
+                    .emit();
+                return;
+            }
+        }
+    }
+
+    fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
+    }
+
+    fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
+    }
+}
+
+impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
+    fn check_def_id(&mut self, item: &hir::Item, def_id: DefId) {
+        if def_id.is_local() {
+            // Add the implementation to the mapping from implementation to base
+            // type def ID, if there is a base type for this implementation and
+            // the implementation does not have any associated traits.
+            let impl_def_id = self.tcx.hir.local_def_id(item.id);
+            let mut rc_vec = self.impls_map.inherent_impls
+                                           .entry(def_id)
+                                           .or_insert_with(|| Rc::new(vec![]));
+
+            // At this point, there should not be any clones of the
+            // `Rc`, so we can still safely push into it in place:
+            Rc::get_mut(&mut rc_vec).unwrap().push(impl_def_id);
+        } else {
+            struct_span_err!(self.tcx.sess,
+                             item.span,
+                             E0116,
+                             "cannot define inherent `impl` for a type outside of the crate \
+                              where the type is defined")
+                .span_label(item.span,
+                            &format!("impl for type defined outside of crate."))
+                .note("define and implement a trait or new type instead")
+                .emit();
+        }
+    }
+
+    fn check_primitive_impl(&self,
+                            impl_def_id: DefId,
+                            lang_def_id: Option<DefId>,
+                            lang: &str,
+                            ty: &str,
+                            span: Span) {
+        match lang_def_id {
+            Some(lang_def_id) if lang_def_id == impl_def_id => {
+                // OK
+            }
+            _ => {
+                struct_span_err!(self.tcx.sess,
+                                 span,
+                                 E0390,
+                                 "only a single inherent implementation marked with `#[lang = \
+                                  \"{}\"]` is allowed for the `{}` primitive",
+                                 lang,
+                                 ty)
+                    .span_help(span, "consider using a trait to implement these methods")
+                    .emit();
+            }
+        }
+    }
+}
+
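[Editorial aside, not part of the commit: the restriction enforced by `check_def_id` above is that inherent impls may only target types defined in the local crate (E0116), and the error note suggests a newtype as the workaround. A minimal hedged sketch; `Wrapper` is a hypothetical local type:]

    // `Vec<u8>` is defined in another crate, so an inherent impl on it is rejected:
    //
    //     impl Vec<u8> {              // error[E0116]: cannot define inherent `impl` for a
    //         fn reset(&mut self) {}  // type outside of the crate where the type is defined
    //     }
    //
    // The suggested workaround is a local newtype that owns the foreign type:
    struct Wrapper(Vec<u8>);

    impl Wrapper {
        fn reset(&mut self) { self.0.clear() }
    }

    fn main() {
        let mut w = Wrapper(vec![1, 2, 3]);
        w.reset();
        assert!(w.0.is_empty());
    }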
diff --git a/src/librustc_typeck/coherence/inherent_impls_overlap.rs b/src/librustc_typeck/coherence/inherent_impls_overlap.rs
new file mode 100644 (file)
index 0000000..4b36072
--- /dev/null
@@ -0,0 +1,102 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use rustc::hir;
+use rustc::hir::itemlikevisit::ItemLikeVisitor;
+use rustc::lint;
+use rustc::traits::{self, Reveal};
+use rustc::ty::{self, TyCtxt};
+
+use syntax_pos::DUMMY_SP;
+
+pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                                    crate_num: CrateNum) {
+    assert_eq!(crate_num, LOCAL_CRATE);
+    let krate = tcx.hir.krate();
+    krate.visit_all_item_likes(&mut InherentOverlapChecker { tcx });
+}
+
+struct InherentOverlapChecker<'a, 'tcx: 'a> {
+    tcx: TyCtxt<'a, 'tcx, 'tcx>
+}
+
+impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
+    fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) {
+        #[derive(Copy, Clone, PartialEq)]
+        enum Namespace {
+            Type,
+            Value,
+        }
+
+        let name_and_namespace = |def_id| {
+            let item = self.tcx.associated_item(def_id);
+            (item.name, match item.kind {
+                ty::AssociatedKind::Type => Namespace::Type,
+                ty::AssociatedKind::Const |
+                ty::AssociatedKind::Method => Namespace::Value,
+            })
+        };
+
+        let impl_items1 = self.tcx.associated_item_def_ids(impl1);
+        let impl_items2 = self.tcx.associated_item_def_ids(impl2);
+
+        for &item1 in &impl_items1[..] {
+            let (name, namespace) = name_and_namespace(item1);
+
+            for &item2 in &impl_items2[..] {
+                if (name, namespace) == name_and_namespace(item2) {
+                    let msg = format!("duplicate definitions with name `{}`", name);
+                    let node_id = self.tcx.hir.as_local_node_id(item1).unwrap();
+                    self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS,
+                                           node_id,
+                                           self.tcx.span_of_impl(item1).unwrap(),
+                                           msg);
+                }
+            }
+        }
+    }
+
+    fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
+        let impls = ty::queries::inherent_impls::get(self.tcx, DUMMY_SP, ty_def_id);
+
+        for (i, &impl1_def_id) in impls.iter().enumerate() {
+            for &impl2_def_id in &impls[(i + 1)..] {
+                self.tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
+                    if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() {
+                        self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id)
+                    }
+                });
+            }
+        }
+    }
+}
+
+impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
+    fn visit_item(&mut self, item: &'v hir::Item) {
+        match item.node {
+            hir::ItemEnum(..) |
+            hir::ItemStruct(..) |
+            hir::ItemTrait(..) |
+            hir::ItemUnion(..) => {
+                let type_def_id = self.tcx.hir.local_def_id(item.id);
+                self.check_for_overlapping_inherent_impls(type_def_id);
+            }
+            _ => {}
+        }
+    }
+
+    fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
+    }
+
+    fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
+    }
+}
+
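[Editorial aside, not code from the commit: what `check_for_common_items_in_impls` reports is two inherent impl blocks for the same type that define the same name in the same namespace; `Foo` below is hypothetical.]

    struct Foo;

    // Two inherent impl blocks for one type may coexist as long as they do not
    // define the same name in the same namespace:
    impl Foo {
        fn first(&self) -> u32 { 1 }
    }

    impl Foo {
        fn second(&self) -> u32 { 2 }
        // Adding `fn first(&self) -> u32 { 0 }` here would be reported by the
        // checker above as "duplicate definitions with name `first`".
    }

    fn main() {
        let f = Foo;
        assert_eq!(f.first() + f.second(), 3);
    }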
index 9ecf42daeaae52021fd7f60f0a4dfd91de90f3d7..b3a7b612dd5b8532af75ac7aee58742cbfb29953 100644 (file)
@@ -24,7 +24,8 @@
 use syntax_pos::DUMMY_SP;
 
 mod builtin;
-mod inherent;
+mod inherent_impls;
+mod inherent_impls_overlap;
 mod orphan;
 mod overlap;
 mod unsafety;
@@ -102,9 +103,16 @@ fn enforce_trait_manually_implementable(tcx: TyCtxt, impl_def_id: DefId, trait_d
 }
 
 pub fn provide(providers: &mut Providers) {
+    use self::builtin::coerce_unsized_info;
+    use self::inherent_impls::{crate_inherent_impls, inherent_impls};
+    use self::inherent_impls_overlap::crate_inherent_impls_overlap_check;
+
     *providers = Providers {
         coherent_trait,
-        coherent_inherent_impls,
+        crate_inherent_impls,
+        inherent_impls,
+        crate_inherent_impls_overlap_check,
+        coerce_unsized_info,
         ..*providers
     };
 }
@@ -123,10 +131,6 @@ fn coherent_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     builtin::check_trait(tcx, def_id);
 }
 
-fn coherent_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, _: CrateNum) {
-    inherent::check(tcx);
-}
-
 pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     let _task = tcx.dep_graph.in_task(DepNode::Coherence);
     for &trait_def_id in tcx.hir.krate().trait_impls.keys() {
@@ -137,5 +141,7 @@ pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
     orphan::check(tcx);
     overlap::check_default_impls(tcx);
 
-    ty::queries::coherent_inherent_impls::get(tcx, DUMMY_SP, LOCAL_CRATE);
+    // these queries are executed for side-effects (error reporting):
+    ty::queries::crate_inherent_impls::get(tcx, DUMMY_SP, LOCAL_CRATE);
+    ty::queries::crate_inherent_impls_overlap_check::get(tcx, DUMMY_SP, LOCAL_CRATE);
 }

index 93c0bd6d6d836fb646f64c781d02d05949126e29..1c479ce1d0157beaee792e163f9caff0f8832316 100644 (file)
@@ -11,11 +11,13 @@ crate-type = ["dylib"]
 
 [dependencies]
 arena = { path = "../libarena" }
+env_logger = { version = "0.4", default-features = false }
+log = "0.3"
 rustc = { path = "../librustc" }
 rustc_back = { path = "../librustc_back" }
 rustc_const_eval = { path = "../librustc_const_eval" }
-rustc_driver = { path = "../librustc_driver" }
 rustc_data_structures = { path = "../librustc_data_structures" }
+rustc_driver = { path = "../librustc_driver" }
 rustc_errors = { path = "../librustc_errors" }
 rustc_lint = { path = "../librustc_lint" }
 rustc_metadata = { path = "../librustc_metadata" }
@@ -24,7 +26,6 @@ rustc_trans = { path = "../librustc_trans" }
 serialize = { path = "../libserialize" }
 syntax = { path = "../libsyntax" }
 syntax_pos = { path = "../libsyntax_pos" }
-log = { path = "../liblog" }
 
 [build-dependencies]
 build_helper = { path = "../build_helper" }
index c4476483186c7a35ed1de6bb20e9de0e52ff179f..cc30fdf56fc346c08b411d8b027a6400cfbb33f1 100644 (file)
@@ -232,14 +232,12 @@ fn build_type_alias(cx: &DocContext, did: DefId) -> clean::Typedef {
 
 pub fn build_impls(cx: &DocContext, did: DefId) -> Vec<clean::Item> {
     let tcx = cx.tcx;
-    tcx.populate_inherent_implementations_for_type_if_necessary(DUMMY_SP, did);
     let mut impls = Vec::new();
 
-    if let Some(i) = tcx.maps.inherent_impls.borrow().get(&did) {
-        for &did in i.iter() {
-            build_impl(cx, did, &mut impls);
-        }
+    for &did in ty::queries::inherent_impls::get(tcx, DUMMY_SP, did).iter() {
+        build_impl(cx, did, &mut impls);
     }
+
     // If this is the first time we've inlined something from another crate, then
     // we inline *all* impls from all the crates into this crate. Note that there's
     // currently no way for us to filter this based on type, and we likely need
index 0a9db2c26464ce6370b4c4c573742e469bf55959..a47d5f9937a02c337745038043e5d09735b9c2b2 100644 (file)
@@ -13,7 +13,7 @@
 use rustc::dep_graph::DepGraph;
 use rustc::session::{self, config};
 use rustc::hir::def_id::DefId;
-use rustc::hir::def::{Def, ExportMap};
+use rustc::hir::def::Def;
 use rustc::middle::privacy::AccessLevels;
 use rustc::ty::{self, TyCtxt, GlobalArenas};
 use rustc::hir::map as hir_map;
@@ -64,7 +64,6 @@ pub struct DocContext<'a, 'tcx: 'a> {
     pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>,
     /// Table node id of lifetime parameter definition -> substituted lifetime
     pub lt_substs: RefCell<FxHashMap<ast::NodeId, clean::Lifetime>>,
-    pub export_map: ExportMap,
 }
 
 impl<'a, 'tcx> DocContext<'a, 'tcx> {
@@ -180,13 +179,13 @@ pub fn run_core(search_paths: SearchPaths,
             sess.fatal("Compilation failed, aborting rustdoc");
         }
 
-        let ty::CrateAnalysis { access_levels, export_map, .. } = analysis;
+        let ty::CrateAnalysis { access_levels, .. } = analysis;
 
         // Convert from a NodeId set to a DefId set since we don't always have easy access
         // to the map from defid -> nodeid
         let access_levels = AccessLevels {
-            map: access_levels.map.into_iter()
-                                  .map(|(k, v)| (tcx.hir.local_def_id(k), v))
+            map: access_levels.map.iter()
+                                  .map(|(&k, &v)| (tcx.hir.local_def_id(k), v))
                                   .collect()
         };
 
@@ -198,7 +197,6 @@ pub fn run_core(search_paths: SearchPaths,
             renderinfo: Default::default(),
             ty_substs: Default::default(),
             lt_substs: Default::default(),
-            export_map: export_map,
         };
         debug!("crate: {:?}", tcx.hir.krate());
 
index 5c94032c6b9cf74df6fd754e13e4bd4b4079892a..612e765a499b7b93f3e5483c581e85e7d779a291 100644 (file)
@@ -2611,7 +2611,7 @@ fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
     if attr.is_word() {
         Some(format!("{}", name))
     } else if let Some(v) = attr.value_str() {
-        Some(format!("{} = {:?}", name, &v.as_str()[..]))
+        Some(format!("{} = {:?}", name, v.as_str()))
     } else if let Some(values) = attr.meta_item_list() {
         let display: Vec<_> = values.iter().filter_map(|attr| {
             attr.meta_item().and_then(|mi| render_attribute(mi))
@@ -2642,7 +2642,7 @@ fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result {
 
     for attr in &it.attrs.other_attrs {
         let name = attr.name().unwrap();
-        if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) {
+        if !ATTRIBUTE_WHITELIST.contains(&&*name.as_str()) {
             continue;
         }
         if let Some(s) = render_attribute(&attr.meta().unwrap()) {
index 84f69cd35045c91fd833f4dce04235ecd14048d6..8dd03f6edc4d57cde00ef209a2a30d2ad73b77f5 100644 (file)
@@ -30,6 +30,7 @@
 
 extern crate arena;
 extern crate getopts;
+extern crate env_logger;
 extern crate libc;
 extern crate rustc;
 extern crate rustc_const_eval;
@@ -99,6 +100,7 @@ struct Output {
 
 pub fn main() {
     const STACK_SIZE: usize = 32_000_000; // 32MB
+    env_logger::init().unwrap();
     let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
         let s = env::args().collect::<Vec<_>>();
         main_args(&s)
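[Editorial aside: rustdoc's `main` now calls `env_logger::init()` before spawning its worker thread, so its `debug!`/`info!` output is driven by the RUST_LOG environment variable. A minimal sketch of that pattern, assuming only the `log` 0.3 / `env_logger` 0.4 dependencies added in the Cargo.toml hunk above:]

    #[macro_use]
    extern crate log;
    extern crate env_logger;

    fn main() {
        // Reads RUST_LOG (e.g. RUST_LOG=debug) and installs the global logger.
        // init() fails if a logger was already installed, hence the unwrap at startup.
        env_logger::init().unwrap();
        debug!("only printed when RUST_LOG enables the debug level");
    }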
index 4a909f8e2a9728c8117919d6cedc84104246e737..2793307697852b8aa1b36a0e971e2bc115f549e4 100644 (file)
@@ -198,7 +198,7 @@ pub fn visit_mod_contents(&mut self, span: Span, attrs: hir::HirVec<ast::Attribu
             self.visit_item(item, None, &mut om);
         }
         self.inside_public_path = orig_inside_public_path;
-        if let Some(exports) = self.cx.export_map.get(&id) {
+        if let Some(exports) = self.cx.tcx.export_map.get(&id) {
             for export in exports {
                 if let Def::Macro(def_id, ..) = export.def {
                     if def_id.krate == LOCAL_CRATE {
index 850885a8c0f3aaf76799ab398ce79bf132f8e00f..5b628d51d1513986eb31cbc17d2ff19756553849 100644 (file)
@@ -1290,28 +1290,42 @@ pub trait BufRead: Read {
     /// If an I/O error is encountered then all bytes read so far will be
     /// present in `buf` and its length will have been adjusted appropriately.
     ///
-    /// # Examples
-    ///
-    /// A locked standard input implements `BufRead`. In this example, we'll
-    /// read from standard input until we see an `a` byte.
-    ///
     /// [`fill_buf`]: #tymethod.fill_buf
     /// [`ErrorKind::Interrupted`]: enum.ErrorKind.html#variant.Interrupted
     ///
-    /// ```
-    /// use std::io;
-    /// use std::io::prelude::*;
+    /// # Examples
     ///
-    /// fn foo() -> io::Result<()> {
-    /// let stdin = io::stdin();
-    /// let mut stdin = stdin.lock();
-    /// let mut buffer = Vec::new();
+    /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In
+    /// this example, we use [`Cursor`] to read all the bytes in a byte slice
+    /// in hyphen-delimited segments:
     ///
-    /// stdin.read_until(b'a', &mut buffer)?;
+    /// [`Cursor`]: struct.Cursor.html
     ///
-    /// println!("{:?}", buffer);
-    /// # Ok(())
-    /// # }
+    /// ```
+    /// use std::io::{self, BufRead};
+    ///
+    /// let mut cursor = io::Cursor::new(b"lorem-ipsum");
+    /// let mut buf = vec![];
+    ///
+    /// // cursor is at 'l'
+    /// let num_bytes = cursor.read_until(b'-', &mut buf)
+    ///     .expect("reading from cursor won't fail");
+    /// assert_eq!(num_bytes, 6);
+    /// assert_eq!(buf, b"lorem-");
+    /// buf.clear();
+    ///
+    /// // cursor is at 'i'
+    /// let num_bytes = cursor.read_until(b'-', &mut buf)
+    ///     .expect("reading from cursor won't fail");
+    /// assert_eq!(num_bytes, 5);
+    /// assert_eq!(buf, b"ipsum");
+    /// buf.clear();
+    ///
+    /// // cursor is at EOF
+    /// let num_bytes = cursor.read_until(b'-', &mut buf)
+    ///     .expect("reading from cursor won't fail");
+    /// assert_eq!(num_bytes, 0);
+    /// assert_eq!(buf, b"");
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> Result<usize> {
@@ -1337,28 +1351,36 @@ fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> Result<usize> {
     ///
     /// # Examples
     ///
-    /// A locked standard input implements `BufRead`. In this example, we'll
-    /// read all of the lines from standard input. If we were to do this in
-    /// an actual project, the [`lines`] method would be easier, of
-    /// course.
+    /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In
+    /// this example, we use [`Cursor`] to read all the lines in a byte slice:
     ///
-    /// [`lines`]: #method.lines
-    /// [`read_until`]: #method.read_until
+    /// [`Cursor`]: struct.Cursor.html
     ///
     /// ```
-    /// use std::io;
-    /// use std::io::prelude::*;
-    ///
-    /// let stdin = io::stdin();
-    /// let mut stdin = stdin.lock();
-    /// let mut buffer = String::new();
-    ///
-    /// while stdin.read_line(&mut buffer).unwrap() > 0 {
-    ///     // work with buffer
-    ///     println!("{:?}", buffer);
-    ///
-    ///     buffer.clear();
-    /// }
+    /// use std::io::{self, BufRead};
+    ///
+    /// let mut cursor = io::Cursor::new(b"foo\nbar");
+    /// let mut buf = String::new();
+    ///
+    /// // cursor is at 'f'
+    /// let num_bytes = cursor.read_line(&mut buf)
+    ///     .expect("reading from cursor won't fail");
+    /// assert_eq!(num_bytes, 4);
+    /// assert_eq!(buf, "foo\n");
+    /// buf.clear();
+    ///
+    /// // cursor is at 'b'
+    /// let num_bytes = cursor.read_line(&mut buf)
+    ///     .expect("reading from cursor won't fail");
+    /// assert_eq!(num_bytes, 3);
+    /// assert_eq!(buf, "bar");
+    /// buf.clear();
+    ///
+    /// // cursor is at EOF
+    /// let num_bytes = cursor.read_line(&mut buf)
+    ///     .expect("reading from cursor won't fail");
+    /// assert_eq!(num_bytes, 0);
+    /// assert_eq!(buf, "");
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     fn read_line(&mut self, buf: &mut String) -> Result<usize> {
@@ -1378,24 +1400,28 @@ fn read_line(&mut self, buf: &mut String) -> Result<usize> {
     /// This function will yield errors whenever [`read_until`] would have
     /// also yielded an error.
     ///
-    /// # Examples
-    ///
-    /// A locked standard input implements `BufRead`. In this example, we'll
-    /// read some input from standard input, splitting on commas.
-    ///
     /// [`io::Result`]: type.Result.html
     /// [`Vec<u8>`]: ../vec/struct.Vec.html
     /// [`read_until`]: #method.read_until
     ///
+    /// # Examples
+    ///
+    /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In
+    /// this example, we use [`Cursor`] to iterate over all hyphen-delimited
+    /// segments in a byte slice:
+    ///
+    /// [`Cursor`]: struct.Cursor.html
+    ///
     /// ```
-    /// use std::io;
-    /// use std::io::prelude::*;
+    /// use std::io::{self, BufRead};
     ///
-    /// let stdin = io::stdin();
+    /// let cursor = io::Cursor::new(b"lorem-ipsum-dolor");
     ///
-    /// for content in stdin.lock().split(b',') {
-    ///     println!("{:?}", content.unwrap());
-    /// }
+    /// let mut split_iter = cursor.split(b'-').map(|l| l.unwrap());
+    /// assert_eq!(split_iter.next(), Some(b"lorem".to_vec()));
+    /// assert_eq!(split_iter.next(), Some(b"ipsum".to_vec()));
+    /// assert_eq!(split_iter.next(), Some(b"dolor".to_vec()));
+    /// assert_eq!(split_iter.next(), None);
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     fn split(self, byte: u8) -> Split<Self> where Self: Sized {
@@ -1413,17 +1439,22 @@ fn split(self, byte: u8) -> Split<Self> where Self: Sized {
     ///
     /// # Examples
     ///
-    /// A locked standard input implements `BufRead`:
+    /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In
+    /// this example, we use [`Cursor`] to iterate over all the lines in a byte
+    /// slice.
+    ///
+    /// [`Cursor`]: struct.Cursor.html
     ///
     /// ```
-    /// use std::io;
-    /// use std::io::prelude::*;
+    /// use std::io::{self, BufRead};
     ///
-    /// let stdin = io::stdin();
+    /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor");
     ///
-    /// for line in stdin.lock().lines() {
-    ///     println!("{}", line.unwrap());
-    /// }
+    /// let mut lines_iter = cursor.lines().map(|l| l.unwrap());
+    /// assert_eq!(lines_iter.next(), Some(String::from("lorem")));
+    /// assert_eq!(lines_iter.next(), Some(String::from("ipsum")));
+    /// assert_eq!(lines_iter.next(), Some(String::from("dolor")));
+    /// assert_eq!(lines_iter.next(), None);
     /// ```
     ///
     /// # Errors
index 68d4ca900195c5ed8c3161eaa5d9853942fb5500..c34491941d69010a25f60a5ec6aa3ac947c36f98 100644 (file)
 
 use fmt;
 
-#[cfg(any(target_os = "android",
-          target_os = "emscripten",
+#[cfg(any(target_os = "emscripten",
           all(target_os = "linux", any(target_arch = "aarch64",
                                        target_arch = "arm",
                                        target_arch = "powerpc",
                                        target_arch = "powerpc64",
                                        target_arch = "s390x")),
+          all(target_os = "android", any(target_arch = "aarch64",
+                                         target_arch = "arm")),
           all(target_os = "fuchsia", target_arch = "aarch64")))]
 #[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = u8;
-#[cfg(not(any(target_os = "android",
-              target_os = "emscripten",
+#[cfg(not(any(target_os = "emscripten",
               all(target_os = "linux", any(target_arch = "aarch64",
                                            target_arch = "arm",
                                            target_arch = "powerpc",
                                            target_arch = "powerpc64",
                                            target_arch = "s390x")),
+              all(target_os = "android", any(target_arch = "aarch64",
+                                             target_arch = "arm")),
               all(target_os = "fuchsia", target_arch = "aarch64"))))]
 #[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = i8;
 #[stable(feature = "raw_os", since = "1.1.0")] pub type c_schar = i8;
index 55118829eee9f950ef900cb1a5ca62c068c63eb9..d688f2fa504517cb3fe450b15f68cc8ea7005f6e 100644 (file)
@@ -641,7 +641,7 @@ fn inner(path: &Path) -> io::Result<UnixListener> {
                 let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?;
                 let (addr, len) = sockaddr_un(path)?;
 
-                cvt(libc::bind(*inner.as_inner(), &addr as *const _ as *const _, len))?;
+                cvt(libc::bind(*inner.as_inner(), &addr as *const _ as *const _, len as _))?;
                 cvt(libc::listen(*inner.as_inner(), 128))?;
 
                 Ok(UnixListener(inner))
@@ -920,7 +920,7 @@ fn inner(path: &Path) -> io::Result<UnixDatagram> {
                 let socket = UnixDatagram::unbound()?;
                 let (addr, len) = sockaddr_un(path)?;
 
-                cvt(libc::bind(*socket.0.as_inner(), &addr as *const _ as *const _, len))?;
+                cvt(libc::bind(*socket.0.as_inner(), &addr as *const _ as *const _, len as _))?;
 
                 Ok(socket)
             }
index 5f1a6c2f7465150a9132072807c3f9259d617651..e9f41009064ca43ea267861b2bfe76cc8fd64718 100644 (file)
@@ -417,12 +417,26 @@ macro_rules! t {
         }
     }
 
+    // Android with an API level below 21 defines the sig* functions inline, so they are
+    // not available for dynamic linking. Implementing sigemptyset and sigaddset ourselves
+    // allows us to support older Android versions (independent of the libc version).
+    // The following implementations are based on https://git.io/vSkNf
+
     #[cfg(not(target_os = "android"))]
     extern {
+        #[cfg_attr(target_os = "netbsd", link_name = "__sigemptyset14")]
+        fn sigemptyset(set: *mut libc::sigset_t) -> libc::c_int;
+
         #[cfg_attr(target_os = "netbsd", link_name = "__sigaddset14")]
         fn sigaddset(set: *mut libc::sigset_t, signum: libc::c_int) -> libc::c_int;
     }
 
+    #[cfg(target_os = "android")]
+    unsafe fn sigemptyset(set: *mut libc::sigset_t) -> libc::c_int {
+        libc::memset(set as *mut _, 0, mem::size_of::<libc::sigset_t>());
+        return 0;
+    }
+
     #[cfg(target_os = "android")]
     unsafe fn sigaddset(set: *mut libc::sigset_t, signum: libc::c_int) -> libc::c_int {
         use slice;
@@ -450,7 +464,7 @@ fn test_process_mask() {
 
             let mut set: libc::sigset_t = mem::uninitialized();
             let mut old_set: libc::sigset_t = mem::uninitialized();
-            t!(cvt(libc::sigemptyset(&mut set)));
+            t!(cvt(sigemptyset(&mut set)));
             t!(cvt(sigaddset(&mut set, libc::SIGINT)));
             t!(cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set, &mut old_set)));
 
index a213273aac8fd55f174a91b145b2e27b139546ee..edd322ca6fa0708b1e4a928f7ecdb3ff66e5480c 100644 (file)
@@ -193,7 +193,16 @@ macro_rules! t {
             // need to clean things up now to avoid confusing the program
             // we're about to run.
             let mut set: libc::sigset_t = mem::uninitialized();
-            t!(cvt(libc::sigemptyset(&mut set)));
+            if cfg!(target_os = "android") {
+                // Implementing sigemptyset allows us to support older Android
+                // versions. See the comment about Android and sig* functions in
+                // process_common.rs
+                libc::memset(&mut set as *mut _ as *mut _,
+                             0,
+                             mem::size_of::<libc::sigset_t>());
+            } else {
+                t!(cvt(libc::sigemptyset(&mut set)));
+            }
             t!(cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set,
                                          ptr::null_mut())));
             let ret = sys::signal(libc::SIGPIPE, libc::SIG_DFL);
index 1afb3728c9d72e7d00d20a153631a312b72302e6..dfbc1b581ee55f39b4bb22366beb1240e3c4c79a 100644 (file)
@@ -257,8 +257,13 @@ fn to_handle(&self, stdio_id: c::DWORD, pipe: &mut Option<AnonPipe>)
             // INVALID_HANDLE_VALUE.
             Stdio::Inherit => {
                 match stdio::get(stdio_id) {
-                    Ok(io) => io.handle().duplicate(0, true,
-                                                    c::DUPLICATE_SAME_ACCESS),
+                    Ok(io) => {
+                        let io = Handle::new(io.handle());
+                        let ret = io.duplicate(0, true,
+                                               c::DUPLICATE_SAME_ACCESS);
+                        io.into_raw();
+                        return ret
+                    }
                     Err(..) => Ok(Handle::new(c::INVALID_HANDLE_VALUE)),
                 }
             }
index b1a57c349fbb909e942fc854571ed57967e708ee..d72e4b4438b7bc8d367638bcb55ec96a6bf77474 100644 (file)
 use sys::handle::Handle;
 use sys_common::io::read_to_end_uninitialized;
 
-pub struct NoClose(Option<Handle>);
-
 pub enum Output {
-    Console(NoClose),
-    Pipe(NoClose),
+    Console(c::HANDLE),
+    Pipe(c::HANDLE),
 }
 
 pub struct Stdin {
-    handle: Output,
     utf8: Mutex<io::Cursor<Vec<u8>>>,
 }
-pub struct Stdout(Output);
-pub struct Stderr(Output);
+pub struct Stdout;
+pub struct Stderr;
 
 pub fn get(handle: c::DWORD) -> io::Result<Output> {
     let handle = unsafe { c::GetStdHandle(handle) };
     if handle == c::INVALID_HANDLE_VALUE {
         Err(io::Error::last_os_error())
     } else if handle.is_null() {
-        Err(io::Error::new(io::ErrorKind::Other,
-                           "no stdio handle available for this process"))
+        Err(io::Error::from_raw_os_error(c::ERROR_INVALID_HANDLE as i32))
     } else {
-        let ret = NoClose::new(handle);
         let mut out = 0;
         match unsafe { c::GetConsoleMode(handle, &mut out) } {
-            0 => Ok(Output::Pipe(ret)),
-            _ => Ok(Output::Console(ret)),
+            0 => Ok(Output::Pipe(handle)),
+            _ => Ok(Output::Console(handle)),
         }
     }
 }
 
-fn write(out: &Output, data: &[u8]) -> io::Result<usize> {
-    let handle = match *out {
-        Output::Console(ref c) => c.get().raw(),
-        Output::Pipe(ref p) => return p.get().write(data),
+fn write(handle: c::DWORD, data: &[u8]) -> io::Result<usize> {
+    let handle = match try!(get(handle)) {
+        Output::Console(c) => c,
+        Output::Pipe(p) => {
+            let handle = Handle::new(p);
+            let ret = handle.write(data);
+            handle.into_raw();
+            return ret
+        }
     };
+
     // As with stdin on windows, stdout often can't handle writes of large
     // sizes. For an example, see #14940. For this reason, don't try to
     // write the entire output buffer on windows.
@@ -93,18 +94,20 @@ fn write(out: &Output, data: &[u8]) -> io::Result<usize> {
 
 impl Stdin {
     pub fn new() -> io::Result<Stdin> {
-        get(c::STD_INPUT_HANDLE).map(|handle| {
-            Stdin {
-                handle: handle,
-                utf8: Mutex::new(Cursor::new(Vec::new())),
-            }
+        Ok(Stdin {
+            utf8: Mutex::new(Cursor::new(Vec::new())),
         })
     }
 
     pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
-        let handle = match self.handle {
-            Output::Console(ref c) => c.get().raw(),
-            Output::Pipe(ref p) => return p.get().read(buf),
+        let handle = match try!(get(c::STD_INPUT_HANDLE)) {
+            Output::Console(c) => c,
+            Output::Pipe(p) => {
+                let handle = Handle::new(p);
+                let ret = handle.read(buf);
+                handle.into_raw();
+                return ret
+            }
         };
         let mut utf8 = self.utf8.lock().unwrap();
         // Read more if the buffer is empty
@@ -125,11 +128,9 @@ pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
                 Ok(utf8) => utf8.into_bytes(),
                 Err(..) => return Err(invalid_encoding()),
             };
-            if let Output::Console(_) = self.handle {
-                if let Some(&last_byte) = data.last() {
-                    if last_byte == CTRL_Z {
-                        data.pop();
-                    }
+            if let Some(&last_byte) = data.last() {
+                if last_byte == CTRL_Z {
+                    data.pop();
                 }
             }
             *utf8 = Cursor::new(data);
@@ -158,11 +159,11 @@ fn read_to_end(&mut self, buf: &mut Vec<u8>) -> io::Result<usize> {
 
 impl Stdout {
     pub fn new() -> io::Result<Stdout> {
-        get(c::STD_OUTPUT_HANDLE).map(Stdout)
+        Ok(Stdout)
     }
 
     pub fn write(&self, data: &[u8]) -> io::Result<usize> {
-        write(&self.0, data)
+        write(c::STD_OUTPUT_HANDLE, data)
     }
 
     pub fn flush(&self) -> io::Result<()> {
@@ -172,11 +173,11 @@ pub fn flush(&self) -> io::Result<()> {
 
 impl Stderr {
     pub fn new() -> io::Result<Stderr> {
-        get(c::STD_ERROR_HANDLE).map(Stderr)
+        Ok(Stderr)
     }
 
     pub fn write(&self, data: &[u8]) -> io::Result<usize> {
-        write(&self.0, data)
+        write(c::STD_ERROR_HANDLE, data)
     }
 
     pub fn flush(&self) -> io::Result<()> {
@@ -197,27 +198,12 @@ fn flush(&mut self) -> io::Result<()> {
     }
 }
 
-impl NoClose {
-    fn new(handle: c::HANDLE) -> NoClose {
-        NoClose(Some(Handle::new(handle)))
-    }
-
-    fn get(&self) -> &Handle { self.0.as_ref().unwrap() }
-}
-
-impl Drop for NoClose {
-    fn drop(&mut self) {
-        self.0.take().unwrap().into_raw();
-    }
-}
-
 impl Output {
-    pub fn handle(&self) -> &Handle {
-        let nc = match *self {
-            Output::Console(ref c) => c,
-            Output::Pipe(ref c) => c,
-        };
-        nc.0.as_ref().unwrap()
+    pub fn handle(&self) -> c::HANDLE {
+        match *self {
+            Output::Console(c) => c,
+            Output::Pipe(c) => c,
+        }
     }
 }
 
index 3cdeb511945756d7d8c451ad4ff89ff6aac8f164..9239c18e59717a2bf088859ff05a8dbbccfbe81a 100644 (file)
@@ -339,7 +339,7 @@ pub fn bind(addr: &SocketAddr) -> io::Result<TcpListener> {
 
         // Bind our new socket
         let (addrp, len) = addr.into_inner();
-        cvt(unsafe { c::bind(*sock.as_inner(), addrp, len) })?;
+        cvt(unsafe { c::bind(*sock.as_inner(), addrp, len as _) })?;
 
         // Start listening
         cvt(unsafe { c::listen(*sock.as_inner(), 128) })?;
@@ -430,7 +430,7 @@ pub fn bind(addr: &SocketAddr) -> io::Result<UdpSocket> {
 
         let sock = Socket::new(addr, c::SOCK_DGRAM)?;
         let (addrp, len) = addr.into_inner();
-        cvt(unsafe { c::bind(*sock.as_inner(), addrp, len) })?;
+        cvt(unsafe { c::bind(*sock.as_inner(), addrp, len as _) })?;
         Ok(UdpSocket { inner: sock })
     }
 
index 0b38f5450b63faf09251971ad20926318130c2ff..97d37266130af750a304561f1eb0ec33ae06f5e7 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 
 [dependencies]
 serialize = { path = "../libserialize" }
-log = { path = "../liblog" }
+log = "0.3"
 rustc_bitflags = { path = "../librustc_bitflags" }
 syntax_pos = { path = "../libsyntax_pos" }
 rustc_errors = { path = "../librustc_errors" }
index f0e328a551d5f37003d53bf6f93a40617708ea83..2d0994a7b78fb1b798f00cd04366b20fab589d7c 100644 (file)
@@ -106,8 +106,8 @@ fn remove(&mut self, id: ast::NodeId) -> Expansion {
 impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> {
     fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
         match item.node {
-            ast::ItemKind::Mac(ref mac) if !mac.node.path.segments.is_empty() => {}
             ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(),
+            ast::ItemKind::MacroDef(_) => return SmallVector::one(item),
             _ => {}
         }
 
index 021c5398a4200459218a8f94d78df25ed77b6502..66f5520b8826371991822087a551cfc35277a80e 100644 (file)
@@ -119,7 +119,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                 };
                 let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false);
                 p.root_module_name = cx.current_expansion.module.mod_path.last()
-                    .map(|id| (*id.name.as_str()).to_owned());
+                    .map(|id| id.name.as_str().to_string());
 
                 p.check_unknown_macro_variable();
                 // Let the context choose how to interpret the result.
index 7af432176cf6ea44d0160cb0387ee577f3ea9013..9d280a413e666c5403015bcbdc9ccec310febd68 100644 (file)
@@ -818,7 +818,7 @@ pub struct GatedCfg {
 
 impl GatedCfg {
     pub fn gate(cfg: &ast::MetaItem) -> Option<GatedCfg> {
-        let name = &*cfg.name().as_str();
+        let name = cfg.name().as_str();
         GATED_CFGS.iter()
                   .position(|info| info.0 == name)
                   .map(|idx| {
@@ -865,8 +865,7 @@ macro_rules! gate_feature {
 impl<'a> Context<'a> {
     fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
         debug!("check_attribute(attr = {:?})", attr);
-        let name = unwrap_or!(attr.name(), return);
-
+        let name = unwrap_or!(attr.name(), return).as_str();
         for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
             if name == n {
                 if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
@@ -885,12 +884,12 @@ fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
                 return;
             }
         }
-        if name.as_str().starts_with("rustc_") {
+        if name.starts_with("rustc_") {
             gate_feature!(self, rustc_attrs, attr.span,
                           "unless otherwise specified, attributes \
                            with the prefix `rustc_` \
                            are reserved for internal compiler diagnostics");
-        } else if name.as_str().starts_with("derive_") {
+        } else if name.starts_with("derive_") {
             gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE);
         } else if !attr::is_known(attr) {
             // Only run the custom attribute lint during regular
index 649e90599345bf93aa5ee6bc0f7e4e8e3dad75ec..43a9d8c5f787c77bcc993e6f5a701c668e97b95f 100644 (file)
@@ -5151,15 +5151,15 @@ fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo>
 
     fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
         if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") {
-            self.directory.path.push(&*path.as_str());
+            self.directory.path.push(&path.as_str());
             self.directory.ownership = DirectoryOwnership::Owned;
         } else {
-            self.directory.path.push(&*id.name.as_str());
+            self.directory.path.push(&id.name.as_str());
         }
     }
 
     pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option<PathBuf> {
-        attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str()))
+        attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&d.as_str()))
     }
 
     /// Returns either a path to a module, or .
index 6642c60d256b3c4e40e0cc437ddb60993544e1f6..2acbeee426beead4b72b286e16bfffacf89957ad 100644 (file)
@@ -72,9 +72,9 @@ fn decode<D: Decoder>(d: &mut D) -> Result<Symbol, D::Error> {
     }
 }
 
-impl<'a> PartialEq<&'a str> for Symbol {
-    fn eq(&self, other: &&str) -> bool {
-        *self.as_str() == **other
+impl<T: ::std::ops::Deref<Target=str>> PartialEq<T> for Symbol {
+    fn eq(&self, other: &T) -> bool {
+        self.as_str() == other.deref()
     }
 }
 
@@ -244,11 +244,47 @@ fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
 /// destroyed. In particular, they must not access string contents. This can
 /// be fixed in the future by just leaking all strings until thread death
 /// somehow.
-#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
+#[derive(Clone, Hash, PartialOrd, Eq, Ord)]
 pub struct InternedString {
     string: &'static str,
 }
 
+impl<U: ?Sized> ::std::convert::AsRef<U> for InternedString where str: ::std::convert::AsRef<U> {
+    fn as_ref(&self) -> &U {
+        self.string.as_ref()
+    }
+}
+
+impl<T: ::std::ops::Deref<Target = str>> ::std::cmp::PartialEq<T> for InternedString {
+    fn eq(&self, other: &T) -> bool {
+        self.string == other.deref()
+    }
+}
+
+impl ::std::cmp::PartialEq<InternedString> for str {
+    fn eq(&self, other: &InternedString) -> bool {
+        self == other.string
+    }
+}
+
+impl<'a> ::std::cmp::PartialEq<InternedString> for &'a str {
+    fn eq(&self, other: &InternedString) -> bool {
+        *self == other.string
+    }
+}
+
+impl ::std::cmp::PartialEq<InternedString> for String {
+    fn eq(&self, other: &InternedString) -> bool {
+        self == other.string
+    }
+}
+
+impl<'a> ::std::cmp::PartialEq<InternedString> for &'a String {
+    fn eq(&self, other: &InternedString) -> bool {
+        *self == other.string
+    }
+}
+
 impl !Send for InternedString { }
 
 impl ::std::ops::Deref for InternedString {
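[Editorial aside: the blanket impls above let `Symbol` and `InternedString` be compared directly against `&str`, `String`, and anything else that derefs to `str` (the later `"test" == s.as_str()` hunk relies on this). A self-contained sketch of the same pattern; `Sym` is a hypothetical stand-in, not the compiler type:]

    use std::ops::Deref;

    struct Sym(&'static str);

    impl Sym {
        fn as_str(&self) -> &str { self.0 }
    }

    // Mirrors the new impl: one generic impl covers &str, String, and friends.
    impl<T: Deref<Target = str>> PartialEq<T> for Sym {
        fn eq(&self, other: &T) -> bool {
            self.as_str() == other.deref()
        }
    }

    fn main() {
        let s = Sym("test");
        assert!(s == "test");               // compared against &str
        assert!(s == String::from("test")); // compared against String
    }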
index e052d2cda3a42469b000be576f82b416a08bb20c..6fb6db9ca0282fd09cac855a9f6f3838714694ea 100644 (file)
@@ -616,7 +616,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
 
 fn is_test_crate(krate: &ast::Crate) -> bool {
     match attr::find_crate_name(&krate.attrs) {
-        Some(s) if "test" == &*s.as_str() => true,
+        Some(s) if "test" == s.as_str() => true,
         _ => false
     }
 }
index 960db792a623e4c2a1ab38cb60b12ba37481ff41..bdcec26cb838b211ba5980542da2ca62bb386b99 100644 (file)
@@ -10,7 +10,7 @@ crate-type = ["dylib"]
 
 [dependencies]
 fmt_macros = { path = "../libfmt_macros" }
-log = { path = "../liblog" }
+log = "0.3"
 proc_macro = { path = "../libproc_macro" }
 rustc_errors = { path = "../librustc_errors" }
 syntax = { path = "../libsyntax" }
diff --git a/src/test/compile-fail/imports/shadow_builtin_macros.rs b/src/test/compile-fail/imports/shadow_builtin_macros.rs
new file mode 100644 (file)
index 0000000..2b3ba1b
--- /dev/null
@@ -0,0 +1,72 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:two_macros.rs
+
+#![feature(use_extern_macros)]
+
+mod foo {
+    extern crate two_macros;
+    pub use self::two_macros::m as panic;
+}
+
+mod m1 {
+    use foo::panic; // ok
+    fn f() { panic!(); }
+}
+
+mod m2 {
+    use foo::*; //~ NOTE `panic` could refer to the name imported here
+    fn f() { panic!(); } //~ ERROR ambiguous
+    //~| NOTE `panic` is also a builtin macro
+    //~| NOTE consider adding an explicit import of `panic` to disambiguate
+}
+
+mod m3 {
+    ::two_macros::m!(use foo::panic;); //~ NOTE `panic` could refer to the name imported here
+    //~| NOTE in this expansion
+    fn f() { panic!(); } //~ ERROR ambiguous
+    //~| NOTE `panic` is also a builtin macro
+    //~| NOTE macro-expanded macro imports do not shadow
+}
+
+mod m4 {
+    macro_rules! panic { () => {} } // ok
+    panic!();
+}
+
+mod m5 {
+    macro_rules! m { () => {
+        macro_rules! panic { () => {} } //~ ERROR `panic` is already in scope
+        //~| NOTE macro-expanded `macro_rules!`s may not shadow existing macros
+    } }
+    m!(); //~ NOTE in this expansion
+    //~| NOTE in this expansion
+    panic!();
+}
+
+#[macro_use(n)] //~ NOTE `n` could also refer to the name imported here
+extern crate two_macros;
+mod bar {
+    pub use two_macros::m as n;
+}
+
+mod m6 {
+    use bar::n; // ok
+    n!();
+}
+
+mod m7 {
+    use bar::*; //~ NOTE `n` could refer to the name imported here
+    n!(); //~ ERROR ambiguous
+    //~| NOTE consider adding an explicit import of `n` to disambiguate
+}
+
+fn main() {}
diff --git a/src/test/compile-fail/issue-40749.rs b/src/test/compile-fail/issue-40749.rs
new file mode 100644 (file)
index 0000000..261ed49
--- /dev/null
@@ -0,0 +1,16 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+    [0; ..10];
+    //~^ ERROR mismatched types
+    //~| expected type `usize`
+    //~| found type `std::ops::RangeTo<{integer}>`
+}
diff --git a/src/test/compile-fail/static-lifetime-bound.rs b/src/test/compile-fail/static-lifetime-bound.rs
new file mode 100644 (file)
index 0000000..38534ab
--- /dev/null
@@ -0,0 +1,16 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn f<'a: 'static>(_: &'a i32) {} //~WARN unnecessary lifetime parameter `'a`
+
+fn main() {
+    let x = 0;
+    f(&x); //~ERROR does not live long enough
+}
diff --git a/src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs b/src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs
deleted file mode 100644 (file)
index db26b10..0000000
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(rustc_private)]
-
-#[macro_use] extern crate log;
-
-pub fn foo<T>() {
-    fn death() -> isize { panic!() }
-    debug!("{}", (||{ death() })());
-}
diff --git a/src/test/run-pass-fulldeps/conditional-debug-macro-off.rs b/src/test/run-pass-fulldeps/conditional-debug-macro-off.rs
deleted file mode 100644 (file)
index c6beb5b..0000000
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -C debug-assertions=no
-// exec-env:RUST_LOG=conditional-debug-macro-off=4
-
-
-#![feature(rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-pub fn main() {
-    // only panics if println! evaluates its argument.
-    debug!("{:?}", { if true { panic!() } });
-}
diff --git a/src/test/run-pass-fulldeps/logging-enabled-debug.rs b/src/test/run-pass-fulldeps/logging-enabled-debug.rs
deleted file mode 100644 (file)
index 3ae4884..0000000
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-C debug-assertions=no
-// exec-env:RUST_LOG=logging-enabled-debug=debug
-
-
-#![feature(rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-pub fn main() {
-    if log_enabled!(log::DEBUG) {
-        panic!("what?! debugging?");
-    }
-}
diff --git a/src/test/run-pass-fulldeps/logging-enabled.rs b/src/test/run-pass-fulldeps/logging-enabled.rs
deleted file mode 100644 (file)
index 2626134..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// exec-env:RUST_LOG=logging_enabled=info
-// ignore-emscripten: FIXME(#31622)
-
-
-#![feature(rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-pub fn main() {
-    if log_enabled!(log::DEBUG) {
-        panic!("what?! debugging?");
-    }
-    if !log_enabled!(log::INFO) {
-        panic!("what?! no info?");
-    }
-}
diff --git a/src/test/run-pass-fulldeps/logging-right-crate.rs b/src/test/run-pass-fulldeps/logging-right-crate.rs
deleted file mode 100644 (file)
index 7caeeb4..0000000
+++ /dev/null
@@ -1,31 +0,0 @@
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:logging_right_crate.rs
-// exec-env:RUST_LOG=logging-right-crate=debug
-
-// This is a test for issue #3046 to make sure that when we monomorphize a
-// function from one crate to another the right top-level logging name is
-// preserved.
-//
-// It used to be the case that if logging were turned on for this crate, all
-// monomorphized functions from other crates had logging turned on (their
-// logging module names were all incorrect). This test ensures that this no
-// longer happens by enabling logging for *this* crate and then invoking a
-// function in an external crate which will panic when logging is enabled.
-
-// pretty-expanded FIXME #23616
-
-extern crate logging_right_crate;
-
-pub fn main() {
-    // this function panicks if logging is turned on
-    logging_right_crate::foo::<isize>();
-}
diff --git a/src/test/run-pass-fulldeps/logging-separate-lines.rs b/src/test/run-pass-fulldeps/logging-separate-lines.rs
deleted file mode 100644 (file)
index 183a522..0000000
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-windows
-// exec-env:RUST_LOG=debug
-// compile-flags:-C debug-assertions=y
-// ignore-emscripten: FIXME(#31622)
-
-#![feature(rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-use std::process::Command;
-use std::env;
-use std::str;
-
-fn main() {
-    let args: Vec<String> = env::args().collect();
-    if args.len() > 1 && args[1] == "child" {
-        debug!("foo");
-        debug!("bar");
-        return
-    }
-
-    let p = Command::new(&args[0])
-                    .arg("child")
-                    .output().unwrap();
-    assert!(p.status.success());
-    let mut lines = str::from_utf8(&p.stderr).unwrap().lines();
-    assert!(lines.next().unwrap().contains("foo"));
-    assert!(lines.next().unwrap().contains("bar"));
-}
diff --git a/src/test/run-pass-fulldeps/rust-log-filter.rs b/src/test/run-pass-fulldeps/rust-log-filter.rs
deleted file mode 100644 (file)
index 306d24e..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// exec-env:RUST_LOG=rust_log_filter/foo
-// ignore-emscripten no threads support
-
-#![allow(unknown_features)]
-#![feature(box_syntax, std_misc, rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-use std::sync::mpsc::{channel, Sender, Receiver};
-use std::thread;
-
-pub struct ChannelLogger {
-    tx: Sender<String>
-}
-
-impl ChannelLogger {
-    pub fn new() -> (Box<ChannelLogger>, Receiver<String>) {
-        let (tx, rx) = channel();
-        (box ChannelLogger { tx: tx }, rx)
-    }
-}
-
-impl log::Logger for ChannelLogger {
-    fn log(&mut self, record: &log::LogRecord) {
-        self.tx.send(format!("{}", record.args)).unwrap();
-    }
-}
-
-pub fn main() {
-    let (logger, rx) = ChannelLogger::new();
-
-    let t = thread::spawn(move|| {
-        log::set_logger(logger);
-
-        info!("foo");
-        info!("bar");
-        info!("foo bar");
-        info!("bar foo");
-    });
-
-    assert_eq!(rx.recv().unwrap(), "foo");
-    assert_eq!(rx.recv().unwrap(), "foo bar");
-    assert_eq!(rx.recv().unwrap(), "bar foo");
-    assert!(rx.recv().is_err());
-
-    t.join();
-}
diff --git a/src/test/run-pass-fulldeps/switch-stdout.rs b/src/test/run-pass-fulldeps/switch-stdout.rs
new file mode 100644 (file)
index 0000000..4542e27
--- /dev/null
@@ -0,0 +1,64 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_private)]
+
+extern crate rustc_back;
+
+use std::fs::File;
+use std::io::{Read, Write};
+
+use rustc_back::tempdir::TempDir;
+
+#[cfg(unix)]
+fn switch_stdout_to(file: File) {
+    use std::os::unix::prelude::*;
+
+    extern {
+        fn dup2(old: i32, new: i32) -> i32;
+    }
+
+    unsafe {
+        assert_eq!(dup2(file.as_raw_fd(), 1), 1);
+    }
+}
+
+#[cfg(windows)]
+fn switch_stdout_to(file: File) {
+    use std::os::windows::prelude::*;
+
+    extern "system" {
+        fn SetStdHandle(nStdHandle: u32, handle: *mut u8) -> i32;
+    }
+
+    const STD_OUTPUT_HANDLE: u32 = (-11i32) as u32;
+
+    unsafe {
+        let rc = SetStdHandle(STD_OUTPUT_HANDLE,
+                              file.into_raw_handle() as *mut _);
+        assert!(rc != 0);
+    }
+}
+
+fn main() {
+    let td = TempDir::new("foo").unwrap();
+    let path = td.path().join("bar");
+    let f = File::create(&path).unwrap();
+
+    println!("foo");
+    std::io::stdout().flush().unwrap();
+    switch_stdout_to(f);
+    println!("bar");
+    std::io::stdout().flush().unwrap();
+
+    let mut contents = String::new();
+    File::open(&path).unwrap().read_to_string(&mut contents).unwrap();
+    assert_eq!(contents, "bar\n");
+}
index b335e20f91d932df7a127b37a6eca2e56c4b73f8..7da33be7a57daf19faa1a47bcad1fc9f4be345a4 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// exec-env:RUST_LOG=conditional-debug-macro-on=4
-
 pub fn main() {
     // exits early if println! evaluates its arguments, otherwise it
     // will hit the panic.
diff --git a/src/test/run-pass/issue-40770.rs b/src/test/run-pass/issue-40770.rs
new file mode 100644 (file)
index 0000000..599d0b2
--- /dev/null
@@ -0,0 +1,19 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+macro_rules! m {
+    ($e:expr) => {
+        macro_rules! n { () => { $e } }
+    }
+}
+
+fn main() {
+    m!(foo!());
+}
index 1fc98a78a7c473c73148174942d0e62f82bb9fda..7530b65a9b7c4fc383dc24fe94a6426e81593d66 100644 (file)
@@ -5,6 +5,6 @@ version = "0.0.0"
 
 [dependencies]
 log = "0.3"
-env_logger = { version = "0.3.5", default-features = false }
+env_logger = { version = "0.4", default-features = false }
 rustc-serialize = "0.3"
 filetime = "0.1"