Rollup merge of #40144 - MajorBreakfast:patch-7, r=frewsxcv
author    Guillaume Gomez <guillaume1.gomez@gmail.com>
          Thu, 2 Mar 2017 10:29:36 +0000 (11:29 +0100)
committer GitHub <noreply@github.com>
          Thu, 2 Mar 2017 10:29:36 +0000 (11:29 +0100)
Unit-like structs doc: Improve code sample

r? @steveklabnik

BTW it seems that
```rust
let p = Proton {};
```
compiles without an error. That's why I didn't add it to the example. It's about consistency anyway.
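For reference, a minimal sketch of the behaviour mentioned above; `Proton` is only the illustrative name from this discussion, not an item in the repository:

```rust
// A unit-like struct can be instantiated without braces and, as noted
// above, the empty-brace form is accepted by the compiler as well.
struct Proton;

fn main() {
    let _p = Proton;     // usual form for unit-like structs
    let _q = Proton {};  // also compiles, as observed in the PR description
}
```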

101 files changed:
.travis.yml
CONTRIBUTING.md
README.md
src/Cargo.lock
src/bootstrap/sanity.rs
src/ci/docker/android/Dockerfile [deleted file]
src/ci/docker/android/accept-licenses.sh [deleted file]
src/ci/docker/android/install-ndk.sh [deleted file]
src/ci/docker/android/install-sdk.sh [deleted file]
src/ci/docker/android/start-emulator.sh [deleted file]
src/ci/docker/arm-android/Dockerfile [new file with mode: 0644]
src/ci/docker/arm-android/accept-licenses.sh [new file with mode: 0755]
src/ci/docker/arm-android/install-ndk.sh [new file with mode: 0644]
src/ci/docker/arm-android/install-sdk.sh [new file with mode: 0644]
src/ci/docker/arm-android/start-emulator.sh [new file with mode: 0755]
src/ci/docker/dist-android/Dockerfile [new file with mode: 0644]
src/ci/docker/dist-android/install-ndk.sh [new file with mode: 0644]
src/ci/docker/linux-tested-targets/Dockerfile
src/doc/book/src/lifetimes.md
src/doc/book/src/procedural-macros.md
src/doc/book/src/structs.md
src/doc/unstable-book/src/SUMMARY.md
src/libcollections/range.rs
src/libcollections/string.rs
src/libcollectionstest/btree/map.rs
src/libcollectionstest/lib.rs
src/libcollectionstest/vec.rs
src/libcore/fmt/mod.rs
src/libproc_macro/lib.rs
src/libproc_macro_plugin/qquote.rs
src/librustc/diagnostics.rs
src/librustc/infer/error_reporting.rs [deleted file]
src/librustc/infer/error_reporting/mod.rs [new file with mode: 0644]
src/librustc/infer/error_reporting/note.rs [new file with mode: 0644]
src/librustc/infer/mod.rs
src/librustc/lint/builtin.rs
src/librustc/middle/intrinsicck.rs
src/librustc/traits/error_reporting.rs
src/librustc/traits/object_safety.rs
src/librustc/ty/layout.rs
src/librustc/ty/mod.rs
src/librustc/ty/sty.rs
src/librustc_driver/driver.rs
src/librustc_incremental/calculate_svh/svh_visitor.rs
src/librustc_lint/lib.rs
src/librustc_save_analysis/span_utils.rs
src/librustc_trans/mir/block.rs
src/librustdoc/clean/mod.rs
src/librustdoc/html/format.rs
src/librustdoc/html/highlight.rs
src/librustdoc/html/render.rs
src/librustdoc/html/static/main.js
src/librustdoc/html/static/rustdoc.css
src/librustdoc/visit_ast.rs
src/libstd/ffi/c_str.rs
src/libstd/process.rs
src/libstd/sync/condvar.rs
src/libstd/sys_common/poison.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs [new file with mode: 0644]
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/lib.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/print/pprust.rs
src/libsyntax/tokenstream.rs
src/libsyntax_ext/lib.rs
src/test/compile-fail-fulldeps/gated-quote.rs
src/test/compile-fail/feature-gate-cfg-target-has-atomic.rs [new file with mode: 0644]
src/test/compile-fail/feature-gate-unboxed-closures.rs [new file with mode: 0644]
src/test/compile-fail/issue-35450.rs
src/test/compile-fail/issue-39404.rs [new file with mode: 0644]
src/test/compile-fail/issue-39709.rs [deleted file]
src/test/compile-fail/macro-error.rs
src/test/compile-fail/macro-tt-matchers.rs
src/test/compile-fail/malformed_macro_lhs.rs
src/test/compile-fail/transmute-from-fn-item-types-error.rs
src/test/compile-fail/transmute-from-fn-item-types-lint.rs [deleted file]
src/test/parse-fail/issue-33569.rs
src/test/run-make/fpic/Makefile [new file with mode: 0644]
src/test/run-make/fpic/hello.rs [new file with mode: 0644]
src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
src/test/run-pass-fulldeps/mbe_matching_test_macro.rs
src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-39889.rs [new file with mode: 0644]
src/test/run-pass-fulldeps/proc-macro/issue-39889.rs [new file with mode: 0644]
src/test/run-pass-fulldeps/quote-tokens.rs
src/test/run-pass/enum-layout-optimization.rs [new file with mode: 0644]
src/test/run-pass/issue-39709.rs [new file with mode: 0644]
src/test/run-pass/transmute-from-fn-item-types.rs [deleted file]
src/test/rustdoc/assoc-consts.rs
src/test/rustdoc/issue-27759.rs
src/test/rustdoc/issue-28478.rs
src/test/rustdoc/issue-32374.rs
src/test/rustdoc/issue-33302.rs
src/tools/rustbook/Cargo.toml
src/tools/tidy/src/features.rs

index 1e462362f53b25ef3c37c5ade7b43c8754bb6516..442d02aca7867f27fe58e95a4e08f17777c9e8a2 100644 (file)
@@ -12,10 +12,11 @@ matrix:
   fast_finish: true
   include:
     # Linux builders, all docker images
-    - env: IMAGE=android DEPLOY=1
+    - env: IMAGE=arm-android
     - env: IMAGE=armhf-gnu
     - env: IMAGE=cross DEPLOY=1
     - env: IMAGE=linux-tested-targets DEPLOY=1
+    - env: IMAGE=dist-android DEPLOY=1
     - env: IMAGE=dist-arm-linux DEPLOY=1
     - env: IMAGE=dist-armv7-aarch64-linux DEPLOY=1
     - env: IMAGE=dist-freebsd DEPLOY=1
index e9d8c84f4071505ccb75071d3dcfdf9f5962b543..1e983cfd726d6990c25f6ce28f30a3efa8405082 100644 (file)
@@ -97,33 +97,38 @@ system internals, try asking in [`#rust-internals`][pound-rust-internals].
 
 Before you can start building the compiler you need to configure the build for
 your system. In most cases, that will just mean using the defaults provided
-for Rust. Configuring involves invoking the `configure` script in the project
-root.
+for Rust.
 
-```
-./configure
-```
+To change configuration, you must copy the file `src/bootstrap/config.toml.example`
+to `config.toml` in the directory from which you will be running the build, and
+change the settings provided.
+
+There are a large number of options provided in this config file that will alter the
+configuration used in the build process. Some options to note:
 
-There are large number of options accepted by this script to alter the
-configuration used later in the build process. Some options to note:
+#### `[llvm]`:
+- `ccache = true` - Use ccache when building llvm
 
-- `--enable-debug` - Build a debug version of the compiler (disables optimizations,
-    which speeds up compilation of stage1 rustc)
-- `--enable-optimize` - Enable optimizations (can be used with `--enable-debug`
-    to make a debug build with optimizations)
-- `--disable-valgrind-rpass` - Don't run tests with valgrind
-- `--enable-clang` - Prefer clang to gcc for building dependencies (e.g., LLVM)
-- `--enable-ccache` - Invoke clang/gcc with ccache to re-use object files between builds
-- `--enable-compiler-docs` - Build compiler documentation
+#### `[build]`:
+- `compiler-docs = true` - Build compiler documentation
 
-To see a full list of options, run `./configure --help`.
+#### `[rust]`:
+- `debuginfo = true` - Build a compiler with debuginfo
+- `optimize = false` - Disable optimizations to speed up compilation of stage1 rustc
+
+For more options, the `config.toml` file contains commented out defaults, with
+descriptions of what each option will do.
+
+Note: Previously the `./configure` script was used to configure this
+project. It can still be used, but it's recommended to use a `config.toml`
+file. If you still have a `config.mk` file in your directory - from
+`./configure` - you may need to delete it for `config.toml` to work.
 
 ### Building
 
-Although the `./configure` script will generate a `Makefile`, this is actually
-just a thin veneer over the actual build system driver, `x.py`. This file, at
-the root of the repository, is used to build, test, and document various parts
-of the compiler. You can execute it as:
+The build system uses the `x.py` script to control the build process. This script
+is used to build, test, and document various parts of the compiler. You can
+execute it as:
 
 ```sh
 python x.py build
@@ -185,6 +190,9 @@ To learn about all possible rules you can execute, run:
 python x.py build --help --verbose
 ```
 
+Note: Previously `./configure` and `make` were used to build this project.
+They are still available, but `x.py` is the recommended build system.
+
 ### Useful commands
 
 Some common invocations of `x.py` are:
@@ -235,8 +243,8 @@ feature. We use the 'fork and pull' model described there.
 
 Please make pull requests against the `master` branch.
 
-Compiling all of `make check` can take a while. When testing your pull request,
-consider using one of the more specialized `make` targets to cut down on the
+Compiling all of `./x.py test` can take a while. When testing your pull request,
+consider using one of the more specialized `./x.py` targets to cut down on the
 amount of time you have to wait. You need to have built the compiler at least
 once before running these will work, but that’s only one full build rather than
 one each time.
@@ -307,7 +315,7 @@ To find documentation-related issues, sort by the [A-docs label][adocs].
 
 [adocs]: https://github.com/rust-lang/rust/issues?q=is%3Aopen+is%3Aissue+label%3AA-docs
 
-In many cases, you don't need a full `make doc`. You can use `rustdoc` directly
+In many cases, you don't need a full `./x.py doc`. You can use `rustdoc` directly
 to check small fixes. For example, `rustdoc src/doc/reference.md` will render
 reference to `doc/reference.html`. The CSS might be messed up, but you can
 verify that the HTML is right.
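The CONTRIBUTING.md changes above replace `./configure` flags with settings in `config.toml`. A minimal sketch of such a file, limited to the options named in the diff (the values are illustrative):

```toml
# config.toml, placed in the directory the build is run from
# (start by copying src/bootstrap/config.toml.example)

[llvm]
ccache = true          # use ccache when building LLVM

[build]
compiler-docs = true   # build compiler documentation

[rust]
debuginfo = true       # build a compiler with debuginfo
optimize = false       # disable optimizations to speed up stage1 builds
```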
index c1218e9c600ce772e82b053083393e84ad44f47c..93415adc423f4b26a28dddc1856fc2b519f8f0f7 100644 (file)
--- a/README.md
+++ b/README.md
@@ -35,15 +35,15 @@ Read ["Installing Rust"] from [The Book].
 3. Build and install:
 
     ```sh
-    $ ./configure
-    $ make && sudo make install
+    $ ./x.py build && sudo ./x.py dist --install
     ```
 
-    > ***Note:*** Install locations can be adjusted by passing a `--prefix`
-    > argument to `configure`. Various other options are also supported – pass
-    > `--help` for more information on them.
+    > ***Note:*** Install locations can be adjusted by copying the config file
+    > from `./src/bootstrap/config.toml.example` to `./config.toml`, and
+    > adjusting the `prefix` option under `[install]`. Various other options are
+    > also supported, and are documented in the config file.
 
-    When complete, `sudo make install` will place several programs into
+    When complete, `sudo ./x.py dist --install` will place several programs into
     `/usr/local/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
     API-documentation tool. This install does not include [Cargo],
     Rust's package manager, which you may also want to build.
@@ -59,7 +59,6 @@ for interop with software produced by Visual Studio use the MSVC build of Rust;
 for interop with GNU software built using the MinGW/MSYS2 toolchain use the GNU
 build.
 
-
 #### MinGW
 
 [MSYS2][msys2] can be used to easily build Rust on Windows:
@@ -94,11 +93,10 @@ build.
                mingw-w64-x86_64-gcc
    ```
 
-4. Navigate to Rust's source code (or clone it), then configure and build it:
+4. Navigate to Rust's source code (or clone it), then build it:
 
    ```sh
-   $ ./configure
-   $ make && make install
+   $ ./x.py build && ./x.py dist --install
    ```
 
 #### MSVC
@@ -114,13 +112,6 @@ shell with:
 > python x.py build
 ```
 
-If you're running inside of an msys shell, however, you can run:
-
-```sh
-$ ./configure --build=x86_64-pc-windows-msvc
-$ make && make install
-```
-
 Currently building Rust only works with some known versions of Visual Studio. If
 you have a more recent version installed the build system doesn't understand
 then you may need to force rustbuild to use an older version. This can be done
@@ -131,13 +122,43 @@ CALL "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\amd64\vcvars64.
 python x.py build
 ```
 
+#### Specifying an ABI
+
+Each specific ABI can also be used from either environment (for example, using
+the GNU ABI in powershell) by using an explicit build triple. The available
+Windows build triples are:
+- GNU ABI (using GCC)
+    - `i686-pc-windows-gnu`
+    - `x86_64-pc-windows-gnu`
+- The MSVC ABI
+    - `i686-pc-windows-msvc`
+    - `x86_64-pc-windows-msvc`
+
+The build triple can be specified by either specifying `--build=ABI` when
+invoking `x.py` commands, or by copying the `config.toml` file (as described
+in Building From Source), and modifying the `build` option under the `[build]`
+section.
+
+### Configure and Make
+
+While it's not the recommended build system, this project also provides a
+configure script and makefile (the latter of which just invokes `x.py`).
+
+```sh
+$ ./configure
+$ make && sudo make install
+```
+
+When using the configure script, the generated `config.mk` file may override the
+`config.toml` file. To go back to the `config.toml` file, delete the generated
+`config.mk` file.
+
 ## Building Documentation
 
 If you’d like to build the documentation, it’s almost the same:
 
 ```sh
-$ ./configure
-$ make docs
+$ ./x.py doc
 ```
 
 The generated documentation will appear in a top-level `doc` directory,
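The "Specifying an ABI" section added above mentions passing `--build=ABI` to `x.py`; a short illustration of what such an invocation could look like, using one of the four listed triples:

```sh
# Build with the 64-bit GNU ABI regardless of which shell you are in
$ python x.py build --build=x86_64-pc-windows-gnu
```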
index 16a641cc96d15ae1facc2eb9ca10165f33fe235f..1fb5d34d13f5d063a59869668340762b400406b6 100644 (file)
@@ -270,7 +270,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "mdbook"
-version = "0.0.16"
+version = "0.0.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -401,7 +401,7 @@ name = "rustbook"
 version = "0.1.0"
 dependencies = [
  "clap 2.20.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "mdbook 0.0.16 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mdbook 0.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -976,7 +976,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum lazy_static 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6abe0ee2e758cd6bc8a2cd56726359007748fbf4128da998b65d0b70f881e19b"
 "checksum libc 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)" = "684f330624d8c3784fb9558ca46c4ce488073a8d22450415c5eb4f4cfb0d11b5"
 "checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
-"checksum mdbook 0.0.16 (registry+https://github.com/rust-lang/crates.io-index)" = "14e8a6aca534ac51bad1c1886b10f6d6948a14fa70b1b20a1e41c9e5c0fe3019"
+"checksum mdbook 0.0.17 (registry+https://github.com/rust-lang/crates.io-index)" = "dbba458ca886cb082d026afd704eeeeb0531f7e4ffd6c619f72dc309c1c18fe4"
 "checksum memchr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1dbccc0e46f1ea47b9f17e6d67c5a96bd27030519c519c9c91327e31275a47b4"
 "checksum num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a16a42856a256b39c6d3484f097f6713e14feacd9bfb02290917904fae46c81c"
 "checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
index 8e79c2d27d1957e5d4dbbd1177a8f0ca2ed2502f..bc439d6f7826d4f6bec937ac3f45886e50595316 100644 (file)
@@ -198,10 +198,6 @@ pub fn check(build: &mut Build) {
 ");
             }
         }
-
-        if target.contains("arm-linux-android") {
-            need_cmd("adb".as_ref());
-        }
     }
 
     for host in build.flags.host.iter() {
diff --git a/src/ci/docker/android/Dockerfile b/src/ci/docker/android/Dockerfile
deleted file mode 100644 (file)
index aa30dc1..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-FROM ubuntu:16.04
-
-RUN dpkg --add-architecture i386 && \
-    apt-get update && \
-    apt-get install -y --no-install-recommends \
-  g++ \
-  make \
-  file \
-  curl \
-  ca-certificates \
-  python2.7 \
-  git \
-  cmake \
-  unzip \
-  expect \
-  openjdk-9-jre \
-  sudo \
-  libstdc++6:i386 \
-  xz-utils \
-  libssl-dev \
-  pkg-config
-
-WORKDIR /android/
-ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools
-
-COPY install-ndk.sh install-sdk.sh accept-licenses.sh /android/
-RUN sh /android/install-ndk.sh
-RUN sh /android/install-sdk.sh
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-    dpkg -i dumb-init_*.deb && \
-    rm dumb-init_*.deb
-
-COPY start-emulator.sh /android/
-
-ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
-
-RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-unknown-linux-musl && \
-      chmod +x /usr/local/bin/sccache
-
-ENV TARGETS=arm-linux-androideabi
-ENV TARGETS=$TARGETS,i686-linux-android
-ENV TARGETS=$TARGETS,aarch64-linux-android
-ENV TARGETS=$TARGETS,armv7-linux-androideabi
-
-ENV RUST_CONFIGURE_ARGS \
-      --target=$TARGETS \
-      --arm-linux-androideabi-ndk=/android/ndk-arm-9 \
-      --armv7-linux-androideabi-ndk=/android/ndk-arm-9 \
-      --i686-linux-android-ndk=/android/ndk-x86-9 \
-      --aarch64-linux-android-ndk=/android/ndk-aarch64
-
-# Just a smoke test in dist to see if this works for now, we should expand this
-# to all the targets above eventually.
-ENV SCRIPT \
-  python2.7 ../x.py test --target arm-linux-androideabi && \
-  python2.7 ../x.py dist --target $TARGETS
diff --git a/src/ci/docker/android/accept-licenses.sh b/src/ci/docker/android/accept-licenses.sh
deleted file mode 100755 (executable)
index 8d8f60a..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/expect -f
-# ignore-license
-
-set timeout 1800
-set cmd [lindex $argv 0]
-set licenses [lindex $argv 1]
-
-spawn {*}$cmd
-expect {
-  "Do you accept the license '*'*" {
-        exp_send "y\r"
-        exp_continue
-  }
-  eof
-}
diff --git a/src/ci/docker/android/install-ndk.sh b/src/ci/docker/android/install-ndk.sh
deleted file mode 100644 (file)
index 418ce69..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-cpgdb() {
-  cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb /android/$1/bin/$2-gdb
-  cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb-orig /android/$1/bin/gdb-orig
-  cp -r android-ndk-r11c/prebuilt/linux-x86_64/share /android/$1/share
-}
-
-# Prep the Android NDK
-#
-# See https://github.com/servo/servo/wiki/Building-for-Android
-curl -O https://dl.google.com/android/repository/android-ndk-r11c-linux-x86_64.zip
-unzip -q android-ndk-r11c-linux-x86_64.zip
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
-        --platform=android-9 \
-        --toolchain=arm-linux-androideabi-4.9 \
-        --install-dir=/android/ndk-arm-9 \
-        --ndk-dir=/android/android-ndk-r11c \
-        --arch=arm
-cpgdb ndk-arm-9 arm-linux-androideabi
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
-        --platform=android-21 \
-        --toolchain=aarch64-linux-android-4.9 \
-        --install-dir=/android/ndk-aarch64 \
-        --ndk-dir=/android/android-ndk-r11c \
-        --arch=arm64
-bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
-        --platform=android-9 \
-        --toolchain=x86-4.9 \
-        --install-dir=/android/ndk-x86-9 \
-        --ndk-dir=/android/android-ndk-r11c \
-        --arch=x86
-
-rm -rf ./android-ndk-r11c-linux-x86_64.zip ./android-ndk-r11c
diff --git a/src/ci/docker/android/install-sdk.sh b/src/ci/docker/android/install-sdk.sh
deleted file mode 100644 (file)
index 2db1d46..0000000
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-
-# Prep the SDK and emulator
-#
-# Note that the update process requires that we accept a bunch of licenses, and
-# we can't just pipe `yes` into it for some reason, so we take the same strategy
-# located in https://github.com/appunite/docker by just wrapping it in a script
-# which apparently magically accepts the licenses.
-
-mkdir sdk
-curl https://dl.google.com/android/android-sdk_r24.4-linux.tgz | \
-    tar xzf - -C sdk --strip-components=1
-
-filter="platform-tools,android-18"
-filter="$filter,sys-img-armeabi-v7a-android-18"
-
-./accept-licenses.sh "android - update sdk -a --no-ui --filter $filter"
-
-echo "no" | android create avd \
-                --name arm-18 \
-                --target android-18 \
-                --abi armeabi-v7a
diff --git a/src/ci/docker/android/start-emulator.sh b/src/ci/docker/android/start-emulator.sh
deleted file mode 100755 (executable)
index 24c477d..0000000
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-set -ex
-ANDROID_EMULATOR_FORCE_32BIT=true \
-  nohup nohup emulator @arm-18 -no-window -partition-size 2047 \
-  0<&- &>/dev/null &
-exec "$@"
diff --git a/src/ci/docker/arm-android/Dockerfile b/src/ci/docker/arm-android/Dockerfile
new file mode 100644 (file)
index 0000000..4c89ce1
--- /dev/null
@@ -0,0 +1,46 @@
+FROM ubuntu:16.04
+
+RUN dpkg --add-architecture i386 && \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+  g++ \
+  make \
+  file \
+  curl \
+  ca-certificates \
+  python2.7 \
+  git \
+  cmake \
+  unzip \
+  expect \
+  openjdk-9-jre \
+  sudo \
+  libstdc++6:i386 \
+  xz-utils \
+  libssl-dev \
+  pkg-config
+
+WORKDIR /android/
+ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools
+
+COPY install-ndk.sh install-sdk.sh accept-licenses.sh /android/
+RUN sh /android/install-ndk.sh
+RUN sh /android/install-sdk.sh
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+    dpkg -i dumb-init_*.deb && \
+    rm dumb-init_*.deb
+
+COPY start-emulator.sh /android/
+
+ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
+
+RUN curl -o /usr/local/bin/sccache \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-24-sccache-x86_64-unknown-linux-gnu && \
+      chmod +x /usr/local/bin/sccache
+
+ENV RUST_CONFIGURE_ARGS \
+      --target=arm-linux-androideabi \
+      --arm-linux-androideabi-ndk=/android/ndk-arm-9
+
+ENV SCRIPT python2.7 ../x.py test --target arm-linux-androideabi
diff --git a/src/ci/docker/arm-android/accept-licenses.sh b/src/ci/docker/arm-android/accept-licenses.sh
new file mode 100755 (executable)
index 0000000..8d8f60a
--- /dev/null
@@ -0,0 +1,15 @@
+#!/usr/bin/expect -f
+# ignore-license
+
+set timeout 1800
+set cmd [lindex $argv 0]
+set licenses [lindex $argv 1]
+
+spawn {*}$cmd
+expect {
+  "Do you accept the license '*'*" {
+        exp_send "y\r"
+        exp_continue
+  }
+  eof
+}
diff --git a/src/ci/docker/arm-android/install-ndk.sh b/src/ci/docker/arm-android/install-ndk.sh
new file mode 100644 (file)
index 0000000..389ec06
--- /dev/null
@@ -0,0 +1,33 @@
+#!/bin/sh
+# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+cpgdb() {
+  cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb /android/$1/bin/$2-gdb
+  cp android-ndk-r11c/prebuilt/linux-x86_64/bin/gdb-orig /android/$1/bin/gdb-orig
+  cp -r android-ndk-r11c/prebuilt/linux-x86_64/share /android/$1/share
+}
+
+# Prep the Android NDK
+#
+# See https://github.com/servo/servo/wiki/Building-for-Android
+curl -O https://dl.google.com/android/repository/android-ndk-r11c-linux-x86_64.zip
+unzip -q android-ndk-r11c-linux-x86_64.zip
+bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
+        --platform=android-9 \
+        --toolchain=arm-linux-androideabi-4.9 \
+        --install-dir=/android/ndk-arm-9 \
+        --ndk-dir=/android/android-ndk-r11c \
+        --arch=arm
+cpgdb ndk-arm-9 arm-linux-androideabi
+
+rm -rf ./android-ndk-r11c-linux-x86_64.zip ./android-ndk-r11c
diff --git a/src/ci/docker/arm-android/install-sdk.sh b/src/ci/docker/arm-android/install-sdk.sh
new file mode 100644 (file)
index 0000000..2db1d46
--- /dev/null
@@ -0,0 +1,33 @@
+#!/bin/sh
+# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+# Prep the SDK and emulator
+#
+# Note that the update process requires that we accept a bunch of licenses, and
+# we can't just pipe `yes` into it for some reason, so we take the same strategy
+# located in https://github.com/appunite/docker by just wrapping it in a script
+# which apparently magically accepts the licenses.
+
+mkdir sdk
+curl https://dl.google.com/android/android-sdk_r24.4-linux.tgz | \
+    tar xzf - -C sdk --strip-components=1
+
+filter="platform-tools,android-18"
+filter="$filter,sys-img-armeabi-v7a-android-18"
+
+./accept-licenses.sh "android - update sdk -a --no-ui --filter $filter"
+
+echo "no" | android create avd \
+                --name arm-18 \
+                --target android-18 \
+                --abi armeabi-v7a
diff --git a/src/ci/docker/arm-android/start-emulator.sh b/src/ci/docker/arm-android/start-emulator.sh
new file mode 100755 (executable)
index 0000000..24c477d
--- /dev/null
@@ -0,0 +1,16 @@
+#!/bin/sh
+# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+ANDROID_EMULATOR_FORCE_32BIT=true \
+  nohup nohup emulator @arm-18 -no-window -partition-size 2047 \
+  0<&- &>/dev/null &
+exec "$@"
diff --git a/src/ci/docker/dist-android/Dockerfile b/src/ci/docker/dist-android/Dockerfile
new file mode 100644 (file)
index 0000000..6d433cc
--- /dev/null
@@ -0,0 +1,50 @@
+FROM ubuntu:16.04
+
+RUN dpkg --add-architecture i386 && \
+    apt-get update && \
+    apt-get install -y --no-install-recommends \
+  g++ \
+  make \
+  file \
+  curl \
+  ca-certificates \
+  python2.7 \
+  git \
+  cmake \
+  unzip \
+  expect \
+  openjdk-9-jre \
+  sudo \
+  libstdc++6:i386 \
+  xz-utils \
+  libssl-dev \
+  pkg-config
+
+WORKDIR /android/
+ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools
+
+COPY install-ndk.sh /android/
+RUN sh /android/install-ndk.sh
+
+RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
+    dpkg -i dumb-init_*.deb && \
+    rm dumb-init_*.deb
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+
+RUN curl -o /usr/local/bin/sccache \
+      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-02-25-sccache-x86_64-unknown-linux-musl && \
+      chmod +x /usr/local/bin/sccache
+
+ENV TARGETS=arm-linux-androideabi
+ENV TARGETS=$TARGETS,i686-linux-android
+ENV TARGETS=$TARGETS,aarch64-linux-android
+ENV TARGETS=$TARGETS,armv7-linux-androideabi
+
+ENV RUST_CONFIGURE_ARGS \
+      --target=$TARGETS \
+      --arm-linux-androideabi-ndk=/android/ndk-arm-9 \
+      --armv7-linux-androideabi-ndk=/android/ndk-arm-9 \
+      --i686-linux-android-ndk=/android/ndk-x86-9 \
+      --aarch64-linux-android-ndk=/android/ndk-aarch64
+
+ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
diff --git a/src/ci/docker/dist-android/install-ndk.sh b/src/ci/docker/dist-android/install-ndk.sh
new file mode 100644 (file)
index 0000000..19c1b94
--- /dev/null
@@ -0,0 +1,38 @@
+#!/bin/sh
+# Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+# Prep the Android NDK
+#
+# See https://github.com/servo/servo/wiki/Building-for-Android
+curl -O https://dl.google.com/android/repository/android-ndk-r11c-linux-x86_64.zip
+unzip -q android-ndk-r11c-linux-x86_64.zip
+bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
+        --platform=android-9 \
+        --toolchain=arm-linux-androideabi-4.9 \
+        --install-dir=/android/ndk-arm-9 \
+        --ndk-dir=/android/android-ndk-r11c \
+        --arch=arm
+bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
+        --platform=android-21 \
+        --toolchain=aarch64-linux-android-4.9 \
+        --install-dir=/android/ndk-aarch64 \
+        --ndk-dir=/android/android-ndk-r11c \
+        --arch=arm64
+bash android-ndk-r11c/build/tools/make-standalone-toolchain.sh \
+        --platform=android-9 \
+        --toolchain=x86-4.9 \
+        --install-dir=/android/ndk-x86-9 \
+        --ndk-dir=/android/android-ndk-r11c \
+        --arch=x86
+
+rm -rf ./android-ndk-r11c-linux-x86_64.zip ./android-ndk-r11c
index 06c14a961017e966653343333bdddbf1d6cdd5e8..a7060cd55fe64aeedaf79241c9c87ba291835c16 100644 (file)
@@ -39,8 +39,8 @@ ENV RUST_CONFIGURE_ARGS \
 # way to produce "super compatible" binaries.
 #
 # See: https://github.com/rust-lang/rust/issues/34978
-ENV CFLAGS_i686_unknown_linux_gnu=-Wa,-mrelax-relocations=no \
-    CFLAGS_x86_64_unknown_linux_gnu=-Wa,-mrelax-relocations=no
+ENV CFLAGS_i686_unknown_linux_musl=-Wa,-mrelax-relocations=no \
+    CFLAGS_x86_64_unknown_linux_musl=-Wa,-mrelax-relocations=no
 
 ENV SCRIPT \
       python2.7 ../x.py test \
index 8bca13c28f0bd2b2def24b6dc7b1ed8bbdc28abd..042d9af9717d052a705f8f183bb4e998add2b0d4 100644 (file)
@@ -349,8 +349,8 @@ to it.
 
 ## Lifetime Elision
 
-Rust supports powerful local type inference in the bodies of functions but not in their item signatures. 
-It's forbidden to allow reasoning about types based on the item signature alone
+Rust supports powerful local type inference in the bodies of functions, but it
+deliberately does not perform any reasoning about types for item signatures.
 However, for ergonomic reasons, a very restricted secondary inference algorithm called 
 “lifetime elision” does apply when judging lifetimes. Lifetime elision is concerned solely with inferring 
 lifetime parameters using three easily memorizable and unambiguous rules. This means lifetime elision 
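The reworded paragraph above contrasts inference in function bodies with its deliberate absence in item signatures. A small sketch of what the elision rules do in practice (the function is illustrative, not taken from the book chapter):

```rust
// With elision: the single input lifetime is assigned to the output.
fn first_word(s: &str) -> &str {
    s.split_whitespace().next().unwrap_or("")
}

// The same signature written out in full, as the elision rules expand it.
fn first_word_explicit<'a>(s: &'a str) -> &'a str {
    s.split_whitespace().next().unwrap_or("")
}
```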
index 4f5a6a7c0332d75dd3a534d206af8ae5702f7bce..e02b5a6cdd79b8c27ad66bf7c675bd71c47880e5 100644 (file)
@@ -128,7 +128,7 @@ pub fn hello_world(input: TokenStream) -> TokenStream {
 So there is a lot going on here. We have introduced two new crates: [`syn`] and
 [`quote`]. As you may have noticed, `input: TokenSteam` is immediately converted
 to a `String`. This `String` is a string representation of the Rust code for which
-we are deriving `HelloWorld` for. At the moment, the only thing you can do with a
+we are deriving `HelloWorld`. At the moment, the only thing you can do with a
 `TokenStream` is convert it to a string. A richer API will exist in the future.
 
 So what we really need is to be able to _parse_ Rust code into something
index 9f61e5b66289bfb90ce6d55dc4b933bd518a0de0..3efa4f0e0a8d85898551218a9e9212d305e0d58b 100644 (file)
@@ -88,7 +88,7 @@ fn main() {
 }
 ```
 
-Your structure can still contain `&mut` pointers, which will let
+Your structure can still contain `&mut` references, which will let
 you do some kinds of mutation:
 
 ```rust
index ee8ae9a9839e9f409b38d15cce65baa1df64d908..e876b4aac0dfa594181fb943bd226eb374633eaa 100644 (file)
@@ -1,94 +1,94 @@
 [The Unstable Book](the-unstable-book.md)
 
-- [asm](asm.md)
-- [alloc_system](alloc-system.md)
+- [abi_msp430_interrupt](abi-msp430-interrupt.md)
+- [abi_ptx](abi-ptx.md)
+- [abi_sysv64](abi-sysv64.md)
+- [abi_unadjusted](abi-unadjusted.md)
+- [abi_vectorcall](abi-vectorcall.md)
+- [advanced_slice_patterns](advanced-slice-patterns.md)
 - [alloc_jemalloc](alloc-jemalloc.md)
-- [test](test.md)
+- [alloc_system](alloc-system.md)
+- [allocator](allocator.md)
+- [allow_internal_unstable](allow-internal-unstable.md)
+- [asm](asm.md)
+- [associated_consts](associated-consts.md)
+- [associated_type_defaults](associated-type-defaults.md)
+- [attr_literals](attr-literals.md)
+- [box_patterns](box-patterns.md)
+- [box_syntax](box-syntax.md)
+- [cfg_target_feature](cfg-target-feature.md)
+- [cfg_target_has_atomic](cfg-target-has-atomic.md)
+- [cfg_target_thread_local](cfg-target-thread-local.md)
+- [cfg_target_vendor](cfg-target-vendor.md)
+- [compiler_builtins](compiler-builtins.md)
 - [concat_idents](concat-idents.md)
-- [link_args](link-args.md)
-- [log_syntax](log-syntax.md)
-- [non_ascii_idents](non-ascii-idents.md)
-- [plugin_registrar](plugin-registrar.md)
-- [thread_local](thread-local.md)
-- [trace_macros](trace-macros.md)
+- [conservative_impl_trait](conservative-impl-trait.md)
+- [const_fn](const-fn.md)
+- [const_indexing](const-indexing.md)
+- [custom_attribute](custom-attribute.md)
+- [custom_derive](custom-derive.md)
+- [default_type_parameter_fallback](default-type-parameter-fallback.md)
+- [drop_types_in_const](drop-types-in-const.md)
+- [dropck_eyepatch](dropck-eyepatch.md)
+- [dropck_parametricity](dropck-parametricity.md)
+- [exclusive_range_pattern](exclusive-range-pattern.md)
+- [field_init_shorthand](field-init-shorthand.md)
+- [fundamental](fundamental.md)
+- [generic_param_attrs](generic-param-attrs.md)
+- [i128_type](i128-type.md)
+- [inclusive_range_syntax](inclusive-range-syntax.md)
 - [intrinsics](intrinsics.md)
 - [lang_items](lang-items.md)
+- [link_args](link-args.md)
+- [link_cfg](link-cfg.md)
 - [link_llvm_intrinsics](link-llvm-intrinsics.md)
 - [linkage](linkage.md)
-- [quote](quote.md)
-- [simd](simd.md)
-- [rustc_diagnostic_macros](rustc-diagnostic-macros.md)
-- [advanced_slice_patterns](advanced-slice-patterns.md)
-- [box_syntax](box-syntax.md)
-- [placement_in_syntax](placement-in-syntax.md)
-- [unboxed_closures](unboxed-closures.md)
-- [allocator](allocator.md)
-- [fundamental](fundamental.md)
+- [log_syntax](log-syntax.md)
+- [loop_break_value](loop-break-value.md)
+- [macro_reexport](macro-reexport.md)
 - [main](main.md)
+- [naked_functions](naked-functions.md)
 - [needs_allocator](needs-allocator.md)
-- [on_unimplemented](on-unimplemented.md)
-- [plugin](plugin.md)
-- [simd_ffi](simd-ffi.md)
-- [start](start.md)
-- [structural_match](structural-match.md)
-- [panic_runtime](panic-runtime.md)
 - [needs_panic_runtime](needs-panic-runtime.md)
-- [optin_builtin_traits](optin-builtin-traits.md)
-- [macro_reexport](macro-reexport.md)
-- [staged_api](staged-api.md)
+- [never_type](never-type.md)
 - [no_core](no-core.md)
-- [box_patterns](box-patterns.md)
-- [dropck_parametricity](dropck-parametricity.md)
-- [dropck_eyepatch](dropck-eyepatch.md)
-- [custom_attribute](custom-attribute.md)
-- [custom_derive](custom-derive.md)
+- [no_debug](no-debug.md)
+- [non_ascii_idents](non-ascii-idents.md)
+- [omit_gdb_pretty_printer_section](omit-gdb-pretty-printer-section.md)
+- [on_unimplemented](on-unimplemented.md)
+- [optin_builtin_traits](optin-builtin-traits.md)
+- [panic_runtime](panic-runtime.md)
+- [placement_in_syntax](placement-in-syntax.md)
+- [platform_intrinsics](platform-intrinsics.md)
+- [plugin](plugin.md)
+- [plugin_registrar](plugin-registrar.md)
+- [prelude_import](prelude-import.md)
+- [proc_macro](proc-macro.md)
+- [pub_restricted](pub-restricted.md)
+- [quote](quote.md)
+- [relaxed_adts](relaxed-adts.md)
+- [repr_simd](repr-simd.md)
 - [rustc_attrs](rustc-attrs.md)
-- [allow_internal_unstable](allow-internal-unstable.md)
+- [rustc_diagnostic_macros](rustc-diagnostic-macros.md)
+- [sanitizer_runtime](sanitizer-runtime.md)
+- [simd](simd.md)
+- [simd_ffi](simd-ffi.md)
 - [slice_patterns](slice-patterns.md)
-- [associated_consts](associated-consts.md)
-- [const_fn](const-fn.md)
-- [const_indexing](const-indexing.md)
-- [prelude_import](prelude-import.md)
+- [specialization](specialization.md)
+- [staged_api](staged-api.md)
+- [start](start.md)
+- [static_nobundle](static-nobundle.md)
 - [static_recursion](static-recursion.md)
-- [default_type_parameter_fallback](default-type-parameter-fallback.md)
-- [associated_type_defaults](associated-type-defaults.md)
-- [repr_simd](repr-simd.md)
-- [cfg_target_feature](cfg-target-feature.md)
-- [platform_intrinsics](platform-intrinsics.md)
-- [unwind_attributes](unwind-attributes.md)
-- [naked_functions](naked-functions.md)
-- [no_debug](no-debug.md)
-- [omit_gdb_pretty_printer_section](omit-gdb-pretty-printer-section.md)
-- [cfg_target_vendor](cfg-target-vendor.md)
 - [stmt_expr_attributes](stmt-expr-attributes.md)
+- [struct_field_attributes](struct-field-attributes.md)
+- [structural_match](structural-match.md)
+- [target_feature](target-feature.md)
+- [test](test.md)
+- [thread_local](thread-local.md)
+- [trace_macros](trace-macros.md)
 - [type_ascription](type-ascription.md)
-- [cfg_target_thread_local](cfg-target-thread-local.md)
-- [abi_vectorcall](abi-vectorcall.md)
-- [inclusive_range_syntax](inclusive-range-syntax.md)
-- [exclusive_range_pattern](exclusive-range-pattern.md)
-- [specialization](specialization.md)
-- [pub_restricted](pub-restricted.md)
-- [drop_types_in_const](drop-types-in-const.md)
-- [cfg_target_has_atomic](cfg-target-has-atomic.md)
-- [conservative_impl_trait](conservative-impl-trait.md)
-- [relaxed_adts](relaxed-adts.md)
-- [never_type](never-type.md)
-- [attr_literals](attr-literals.md)
-- [abi_sysv64](abi-sysv64.md)
+- [unboxed_closures](unboxed-closures.md)
 - [untagged_unions](untagged-unions.md)
-- [compiler_builtins](compiler-builtins.md)
-- [generic_param_attrs](generic-param-attrs.md)
-- [field_init_shorthand](field-init-shorthand.md)
-- [windows_subsystem](windows-subsystem.md)
-- [link_cfg](link-cfg.md)
+- [unwind_attributes](unwind-attributes.md)
 - [use_extern_macros](use-extern-macros.md)
-- [loop_break_value](loop-break-value.md)
-- [target_feature](target-feature.md)
-- [abi_ptx](abi-ptx.md)
-- [i128_type](i128-type.md)
-- [abi_unadjusted](abi-unadjusted.md)
-- [proc_macro](proc-macro.md)
-- [struct_field_attributes](struct-field-attributes.md)
-- [static_nobundle](static-nobundle.md)
-- [abi_msp430_interrupt](abi-msp430-interrupt.md)
-- [sanitizer_runtime](sanitizer-runtime.md)
+- [windows_subsystem](windows-subsystem.md)
index 1df4ace377707d286fafc9a243a65c9557d30d12..e4b94a1d70ee4189fe3ce610721aa52cf0bd096c 100644 (file)
@@ -14,7 +14,7 @@
 
 //! Range syntax.
 
-use core::ops::{RangeFull, Range, RangeTo, RangeFrom};
+use core::ops::{RangeFull, Range, RangeTo, RangeFrom, RangeInclusive, RangeToInclusive};
 use Bound::{self, Excluded, Included, Unbounded};
 
 /// **RangeArgument** is implemented by Rust's built-in range types, produced
@@ -105,6 +105,32 @@ fn end(&self) -> Bound<&T> {
     }
 }
 
+#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
+impl<T> RangeArgument<T> for RangeInclusive<T> {
+    fn start(&self) -> Bound<&T> {
+        match *self {
+            RangeInclusive::Empty{ ref at }            => Included(at),
+            RangeInclusive::NonEmpty { ref start, .. } => Included(start),
+        }
+    }
+    fn end(&self) -> Bound<&T> {
+        match *self {
+            RangeInclusive::Empty{ ref at }            => Excluded(at),
+            RangeInclusive::NonEmpty { ref end, .. }   => Included(end),
+        }
+    }
+}
+
+#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")]
+impl<T> RangeArgument<T> for RangeToInclusive<T> {
+    fn start(&self) -> Bound<&T> {
+        Unbounded
+    }
+    fn end(&self) -> Bound<&T> {
+        Included(&self.end)
+    }
+}
+
 impl<T> RangeArgument<T> for (Bound<T>, Bound<T>) {
     fn start(&self) -> Bound<&T> {
         match *self {
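The impls added above allow inclusive ranges to be passed wherever a `RangeArgument` is expected. A small usage sketch, written with today's stable `..=` syntax rather than the unstable `...` syntax used elsewhere in this commit:

```rust
use std::collections::BTreeMap;

fn main() {
    let map: BTreeMap<i32, i32> = (0..10).map(|i| (i, i * i)).collect();

    // Both endpoints of the inclusive range are part of the query.
    let squares: Vec<i32> = map.range(3..=5).map(|(_, v)| *v).collect();
    assert_eq!(squares, vec![9, 16, 25]);
}
```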
index 6839b698a5611bfede3d95c4994efb394595c22b..4b37aef860d7212e38d500aa3061040cb76efa4d 100644 (file)
@@ -1483,6 +1483,15 @@ fn from_iter<I: IntoIterator<Item = char>>(iter: I) -> String {
     }
 }
 
+#[stable(feature = "string_from_iter_by_ref", since = "1.17.0")]
+impl<'a> FromIterator<&'a char> for String {
+    fn from_iter<I: IntoIterator<Item = &'a char>>(iter: I) -> String {
+        let mut buf = String::new();
+        buf.extend(iter);
+        buf
+    }
+}
+
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a> FromIterator<&'a str> for String {
     fn from_iter<I: IntoIterator<Item = &'a str>>(iter: I) -> String {
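A minimal usage sketch of the `FromIterator<&char>` impl added above: iterating over a slice of `char` yields `&char`, which can now be collected into a `String` directly:

```rust
fn main() {
    let chars = ['h', 'o', 'l', 'a'];

    // chars.iter() yields &char; the new impl makes this collect work.
    let s: String = chars.iter().collect();
    assert_eq!(s, "hola");
}
```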
index f33923f99631911fc4752262235314528361a9b9..2c899d96940ece9e222adc3ff21600f7f164c708 100644 (file)
@@ -178,6 +178,43 @@ fn test_range_small() {
     assert_eq!(j, size - 2);
 }
 
+#[test]
+fn test_range_inclusive() {
+    let size = 500;
+
+    let map: BTreeMap<_, _> = (0...size).map(|i| (i, i)).collect();
+
+    fn check<'a, L, R>(lhs: L, rhs: R)
+        where L: IntoIterator<Item=(&'a i32, &'a i32)>,
+              R: IntoIterator<Item=(&'a i32, &'a i32)>,
+    {
+        let lhs: Vec<_> = lhs.into_iter().collect();
+        let rhs: Vec<_> = rhs.into_iter().collect();
+        assert_eq!(lhs, rhs);
+    }
+
+    check(map.range(size + 1...size + 1), vec![]);
+    check(map.range(size...size), vec![(&size, &size)]);
+    check(map.range(size...size + 1), vec![(&size, &size)]);
+    check(map.range(0...0), vec![(&0, &0)]);
+    check(map.range(0...size - 1), map.range(..size));
+    check(map.range(-1...-1), vec![]);
+    check(map.range(-1...size), map.range(..));
+    check(map.range(...size), map.range(..));
+    check(map.range(...200), map.range(..201));
+    check(map.range(5...8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]);
+    check(map.range(-1...0), vec![(&0, &0)]);
+    check(map.range(-1...2), vec![(&0, &0), (&1, &1), (&2, &2)]);
+}
+
+#[test]
+fn test_range_inclusive_max_value() {
+    let max = ::std::usize::MAX;
+    let map: BTreeMap<_, _> = vec![(max, 0)].into_iter().collect();
+
+    assert_eq!(map.range(max...max).collect::<Vec<_>>(), &[(&max, &0)]);
+}
+
 #[test]
 fn test_range_equal_empty_cases() {
     let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
index 57e3c2df059e122775f7d98f0c082980a7a4c7bb..849d2401691691eb013d0e57736fd29db0d5a496 100644 (file)
@@ -14,6 +14,7 @@
 #![feature(binary_heap_peek_mut_pop)]
 #![feature(box_syntax)]
 #![feature(btree_range)]
+#![feature(inclusive_range_syntax)]
 #![feature(collection_placement)]
 #![feature(collections)]
 #![feature(collections_bound)]
index edeedf1d40baf3d37b38d6c36928945c7828558a..06d70800d392560e8c99e44fd65edc878c95a114 100644 (file)
@@ -507,6 +507,56 @@ fn test_drain_range() {
     assert_eq!(v, &[(), ()]);
 }
 
+#[test]
+fn test_drain_inclusive_range() {
+    let mut v = vec!['a', 'b', 'c', 'd', 'e'];
+    for _ in v.drain(1...3) {
+    }
+    assert_eq!(v, &['a', 'e']);
+
+    let mut v: Vec<_> = (0...5).map(|x| x.to_string()).collect();
+    for _ in v.drain(1...5) {
+    }
+    assert_eq!(v, &["0".to_string()]);
+
+    let mut v: Vec<String> = (0...5).map(|x| x.to_string()).collect();
+    for _ in v.drain(0...5) {
+    }
+    assert_eq!(v, Vec::<String>::new());
+
+    let mut v: Vec<_> = (0...5).map(|x| x.to_string()).collect();
+    for _ in v.drain(0...3) {
+    }
+    assert_eq!(v, &["4".to_string(), "5".to_string()]);
+
+    let mut v: Vec<_> = (0...1).map(|x| x.to_string()).collect();
+    for _ in v.drain(...0) {
+    }
+    assert_eq!(v, &["1".to_string()]);
+}
+
+#[test]
+fn test_drain_max_vec_size() {
+    let mut v = Vec::<()>::with_capacity(usize::max_value());
+    unsafe { v.set_len(usize::max_value()); }
+    for _ in v.drain(usize::max_value() - 1..) {
+    }
+    assert_eq!(v.len(), usize::max_value() - 1);
+
+    let mut v = Vec::<()>::with_capacity(usize::max_value());
+    unsafe { v.set_len(usize::max_value()); }
+    for _ in v.drain(usize::max_value() - 1...usize::max_value() - 1) {
+    }
+    assert_eq!(v.len(), usize::max_value() - 1);
+}
+
+#[test]
+#[should_panic]
+fn test_drain_inclusive_out_of_bounds() {
+    let mut v = vec![1, 2, 3, 4, 5];
+    v.drain(5...5);
+}
+
 #[test]
 fn test_into_boxed_slice() {
     let xs = vec![1, 2, 3];
index e6c9e1ed38e4fb49f5dba56764538ad5bc6732aa..dc5a662cdb0445fea0fca104923710cb80fbaec8 100644 (file)
@@ -65,12 +65,15 @@ pub mod rt {
 /// A collection of methods that are required to format a message into a stream.
 ///
 /// This trait is the type which this modules requires when formatting
-/// information. This is similar to the standard library's `io::Write` trait,
+/// information. This is similar to the standard library's [`io::Write`] trait,
 /// but it is only intended for use in libcore.
 ///
 /// This trait should generally not be implemented by consumers of the standard
-/// library. The `write!` macro accepts an instance of `io::Write`, and the
-/// `io::Write` trait is favored over implementing this trait.
+/// library. The [`write!`] macro accepts an instance of [`io::Write`], and the
+/// [`io::Write`] trait is favored over implementing this trait.
+///
+/// [`write!`]: ../../std/macro.write.html
+/// [`io::Write`]: ../../std/io/trait.Write.html
 #[stable(feature = "rust1", since = "1.0.0")]
 pub trait Write {
     /// Writes a slice of bytes into this writer, returning whether the write
@@ -82,29 +85,79 @@ pub trait Write {
     ///
     /// # Errors
     ///
-    /// This function will return an instance of `Error` on error.
+    /// This function will return an instance of [`Error`] on error.
+    ///
+    /// [`Error`]: struct.Error.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::fmt::{Error, Write};
+    ///
+    /// fn writer<W: Write>(f: &mut W, s: &str) -> Result<(), Error> {
+    ///     f.write_str(s)
+    /// }
+    ///
+    /// let mut buf = String::new();
+    /// writer(&mut buf, "hola").unwrap();
+    /// assert_eq!(&buf, "hola");
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     fn write_str(&mut self, s: &str) -> Result;
 
-    /// Writes a `char` into this writer, returning whether the write succeeded.
+    /// Writes a [`char`] into this writer, returning whether the write succeeded.
     ///
-    /// A single `char` may be encoded as more than one byte.
+    /// A single [`char`] may be encoded as more than one byte.
     /// This method can only succeed if the entire byte sequence was successfully
     /// written, and this method will not return until all data has been
     /// written or an error occurs.
     ///
     /// # Errors
     ///
-    /// This function will return an instance of `Error` on error.
+    /// This function will return an instance of [`Error`] on error.
+    ///
+    /// [`char`]: ../../std/primitive.char.html
+    /// [`Error`]: struct.Error.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::fmt::{Error, Write};
+    ///
+    /// fn writer<W: Write>(f: &mut W, c: char) -> Result<(), Error> {
+    ///     f.write_char(c)
+    /// }
+    ///
+    /// let mut buf = String::new();
+    /// writer(&mut buf, 'a').unwrap();
+    /// writer(&mut buf, 'b').unwrap();
+    /// assert_eq!(&buf, "ab");
+    /// ```
     #[stable(feature = "fmt_write_char", since = "1.1.0")]
     fn write_char(&mut self, c: char) -> Result {
         self.write_str(c.encode_utf8(&mut [0; 4]))
     }
 
-    /// Glue for usage of the `write!` macro with implementors of this trait.
+    /// Glue for usage of the [`write!`] macro with implementors of this trait.
     ///
     /// This method should generally not be invoked manually, but rather through
-    /// the `write!` macro itself.
+    /// the [`write!`] macro itself.
+    ///
+    /// [`write!`]: ../../std/macro.write.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::fmt::{Error, Write};
+    ///
+    /// fn writer<W: Write>(f: &mut W, s: &str) -> Result<(), Error> {
+    ///     f.write_fmt(format_args!("{}", s))
+    /// }
+    ///
+    /// let mut buf = String::new();
+    /// writer(&mut buf, "world").unwrap();
+    /// assert_eq!(&buf, "world");
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     fn write_fmt(&mut self, args: Arguments) -> Result {
         // This Adapter is needed to allow `self` (of type `&mut
index f962c888f42cc4da71e568a567a40f3cd563d5dc..b33caefbcd2ecbac73e3b73a7b1d88ca4af3f2a5 100644 (file)
@@ -173,8 +173,7 @@ fn from_str(src: &str) -> Result<TokenStream, LexError> {
         __internal::with_parse_sess(|sess| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
-                .map_err(parse_to_lex_err));
+            let tts = parse::parse_tts_from_source_str(name, src, sess);
 
             Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
         })
index 300b4df89294354edcafa092abe5601a700245f6..dc7c96a4e27672d88fb2f501d5d2b7b0e69b322d 100644 (file)
@@ -119,7 +119,6 @@ fn quote(&self) -> TokenStream {
                 ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
                                                             (quote delimited))
             },
-            _ => panic!("unexpected `TokenTree::Sequence` in `qquote`"),
         }
     }
 }
index b1b1b849437d1464339bce0655d6d716bf447173..85b4ddcdd719099a90bd1cd25abc93827b32b7a2 100644 (file)
@@ -1725,6 +1725,68 @@ fn main() {
 specified exit code, use `std::process::exit`.
 "##,
 
+E0591: r##"
+Per [RFC 401][rfc401], if you have a function declaration `foo`:
+
+```rust,ignore
+// For the purposes of this explanation, all of these
+// different kinds of `fn` declarations are equivalent:
+fn foo(x: i32) { ... }
+extern "C" fn foo(x: i32);
+impl i32 { fn foo(x: self) { ... } }
+```
+
+the type of `foo` is **not** `fn(i32)`, as one might expect.
+Rather, it is a unique, zero-sized marker type written here as `typeof(foo)`.
+However, `typeof(foo)` can be _coerced_ to a function pointer `fn(i32)`,
+so you rarely notice this:
+
+```rust,ignore
+let x: fn(i32) = foo; // OK, coerces
+```
+
+The reason that this matters is that the type `fn(i32)` is not specific to
+any particular function: it's a function _pointer_. So calling `x()` results
+in a virtual call, whereas `foo()` is statically dispatched, because the type
+of `foo` tells us precisely what function is being called.
+
+As noted above, coercions mean that most code doesn't have to be
+concerned with this distinction. However, you can tell the difference
+when using **transmute** to convert a fn item into a fn pointer.
+
+This is sometimes done as part of an FFI:
+
+```rust,ignore
+extern "C" fn foo(userdata: Box<i32>) {
+   ...
+}
+
+let f: extern "C" fn(*mut i32) = transmute(foo);
+callback(f);
+
+```
+
+Here, transmute is being used to convert the types of the fn arguments.
+This pattern is incorrect because the type of `foo` is a function
+**item** (`typeof(foo)`), which is zero-sized, and the target type (`fn()`)
+is a function pointer, which is not zero-sized.
+This pattern should be rewritten. There are a few possible ways to do this:
+- change the original fn declaration to match the expected signature,
+  and do the cast in the fn body (the preferred option)
+- cast the fn item to a fn pointer before calling transmute, as shown here:
+  - `let f: extern "C" fn(*mut i32) = transmute(foo as extern "C" fn(_))`
+  - `let f: extern "C" fn(*mut i32) = transmute(foo as usize) /* works too */`
+
+The same applies to transmutes to `*mut fn()`, which were observed in practice.
+Note though that use of this type is generally incorrect.
+The intention is typically to describe a function pointer, but just `fn()`
+alone suffices for that. `*mut fn()` is a pointer to a fn pointer.
+(Since these values are typically just passed to C code, however, this rarely
+makes a difference in practice.)
+
+[rfc401]: https://github.com/rust-lang/rfcs/blob/master/text/0401-coercions.md
+"##,
+
 }
 
 
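The E0591 explanation above rests on the distinction between a zero-sized fn item type and a pointer-sized fn pointer. A small sketch that makes the size difference observable (illustrative only, not part of the diagnostic text):

```rust
fn foo(_x: i32) {}

fn main() {
    // The fn item `foo` has its own zero-sized type...
    assert_eq!(std::mem::size_of_val(&foo), 0);

    // ...which coerces to the pointer-sized fn pointer type `fn(i32)`.
    let f: fn(i32) = foo;
    assert_eq!(std::mem::size_of_val(&f), std::mem::size_of::<usize>());
    f(42);
}
```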
diff --git a/src/librustc/infer/error_reporting.rs b/src/librustc/infer/error_reporting.rs
deleted file mode 100644 (file)
index f48ff87..0000000
+++ /dev/null
@@ -1,1147 +0,0 @@
-// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Error Reporting Code for the inference engine
-//!
-//! Because of the way inference, and in particular region inference,
-//! works, it often happens that errors are not detected until far after
-//! the relevant line of code has been type-checked. Therefore, there is
-//! an elaborate system to track why a particular constraint in the
-//! inference graph arose so that we can explain to the user what gave
-//! rise to a particular error.
-//!
-//! The basis of the system are the "origin" types. An "origin" is the
-//! reason that a constraint or inference variable arose. There are
-//! different "origin" enums for different kinds of constraints/variables
-//! (e.g., `TypeOrigin`, `RegionVariableOrigin`). An origin always has
-//! a span, but also more information so that we can generate a meaningful
-//! error message.
-//!
-//! Having a catalogue of all the different reasons an error can arise is
-//! also useful for other reasons, like cross-referencing FAQs etc, though
-//! we are not really taking advantage of this yet.
-//!
-//! # Region Inference
-//!
-//! Region inference is particularly tricky because it always succeeds "in
-//! the moment" and simply registers a constraint. Then, at the end, we
-//! can compute the full graph and report errors, so we need to be able to
-//! store and later report what gave rise to the conflicting constraints.
-//!
-//! # Subtype Trace
-//!
-//! Determining whether `T1 <: T2` often involves a number of subtypes and
-//! subconstraints along the way. A "TypeTrace" is an extended version
-//! of an origin that traces the types and other values that were being
-//! compared. It is not necessarily comprehensive (in fact, at the time of
-//! this writing it only tracks the root values being compared) but I'd
-//! like to extend it to include significant "waypoints". For example, if
-//! you are comparing `(T1, T2) <: (T3, T4)`, and the problem is that `T2
-//! <: T4` fails, I'd like the trace to include enough information to say
-//! "in the 2nd element of the tuple". Similarly, failures when comparing
-//! arguments or return types in fn types should be able to cite the
-//! specific position, etc.
-//!
-//! # Reality vs plan
-//!
-//! Of course, there is still a LOT of code in typeck that has yet to be
-//! ported to this system, and which relies on string concatenation at the
-//! time of error detection.
-
-use super::InferCtxt;
-use super::TypeTrace;
-use super::SubregionOrigin;
-use super::RegionVariableOrigin;
-use super::ValuePairs;
-use super::region_inference::RegionResolutionError;
-use super::region_inference::ConcreteFailure;
-use super::region_inference::SubSupConflict;
-use super::region_inference::GenericBoundFailure;
-use super::region_inference::GenericKind;
-
-use hir::map as hir_map;
-use hir;
-
-use hir::def_id::DefId;
-use infer;
-use middle::region;
-use traits::{ObligationCause, ObligationCauseCode};
-use ty::{self, TyCtxt, TypeFoldable};
-use ty::{Region, Issue32330};
-use ty::error::TypeError;
-
-use std::fmt;
-use syntax_pos::{Pos, Span};
-use errors::DiagnosticBuilder;
-
-impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
-    pub fn note_and_explain_region(self,
-                                   err: &mut DiagnosticBuilder,
-                                   prefix: &str,
-                                   region: &'tcx ty::Region,
-                                   suffix: &str) {
-        fn item_scope_tag(item: &hir::Item) -> &'static str {
-            match item.node {
-                hir::ItemImpl(..) => "impl",
-                hir::ItemStruct(..) => "struct",
-                hir::ItemUnion(..) => "union",
-                hir::ItemEnum(..) => "enum",
-                hir::ItemTrait(..) => "trait",
-                hir::ItemFn(..) => "function body",
-                _ => "item"
-            }
-        }
-
-        fn trait_item_scope_tag(item: &hir::TraitItem) -> &'static str {
-            match item.node {
-                hir::TraitItemKind::Method(..) => "method body",
-                hir::TraitItemKind::Const(..) |
-                hir::TraitItemKind::Type(..) => "associated item"
-            }
-        }
-
-        fn impl_item_scope_tag(item: &hir::ImplItem) -> &'static str {
-            match item.node {
-                hir::ImplItemKind::Method(..) => "method body",
-                hir::ImplItemKind::Const(..) |
-                hir::ImplItemKind::Type(_) => "associated item"
-            }
-        }
-
-        fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                                        heading: &str, span: Span)
-                                        -> (String, Option<Span>) {
-            let lo = tcx.sess.codemap().lookup_char_pos_adj(span.lo);
-            (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize()),
-             Some(span))
-        }
-
-        let (description, span) = match *region {
-            ty::ReScope(scope) => {
-                let new_string;
-                let unknown_scope = || {
-                    format!("{}unknown scope: {:?}{}.  Please report a bug.",
-                            prefix, scope, suffix)
-                };
-                let span = match scope.span(&self.region_maps, &self.hir) {
-                    Some(s) => s,
-                    None => {
-                        err.note(&unknown_scope());
-                        return;
-                    }
-                };
-                let tag = match self.hir.find(scope.node_id(&self.region_maps)) {
-                    Some(hir_map::NodeBlock(_)) => "block",
-                    Some(hir_map::NodeExpr(expr)) => match expr.node {
-                        hir::ExprCall(..) => "call",
-                        hir::ExprMethodCall(..) => "method call",
-                        hir::ExprMatch(.., hir::MatchSource::IfLetDesugar { .. }) => "if let",
-                        hir::ExprMatch(.., hir::MatchSource::WhileLetDesugar) =>  "while let",
-                        hir::ExprMatch(.., hir::MatchSource::ForLoopDesugar) =>  "for",
-                        hir::ExprMatch(..) => "match",
-                        _ => "expression",
-                    },
-                    Some(hir_map::NodeStmt(_)) => "statement",
-                    Some(hir_map::NodeItem(it)) => item_scope_tag(&it),
-                    Some(hir_map::NodeTraitItem(it)) => trait_item_scope_tag(&it),
-                    Some(hir_map::NodeImplItem(it)) => impl_item_scope_tag(&it),
-                    Some(_) | None => {
-                        err.span_note(span, &unknown_scope());
-                        return;
-                    }
-                };
-                let scope_decorated_tag = match self.region_maps.code_extent_data(scope) {
-                    region::CodeExtentData::Misc(_) => tag,
-                    region::CodeExtentData::CallSiteScope { .. } => {
-                        "scope of call-site for function"
-                    }
-                    region::CodeExtentData::ParameterScope { .. } => {
-                        "scope of function body"
-                    }
-                    region::CodeExtentData::DestructionScope(_) => {
-                        new_string = format!("destruction scope surrounding {}", tag);
-                        &new_string[..]
-                    }
-                    region::CodeExtentData::Remainder(r) => {
-                        new_string = format!("block suffix following statement {}",
-                                             r.first_statement_index);
-                        &new_string[..]
-                    }
-                };
-                explain_span(self, scope_decorated_tag, span)
-            }
-
-            ty::ReFree(ref fr) => {
-                let prefix = match fr.bound_region {
-                    ty::BrAnon(idx) => {
-                        format!("the anonymous lifetime #{} defined on", idx + 1)
-                    }
-                    ty::BrFresh(_) => "an anonymous lifetime defined on".to_owned(),
-                    _ => {
-                        format!("the lifetime {} as defined on",
-                                fr.bound_region)
-                    }
-                };
-
-                let node = fr.scope.node_id(&self.region_maps);
-                let unknown;
-                let tag = match self.hir.find(node) {
-                    Some(hir_map::NodeBlock(_)) |
-                    Some(hir_map::NodeExpr(_)) => "body",
-                    Some(hir_map::NodeItem(it)) => item_scope_tag(&it),
-                    Some(hir_map::NodeTraitItem(it)) => trait_item_scope_tag(&it),
-                    Some(hir_map::NodeImplItem(it)) => impl_item_scope_tag(&it),
-
-                    // this really should not happen, but it does:
-                    // FIXME(#27942)
-                    Some(_) => {
-                        unknown = format!("unexpected node ({}) for scope {:?}.  \
-                                           Please report a bug.",
-                                          self.hir.node_to_string(node), fr.scope);
-                        &unknown
-                    }
-                    None => {
-                        unknown = format!("unknown node for scope {:?}.  \
-                                           Please report a bug.", fr.scope);
-                        &unknown
-                    }
-                };
-                let (msg, opt_span) = explain_span(self, tag, self.hir.span(node));
-                (format!("{} {}", prefix, msg), opt_span)
-            }
-
-            ty::ReStatic => ("the static lifetime".to_owned(), None),
-
-            ty::ReEmpty => ("the empty lifetime".to_owned(), None),
-
-            ty::ReEarlyBound(ref data) => (data.name.to_string(), None),
-
-            // FIXME(#13998) ReSkolemized should probably print like
-            // ReFree rather than dumping Debug output on the user.
-            //
-            // We shouldn't really be having unification failures with ReVar
-            // and ReLateBound though.
-            ty::ReSkolemized(..) |
-            ty::ReVar(_) |
-            ty::ReLateBound(..) |
-            ty::ReErased => {
-                (format!("lifetime {:?}", region), None)
-            }
-        };
-        let message = format!("{}{}{}", prefix, description, suffix);
-        if let Some(span) = span {
-            err.span_note(span, &message);
-        } else {
-            err.note(&message);
-        }
-    }
-}
-
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
-    pub fn report_region_errors(&self,
-                                errors: &Vec<RegionResolutionError<'tcx>>) {
-        debug!("report_region_errors(): {} errors to start", errors.len());
-
-        // try to pre-process the errors, which will group some of them
-        // together into a `ProcessedErrors` group:
-        let errors = self.process_errors(errors);
-
-        debug!("report_region_errors: {} errors after preprocessing", errors.len());
-
-        for error in errors {
-            debug!("report_region_errors: error = {:?}", error);
-            match error.clone() {
-                ConcreteFailure(origin, sub, sup) => {
-                    self.report_concrete_failure(origin, sub, sup).emit();
-                }
-
-                GenericBoundFailure(kind, param_ty, sub) => {
-                    self.report_generic_bound_failure(kind, param_ty, sub);
-                }
-
-                SubSupConflict(var_origin,
-                               sub_origin, sub_r,
-                               sup_origin, sup_r) => {
-                    self.report_sub_sup_conflict(var_origin,
-                                                 sub_origin, sub_r,
-                                                 sup_origin, sup_r);
-                }
-            }
-        }
-    }
-
-    // This method goes through all the errors and try to group certain types
-    // of error together, for the purpose of suggesting explicit lifetime
-    // parameters to the user. This is done so that we can have a more
-    // complete view of what lifetimes should be the same.
-    // If the return value is an empty vector, it means that processing
-    // failed (so the return value of this method should not be used).
-    //
-    // The method also attempts to weed out messages that seem like
-    // duplicates that will be unhelpful to the end-user. But
-    // obviously it never weeds out ALL errors.
-    fn process_errors(&self, errors: &Vec<RegionResolutionError<'tcx>>)
-                      -> Vec<RegionResolutionError<'tcx>> {
-        debug!("process_errors()");
-
-        // We want to avoid reporting generic-bound failures if we can
-        // avoid it: these have a very high rate of being unhelpful in
-        // practice. This is because they are basically secondary
-        // checks that test the state of the region graph after the
-        // rest of inference is done, and the other kinds of errors
-        // indicate that the region constraint graph is internally
-        // inconsistent, so these test results are likely to be
-        // meaningless.
-        //
-        // Therefore, we filter them out of the list unless they are
-        // the only thing in the list.
-
-        let is_bound_failure = |e: &RegionResolutionError<'tcx>| match *e {
-            ConcreteFailure(..) => false,
-            SubSupConflict(..) => false,
-            GenericBoundFailure(..) => true,
-        };
-
-        if errors.iter().all(|e| is_bound_failure(e)) {
-            errors.clone()
-        } else {
-            errors.iter().filter(|&e| !is_bound_failure(e)).cloned().collect()
-        }
-    }
-
-    /// Adds a note if the types come from similarly named crates
-    fn check_and_note_conflicting_crates(&self,
-                                         err: &mut DiagnosticBuilder,
-                                         terr: &TypeError<'tcx>,
-                                         sp: Span) {
-        let report_path_match = |err: &mut DiagnosticBuilder, did1: DefId, did2: DefId| {
-            // Only external crates, if either is from a local
-            // module we could have false positives
-            if !(did1.is_local() || did2.is_local()) && did1.krate != did2.krate {
-                let exp_path = self.tcx.item_path_str(did1);
-                let found_path = self.tcx.item_path_str(did2);
-                // We compare strings because DefPath can be different
-                // for imported and non-imported crates
-                if exp_path == found_path {
-                    let crate_name = self.tcx.sess.cstore.crate_name(did1.krate);
-                    err.span_note(sp, &format!("Perhaps two different versions \
-                                                of crate `{}` are being used?",
-                                               crate_name));
-                }
-            }
-        };
-        match *terr {
-            TypeError::Sorts(ref exp_found) => {
-                // if they are both "path types", there's a chance of ambiguity
-                // due to different versions of the same crate
-                match (&exp_found.expected.sty, &exp_found.found.sty) {
-                    (&ty::TyAdt(exp_adt, _), &ty::TyAdt(found_adt, _)) => {
-                        report_path_match(err, exp_adt.did, found_adt.did);
-                    },
-                    _ => ()
-                }
-            },
-            TypeError::Traits(ref exp_found) => {
-                report_path_match(err, exp_found.expected, exp_found.found);
-            },
-            _ => () // FIXME(#22750) handle traits and stuff
-        }
-    }
-
-    fn note_error_origin(&self,
-                         err: &mut DiagnosticBuilder<'tcx>,
-                         cause: &ObligationCause<'tcx>)
-    {
-        match cause.code {
-            ObligationCauseCode::MatchExpressionArm { arm_span, source } => match source {
-                hir::MatchSource::IfLetDesugar {..} => {
-                    err.span_note(arm_span, "`if let` arm with an incompatible type");
-                }
-                _ => {
-                    err.span_note(arm_span, "match arm with an incompatible type");
-                }
-            },
-            _ => ()
-        }
-    }
-
-    pub fn note_type_err(&self,
-                         diag: &mut DiagnosticBuilder<'tcx>,
-                         cause: &ObligationCause<'tcx>,
-                         secondary_span: Option<(Span, String)>,
-                         values: Option<ValuePairs<'tcx>>,
-                         terr: &TypeError<'tcx>)
-    {
-        let (expected_found, is_simple_error) = match values {
-            None => (None, false),
-            Some(values) => {
-                let is_simple_error = match values {
-                    ValuePairs::Types(exp_found) => {
-                        exp_found.expected.is_primitive() && exp_found.found.is_primitive()
-                    }
-                    _ => false,
-                };
-                let vals = match self.values_str(&values) {
-                    Some((expected, found)) => Some((expected, found)),
-                    None => {
-                        // Derived error. Cancel the emitter.
-                        self.tcx.sess.diagnostic().cancel(diag);
-                        return
-                    }
-                };
-                (vals, is_simple_error)
-            }
-        };
-
-        let span = cause.span;
-
-        if let Some((expected, found)) = expected_found {
-            match (terr, is_simple_error, expected == found) {
-                (&TypeError::Sorts(ref values), false,  true) => {
-                    diag.note_expected_found_extra(
-                        &"type", &expected, &found,
-                        &format!(" ({})", values.expected.sort_string(self.tcx)),
-                        &format!(" ({})", values.found.sort_string(self.tcx)));
-                }
-                (_, false,  _) => {
-                    diag.note_expected_found(&"type", &expected, &found);
-                }
-                _ => (),
-            }
-        }
-
-        diag.span_label(span, &terr);
-        if let Some((sp, msg)) = secondary_span {
-            diag.span_label(sp, &msg);
-        }
-
-        self.note_error_origin(diag, &cause);
-        self.check_and_note_conflicting_crates(diag, terr, span);
-        self.tcx.note_and_explain_type_err(diag, terr, span);
-    }
-
-    pub fn note_issue_32330(&self,
-                            diag: &mut DiagnosticBuilder<'tcx>,
-                            terr: &TypeError<'tcx>)
-    {
-        debug!("note_issue_32330: terr={:?}", terr);
-        match *terr {
-            TypeError::RegionsInsufficientlyPolymorphic(_, &Region::ReVar(vid)) |
-            TypeError::RegionsOverlyPolymorphic(_, &Region::ReVar(vid)) => {
-                match self.region_vars.var_origin(vid) {
-                    RegionVariableOrigin::EarlyBoundRegion(_, _, Some(Issue32330 {
-                        fn_def_id,
-                        region_name
-                    })) => {
-                        diag.note(
-                            &format!("lifetime parameter `{0}` declared on fn `{1}` \
-                                      appears only in the return type, \
-                                      but here is required to be higher-ranked, \
-                                      which means that `{0}` must appear in both \
-                                      argument and return types",
-                                     region_name,
-                                     self.tcx.item_path_str(fn_def_id)));
-                        diag.note(
-                            &format!("this error is the result of a recent bug fix; \
-                                      for more information, see issue #33685 \
-                                      <https://github.com/rust-lang/rust/issues/33685>"));
-                    }
-                    _ => { }
-                }
-            }
-            _ => { }
-        }
-    }
-
-    pub fn report_and_explain_type_error(&self,
-                                         trace: TypeTrace<'tcx>,
-                                         terr: &TypeError<'tcx>)
-                                         -> DiagnosticBuilder<'tcx>
-    {
-        let span = trace.cause.span;
-        let failure_str = trace.cause.as_failure_str();
-        let mut diag = match trace.cause.code {
-            ObligationCauseCode::IfExpressionWithNoElse => {
-                struct_span_err!(self.tcx.sess, span, E0317, "{}", failure_str)
-            }
-            ObligationCauseCode::MainFunctionType => {
-                struct_span_err!(self.tcx.sess, span, E0580, "{}", failure_str)
-            }
-            _ => {
-                struct_span_err!(self.tcx.sess, span, E0308, "{}", failure_str)
-            }
-        };
-        self.note_type_err(&mut diag, &trace.cause, None, Some(trace.values), terr);
-        self.note_issue_32330(&mut diag, terr);
-        diag
-    }
-
-    /// Returns a string of the form "expected `{}`, found `{}`".
-    fn values_str(&self, values: &ValuePairs<'tcx>) -> Option<(String, String)> {
-        match *values {
-            infer::Types(ref exp_found) => self.expected_found_str(exp_found),
-            infer::TraitRefs(ref exp_found) => self.expected_found_str(exp_found),
-            infer::PolyTraitRefs(ref exp_found) => self.expected_found_str(exp_found),
-        }
-    }
-
-    fn expected_found_str<T: fmt::Display + TypeFoldable<'tcx>>(
-        &self,
-        exp_found: &ty::error::ExpectedFound<T>)
-        -> Option<(String, String)>
-    {
-        let exp_found = self.resolve_type_vars_if_possible(exp_found);
-        if exp_found.references_error() {
-            return None;
-        }
-
-        Some((format!("{}", exp_found.expected), format!("{}", exp_found.found)))
-    }
-
-    fn report_generic_bound_failure(&self,
-                                    origin: SubregionOrigin<'tcx>,
-                                    bound_kind: GenericKind<'tcx>,
-                                    sub: &'tcx Region)
-    {
-        // FIXME: it would be better to report the first error message
-        // with the span of the parameter itself, rather than the span
-        // where the error was detected. But that span is not readily
-        // accessible.
-
-        let labeled_user_string = match bound_kind {
-            GenericKind::Param(ref p) =>
-                format!("the parameter type `{}`", p),
-            GenericKind::Projection(ref p) =>
-                format!("the associated type `{}`", p),
-        };
-
-        if let SubregionOrigin::CompareImplMethodObligation {
-            span, item_name, impl_item_def_id, trait_item_def_id, lint_id
-        } = origin {
-            self.report_extra_impl_obligation(span,
-                                              item_name,
-                                              impl_item_def_id,
-                                              trait_item_def_id,
-                                              &format!("`{}: {}`", bound_kind, sub),
-                                              lint_id)
-                .emit();
-            return;
-        }
-
-        let mut err = match *sub {
-            ty::ReFree(ty::FreeRegion {bound_region: ty::BrNamed(..), ..}) => {
-                // Does the required lifetime have a nice name we can print?
-                let mut err = struct_span_err!(self.tcx.sess,
-                                               origin.span(),
-                                               E0309,
-                                               "{} may not live long enough",
-                                               labeled_user_string);
-                err.help(&format!("consider adding an explicit lifetime bound `{}: {}`...",
-                         bound_kind,
-                         sub));
-                err
-            }
-
-            ty::ReStatic => {
-                // Does the required lifetime have a nice name we can print?
-                let mut err = struct_span_err!(self.tcx.sess,
-                                               origin.span(),
-                                               E0310,
-                                               "{} may not live long enough",
-                                               labeled_user_string);
-                err.help(&format!("consider adding an explicit lifetime \
-                                   bound `{}: 'static`...",
-                                  bound_kind));
-                err
-            }
-
-            _ => {
-                // If not, be less specific.
-                let mut err = struct_span_err!(self.tcx.sess,
-                                               origin.span(),
-                                               E0311,
-                                               "{} may not live long enough",
-                                               labeled_user_string);
-                err.help(&format!("consider adding an explicit lifetime bound for `{}`",
-                                  bound_kind));
-                self.tcx.note_and_explain_region(
-                    &mut err,
-                    &format!("{} must be valid for ", labeled_user_string),
-                    sub,
-                    "...");
-                err
-            }
-        };
-
-        self.note_region_origin(&mut err, &origin);
-        err.emit();
-    }
-
-    fn report_concrete_failure(&self,
-                               origin: SubregionOrigin<'tcx>,
-                               sub: &'tcx Region,
-                               sup: &'tcx Region)
-                                -> DiagnosticBuilder<'tcx> {
-        match origin {
-            infer::Subtype(trace) => {
-                let terr = TypeError::RegionsDoesNotOutlive(sup, sub);
-                self.report_and_explain_type_error(trace, &terr)
-            }
-            infer::Reborrow(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0312,
-                    "lifetime of reference outlives \
-                     lifetime of borrowed content...");
-                self.tcx.note_and_explain_region(&mut err,
-                    "...the reference is valid for ",
-                    sub,
-                    "...");
-                self.tcx.note_and_explain_region(&mut err,
-                    "...but the borrowed content is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::ReborrowUpvar(span, ref upvar_id) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0313,
-                    "lifetime of borrowed pointer outlives \
-                            lifetime of captured variable `{}`...",
-                            self.tcx.local_var_name_str(upvar_id.var_id));
-                self.tcx.note_and_explain_region(&mut err,
-                    "...the borrowed pointer is valid for ",
-                    sub,
-                    "...");
-                self.tcx.note_and_explain_region(&mut err,
-                    &format!("...but `{}` is only valid for ",
-                             self.tcx.local_var_name_str(upvar_id.var_id)),
-                    sup,
-                    "");
-                err
-            }
-            infer::InfStackClosure(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0314,
-                    "closure outlives stack frame");
-                self.tcx.note_and_explain_region(&mut err,
-                    "...the closure must be valid for ",
-                    sub,
-                    "...");
-                self.tcx.note_and_explain_region(&mut err,
-                    "...but the closure's stack frame is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::InvokeClosure(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0315,
-                    "cannot invoke closure outside of its lifetime");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the closure is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::DerefPointer(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0473,
-                          "dereference of reference outside its lifetime");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the reference is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::FreeVariable(span, id) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0474,
-                          "captured variable `{}` does not outlive the enclosing closure",
-                          self.tcx.local_var_name_str(id));
-                self.tcx.note_and_explain_region(&mut err,
-                    "captured variable is valid for ",
-                    sup,
-                    "");
-                self.tcx.note_and_explain_region(&mut err,
-                    "closure is valid for ",
-                    sub,
-                    "");
-                err
-            }
-            infer::IndexSlice(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0475,
-                          "index of slice outside its lifetime");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the slice is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::RelateObjectBound(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0476,
-                          "lifetime of the source pointer does not outlive \
-                           lifetime bound of the object type");
-                self.tcx.note_and_explain_region(&mut err,
-                    "object type is valid for ",
-                    sub,
-                    "");
-                self.tcx.note_and_explain_region(&mut err,
-                    "source pointer is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::RelateParamBound(span, ty) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0477,
-                          "the type `{}` does not fulfill the required lifetime",
-                          self.ty_to_string(ty));
-                self.tcx.note_and_explain_region(&mut err,
-                                        "type must outlive ",
-                                        sub,
-                                        "");
-                err
-            }
-            infer::RelateRegionParamBound(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0478,
-                          "lifetime bound not satisfied");
-                self.tcx.note_and_explain_region(&mut err,
-                    "lifetime parameter instantiated with ",
-                    sup,
-                    "");
-                self.tcx.note_and_explain_region(&mut err,
-                    "but lifetime parameter must outlive ",
-                    sub,
-                    "");
-                err
-            }
-            infer::RelateDefaultParamBound(span, ty) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0479,
-                          "the type `{}` (provided as the value of \
-                           a type parameter) is not valid at this point",
-                          self.ty_to_string(ty));
-                self.tcx.note_and_explain_region(&mut err,
-                                        "type must outlive ",
-                                        sub,
-                                        "");
-                err
-            }
-            infer::CallRcvr(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0480,
-                          "lifetime of method receiver does not outlive \
-                           the method call");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the receiver is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::CallArg(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0481,
-                          "lifetime of function argument does not outlive \
-                           the function call");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the function argument is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::CallReturn(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0482,
-                          "lifetime of return value does not outlive \
-                           the function call");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the return value is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::Operand(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0483,
-                          "lifetime of operand does not outlive \
-                           the operation");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the operand is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::AddrOf(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0484,
-                          "reference is not valid at the time of borrow");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the borrow is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::AutoBorrow(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0485,
-                          "automatically reference is not valid \
-                           at the time of borrow");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the automatic borrow is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::ExprTypeIsNotInScope(t, span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0486,
-                          "type of expression contains references \
-                           that are not valid during the expression: `{}`",
-                          self.ty_to_string(t));
-                self.tcx.note_and_explain_region(&mut err,
-                    "type is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::SafeDestructor(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0487,
-                          "unsafe use of destructor: destructor might be called \
-                           while references are dead");
-                // FIXME (22171): terms "super/subregion" are suboptimal
-                self.tcx.note_and_explain_region(&mut err,
-                    "superregion: ",
-                    sup,
-                    "");
-                self.tcx.note_and_explain_region(&mut err,
-                    "subregion: ",
-                    sub,
-                    "");
-                err
-            }
-            infer::BindingTypeIsNotValidAtDecl(span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0488,
-                          "lifetime of variable does not enclose its declaration");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the variable is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::ParameterInScope(_, span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0489,
-                          "type/lifetime parameter not in scope here");
-                self.tcx.note_and_explain_region(&mut err,
-                    "the parameter is only valid for ",
-                    sub,
-                    "");
-                err
-            }
-            infer::DataBorrowed(ty, span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0490,
-                          "a value of type `{}` is borrowed for too long",
-                          self.ty_to_string(ty));
-                self.tcx.note_and_explain_region(&mut err, "the type is valid for ", sub, "");
-                self.tcx.note_and_explain_region(&mut err, "but the borrow lasts for ", sup, "");
-                err
-            }
-            infer::ReferenceOutlivesReferent(ty, span) => {
-                let mut err = struct_span_err!(self.tcx.sess, span, E0491,
-                          "in type `{}`, reference has a longer lifetime \
-                           than the data it references",
-                          self.ty_to_string(ty));
-                self.tcx.note_and_explain_region(&mut err,
-                    "the pointer is valid for ",
-                    sub,
-                    "");
-                self.tcx.note_and_explain_region(&mut err,
-                    "but the referenced data is only valid for ",
-                    sup,
-                    "");
-                err
-            }
-            infer::CompareImplMethodObligation { span,
-                                                 item_name,
-                                                 impl_item_def_id,
-                                                 trait_item_def_id,
-                                                 lint_id } => {
-                self.report_extra_impl_obligation(span,
-                                                  item_name,
-                                                  impl_item_def_id,
-                                                  trait_item_def_id,
-                                                  &format!("`{}: {}`", sup, sub),
-                                                  lint_id)
-            }
-        }
-    }
-
-    fn report_sub_sup_conflict(&self,
-                               var_origin: RegionVariableOrigin,
-                               sub_origin: SubregionOrigin<'tcx>,
-                               sub_region: &'tcx Region,
-                               sup_origin: SubregionOrigin<'tcx>,
-                               sup_region: &'tcx Region) {
-        let mut err = self.report_inference_failure(var_origin);
-
-        self.tcx.note_and_explain_region(&mut err,
-            "first, the lifetime cannot outlive ",
-            sup_region,
-            "...");
-
-        self.note_region_origin(&mut err, &sup_origin);
-
-        self.tcx.note_and_explain_region(&mut err,
-            "but, the lifetime must be valid for ",
-            sub_region,
-            "...");
-
-        self.note_region_origin(&mut err, &sub_origin);
-        err.emit();
-    }
-}
-
-impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
-    fn report_inference_failure(&self,
-                                var_origin: RegionVariableOrigin)
-                                -> DiagnosticBuilder<'tcx> {
-        let br_string = |br: ty::BoundRegion| {
-            let mut s = br.to_string();
-            if !s.is_empty() {
-                s.push_str(" ");
-            }
-            s
-        };
-        let var_description = match var_origin {
-            infer::MiscVariable(_) => "".to_string(),
-            infer::PatternRegion(_) => " for pattern".to_string(),
-            infer::AddrOfRegion(_) => " for borrow expression".to_string(),
-            infer::Autoref(_) => " for autoref".to_string(),
-            infer::Coercion(_) => " for automatic coercion".to_string(),
-            infer::LateBoundRegion(_, br, infer::FnCall) => {
-                format!(" for lifetime parameter {}in function call",
-                        br_string(br))
-            }
-            infer::LateBoundRegion(_, br, infer::HigherRankedType) => {
-                format!(" for lifetime parameter {}in generic type", br_string(br))
-            }
-            infer::LateBoundRegion(_, br, infer::AssocTypeProjection(type_name)) => {
-                format!(" for lifetime parameter {}in trait containing associated type `{}`",
-                        br_string(br), type_name)
-            }
-            infer::EarlyBoundRegion(_, name, _) => {
-                format!(" for lifetime parameter `{}`",
-                        name)
-            }
-            infer::BoundRegionInCoherence(name) => {
-                format!(" for lifetime parameter `{}` in coherence check",
-                        name)
-            }
-            infer::UpvarRegion(ref upvar_id, _) => {
-                format!(" for capture of `{}` by closure",
-                        self.tcx.local_var_name_str(upvar_id.var_id).to_string())
-            }
-        };
-
-        struct_span_err!(self.tcx.sess, var_origin.span(), E0495,
-                  "cannot infer an appropriate lifetime{} \
-                   due to conflicting requirements",
-                  var_description)
-    }
-
-    fn note_region_origin(&self, err: &mut DiagnosticBuilder, origin: &SubregionOrigin<'tcx>) {
-        match *origin {
-            infer::Subtype(ref trace) => {
-                if let Some((expected, found)) = self.values_str(&trace.values) {
-                    // FIXME: do we want a "the" here?
-                    err.span_note(
-                        trace.cause.span,
-                        &format!("...so that {} (expected {}, found {})",
-                                 trace.cause.as_requirement_str(), expected, found));
-                } else {
-                    // FIXME: this really should be handled at some earlier stage. Our
-                    // handling of region checking when type errors are present is
-                    // *terrible*.
-
-                    err.span_note(
-                        trace.cause.span,
-                        &format!("...so that {}",
-                                 trace.cause.as_requirement_str()));
-                }
-            }
-            infer::Reborrow(span) => {
-                err.span_note(
-                    span,
-                    "...so that reference does not outlive \
-                    borrowed content");
-            }
-            infer::ReborrowUpvar(span, ref upvar_id) => {
-                err.span_note(
-                    span,
-                    &format!(
-                        "...so that closure can access `{}`",
-                        self.tcx.local_var_name_str(upvar_id.var_id)
-                            .to_string()));
-            }
-            infer::InfStackClosure(span) => {
-                err.span_note(
-                    span,
-                    "...so that closure does not outlive its stack frame");
-            }
-            infer::InvokeClosure(span) => {
-                err.span_note(
-                    span,
-                    "...so that closure is not invoked outside its lifetime");
-            }
-            infer::DerefPointer(span) => {
-                err.span_note(
-                    span,
-                    "...so that pointer is not dereferenced \
-                    outside its lifetime");
-            }
-            infer::FreeVariable(span, id) => {
-                err.span_note(
-                    span,
-                    &format!("...so that captured variable `{}` \
-                            does not outlive the enclosing closure",
-                            self.tcx.local_var_name_str(id)));
-            }
-            infer::IndexSlice(span) => {
-                err.span_note(
-                    span,
-                    "...so that slice is not indexed outside the lifetime");
-            }
-            infer::RelateObjectBound(span) => {
-                err.span_note(
-                    span,
-                    "...so that it can be closed over into an object");
-            }
-            infer::CallRcvr(span) => {
-                err.span_note(
-                    span,
-                    "...so that method receiver is valid for the method call");
-            }
-            infer::CallArg(span) => {
-                err.span_note(
-                    span,
-                    "...so that argument is valid for the call");
-            }
-            infer::CallReturn(span) => {
-                err.span_note(
-                    span,
-                    "...so that return value is valid for the call");
-            }
-            infer::Operand(span) => {
-                err.span_note(
-                    span,
-                    "...so that operand is valid for operation");
-            }
-            infer::AddrOf(span) => {
-                err.span_note(
-                    span,
-                    "...so that reference is valid \
-                     at the time of borrow");
-            }
-            infer::AutoBorrow(span) => {
-                err.span_note(
-                    span,
-                    "...so that auto-reference is valid \
-                     at the time of borrow");
-            }
-            infer::ExprTypeIsNotInScope(t, span) => {
-                err.span_note(
-                    span,
-                    &format!("...so type `{}` of expression is valid during the \
-                             expression",
-                            self.ty_to_string(t)));
-            }
-            infer::BindingTypeIsNotValidAtDecl(span) => {
-                err.span_note(
-                    span,
-                    "...so that variable is valid at time of its declaration");
-            }
-            infer::ParameterInScope(_, span) => {
-                err.span_note(
-                    span,
-                    "...so that a type/lifetime parameter is in scope here");
-            }
-            infer::DataBorrowed(ty, span) => {
-                err.span_note(
-                    span,
-                    &format!("...so that the type `{}` is not borrowed for too long",
-                             self.ty_to_string(ty)));
-            }
-            infer::ReferenceOutlivesReferent(ty, span) => {
-                err.span_note(
-                    span,
-                    &format!("...so that the reference type `{}` \
-                             does not outlive the data it points at",
-                            self.ty_to_string(ty)));
-            }
-            infer::RelateParamBound(span, t) => {
-                err.span_note(
-                    span,
-                    &format!("...so that the type `{}` \
-                             will meet its required lifetime bounds",
-                            self.ty_to_string(t)));
-            }
-            infer::RelateDefaultParamBound(span, t) => {
-                err.span_note(
-                    span,
-                    &format!("...so that type parameter \
-                             instantiated with `{}`, \
-                             will meet its declared lifetime bounds",
-                            self.ty_to_string(t)));
-            }
-            infer::RelateRegionParamBound(span) => {
-                err.span_note(
-                    span,
-                    "...so that the declared lifetime parameter bounds \
-                                are satisfied");
-            }
-            infer::SafeDestructor(span) => {
-                err.span_note(
-                    span,
-                    "...so that references are valid when the destructor \
-                     runs");
-            }
-            infer::CompareImplMethodObligation { span, .. } => {
-                err.span_note(
-                    span,
-                    "...so that the definition in impl matches the definition from the trait");
-            }
-        }
-    }
-}
-
-impl<'tcx> ObligationCause<'tcx> {
-    fn as_failure_str(&self) -> &'static str {
-        use traits::ObligationCauseCode::*;
-        match self.code {
-            CompareImplMethodObligation { .. } => "method not compatible with trait",
-            MatchExpressionArm { source, .. } => match source {
-                hir::MatchSource::IfLetDesugar{..} => "`if let` arms have incompatible types",
-                _ => "match arms have incompatible types",
-            },
-            IfExpression => "if and else have incompatible types",
-            IfExpressionWithNoElse => "if may be missing an else clause",
-            EquatePredicate => "equality predicate not satisfied",
-            MainFunctionType => "main function has wrong type",
-            StartFunctionType => "start function has wrong type",
-            IntrinsicType => "intrinsic has wrong type",
-            MethodReceiver => "mismatched method receiver",
-            _ => "mismatched types",
-        }
-    }
-
-    fn as_requirement_str(&self) -> &'static str {
-        use traits::ObligationCauseCode::*;
-        match self.code {
-            CompareImplMethodObligation { .. } => "method type is compatible with trait",
-            ExprAssignable => "expression is assignable",
-            MatchExpressionArm { source, .. } => match source {
-                hir::MatchSource::IfLetDesugar{..} => "`if let` arms have compatible types",
-                _ => "match arms have compatible types",
-            },
-            IfExpression => "if and else have compatible types",
-            IfExpressionWithNoElse => "if missing an else returns ()",
-            EquatePredicate => "equality where clause is satisfied",
-            MainFunctionType => "`main` function has the correct type",
-            StartFunctionType => "`start` function has the correct type",
-            IntrinsicType => "intrinsic has the correct type",
-            MethodReceiver => "method receiver has the correct type",
-            _ => "types are compatible",
-        }
-    }
-}
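
The generic-bound-failure path above (E0309) tells the user to "consider adding an explicit lifetime bound". As a reference point, here is a minimal sketch of ordinary user code that follows that suggestion; `Wrapper` is a hypothetical type used only for illustration, not part of the compiler.

```rust
// Without the explicit `T: 'a` bound, compilers of this era reported
// E0309 ("the parameter type `T` may not live long enough") for the
// `&'a T` field; adding the bound, as the help text suggests, makes
// the definition well-formed.
struct Wrapper<'a, T: 'a> {
    inner: &'a T,
}

fn main() {
    let value = 5u32;
    let wrapped = Wrapper { inner: &value };
    println!("{}", wrapped.inner);
}
```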
diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs
new file mode 100644 (file)
index 0000000..21139c8
--- /dev/null
@@ -0,0 +1,693 @@
+// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Error Reporting Code for the inference engine
+//!
+//! Because of the way inference, and in particular region inference,
+//! works, it often happens that errors are not detected until far after
+//! the relevant line of code has been type-checked. Therefore, there is
+//! an elaborate system to track why a particular constraint in the
+//! inference graph arose so that we can explain to the user what gave
+//! rise to a particular error.
+//!
+//! The basis of the system are the "origin" types. An "origin" is the
+//! reason that a constraint or inference variable arose. There are
+//! different "origin" enums for different kinds of constraints/variables
+//! (e.g., `TypeOrigin`, `RegionVariableOrigin`). An origin always has
+//! a span, but also more information so that we can generate a meaningful
+//! error message.
+//!
+//! Having a catalogue of all the different reasons an error can arise is
+//! also useful for other reasons, like cross-referencing FAQs etc, though
+//! we are not really taking advantage of this yet.
+//!
+//! # Region Inference
+//!
+//! Region inference is particularly tricky because it always succeeds "in
+//! the moment" and simply registers a constraint. Then, at the end, we
+//! can compute the full graph and report errors, so we need to be able to
+//! store and later report what gave rise to the conflicting constraints.
+//!
+//! # Subtype Trace
+//!
+//! Determining whether `T1 <: T2` often involves a number of subtypes and
+//! subconstraints along the way. A "TypeTrace" is an extended version
+//! of an origin that traces the types and other values that were being
+//! compared. It is not necessarily comprehensive (in fact, at the time of
+//! this writing it only tracks the root values being compared) but I'd
+//! like to extend it to include significant "waypoints". For example, if
+//! you are comparing `(T1, T2) <: (T3, T4)`, and the problem is that `T2
+//! <: T4` fails, I'd like the trace to include enough information to say
+//! "in the 2nd element of the tuple". Similarly, failures when comparing
+//! arguments or return types in fn types should be able to cite the
+//! specific position, etc.
+//!
+//! # Reality vs plan
+//!
+//! Of course, there is still a LOT of code in typeck that has yet to be
+//! ported to this system, and which relies on string concatenation at the
+//! time of error detection.
+
+use infer;
+use super::{InferCtxt, TypeTrace, SubregionOrigin, RegionVariableOrigin, ValuePairs};
+use super::region_inference::{RegionResolutionError, ConcreteFailure, SubSupConflict,
+                              GenericBoundFailure, GenericKind};
+
+use std::fmt;
+use hir;
+use hir::map as hir_map;
+use hir::def_id::DefId;
+use middle::region;
+use traits::{ObligationCause, ObligationCauseCode};
+use ty::{self, TyCtxt, TypeFoldable};
+use ty::{Region, Issue32330};
+use ty::error::TypeError;
+use syntax_pos::{Pos, Span};
+use errors::DiagnosticBuilder;
+
+mod note;
+
+impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+    pub fn note_and_explain_region(self,
+                                   err: &mut DiagnosticBuilder,
+                                   prefix: &str,
+                                   region: &'tcx ty::Region,
+                                   suffix: &str) {
+        fn item_scope_tag(item: &hir::Item) -> &'static str {
+            match item.node {
+                hir::ItemImpl(..) => "impl",
+                hir::ItemStruct(..) => "struct",
+                hir::ItemUnion(..) => "union",
+                hir::ItemEnum(..) => "enum",
+                hir::ItemTrait(..) => "trait",
+                hir::ItemFn(..) => "function body",
+                _ => "item"
+            }
+        }
+
+        fn trait_item_scope_tag(item: &hir::TraitItem) -> &'static str {
+            match item.node {
+                hir::TraitItemKind::Method(..) => "method body",
+                hir::TraitItemKind::Const(..) |
+                hir::TraitItemKind::Type(..) => "associated item"
+            }
+        }
+
+        fn impl_item_scope_tag(item: &hir::ImplItem) -> &'static str {
+            match item.node {
+                hir::ImplItemKind::Method(..) => "method body",
+                hir::ImplItemKind::Const(..) |
+                hir::ImplItemKind::Type(_) => "associated item"
+            }
+        }
+
+        fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
+                                        heading: &str, span: Span)
+                                        -> (String, Option<Span>) {
+            let lo = tcx.sess.codemap().lookup_char_pos_adj(span.lo);
+            (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize()),
+             Some(span))
+        }
+
+        let (description, span) = match *region {
+            ty::ReScope(scope) => {
+                let new_string;
+                let unknown_scope = || {
+                    format!("{}unknown scope: {:?}{}.  Please report a bug.",
+                            prefix, scope, suffix)
+                };
+                let span = match scope.span(&self.region_maps, &self.hir) {
+                    Some(s) => s,
+                    None => {
+                        err.note(&unknown_scope());
+                        return;
+                    }
+                };
+                let tag = match self.hir.find(scope.node_id(&self.region_maps)) {
+                    Some(hir_map::NodeBlock(_)) => "block",
+                    Some(hir_map::NodeExpr(expr)) => match expr.node {
+                        hir::ExprCall(..) => "call",
+                        hir::ExprMethodCall(..) => "method call",
+                        hir::ExprMatch(.., hir::MatchSource::IfLetDesugar { .. }) => "if let",
+                        hir::ExprMatch(.., hir::MatchSource::WhileLetDesugar) =>  "while let",
+                        hir::ExprMatch(.., hir::MatchSource::ForLoopDesugar) =>  "for",
+                        hir::ExprMatch(..) => "match",
+                        _ => "expression",
+                    },
+                    Some(hir_map::NodeStmt(_)) => "statement",
+                    Some(hir_map::NodeItem(it)) => item_scope_tag(&it),
+                    Some(hir_map::NodeTraitItem(it)) => trait_item_scope_tag(&it),
+                    Some(hir_map::NodeImplItem(it)) => impl_item_scope_tag(&it),
+                    Some(_) | None => {
+                        err.span_note(span, &unknown_scope());
+                        return;
+                    }
+                };
+                let scope_decorated_tag = match self.region_maps.code_extent_data(scope) {
+                    region::CodeExtentData::Misc(_) => tag,
+                    region::CodeExtentData::CallSiteScope { .. } => {
+                        "scope of call-site for function"
+                    }
+                    region::CodeExtentData::ParameterScope { .. } => {
+                        "scope of function body"
+                    }
+                    region::CodeExtentData::DestructionScope(_) => {
+                        new_string = format!("destruction scope surrounding {}", tag);
+                        &new_string[..]
+                    }
+                    region::CodeExtentData::Remainder(r) => {
+                        new_string = format!("block suffix following statement {}",
+                                             r.first_statement_index);
+                        &new_string[..]
+                    }
+                };
+                explain_span(self, scope_decorated_tag, span)
+            }
+
+            ty::ReFree(ref fr) => {
+                let prefix = match fr.bound_region {
+                    ty::BrAnon(idx) => {
+                        format!("the anonymous lifetime #{} defined on", idx + 1)
+                    }
+                    ty::BrFresh(_) => "an anonymous lifetime defined on".to_owned(),
+                    _ => {
+                        format!("the lifetime {} as defined on",
+                                fr.bound_region)
+                    }
+                };
+
+                let node = fr.scope.node_id(&self.region_maps);
+                let unknown;
+                let tag = match self.hir.find(node) {
+                    Some(hir_map::NodeBlock(_)) |
+                    Some(hir_map::NodeExpr(_)) => "body",
+                    Some(hir_map::NodeItem(it)) => item_scope_tag(&it),
+                    Some(hir_map::NodeTraitItem(it)) => trait_item_scope_tag(&it),
+                    Some(hir_map::NodeImplItem(it)) => impl_item_scope_tag(&it),
+
+                    // this really should not happen, but it does:
+                    // FIXME(#27942)
+                    Some(_) => {
+                        unknown = format!("unexpected node ({}) for scope {:?}.  \
+                                           Please report a bug.",
+                                          self.hir.node_to_string(node), fr.scope);
+                        &unknown
+                    }
+                    None => {
+                        unknown = format!("unknown node for scope {:?}.  \
+                                           Please report a bug.", fr.scope);
+                        &unknown
+                    }
+                };
+                let (msg, opt_span) = explain_span(self, tag, self.hir.span(node));
+                (format!("{} {}", prefix, msg), opt_span)
+            }
+
+            ty::ReStatic => ("the static lifetime".to_owned(), None),
+
+            ty::ReEmpty => ("the empty lifetime".to_owned(), None),
+
+            ty::ReEarlyBound(ref data) => (data.name.to_string(), None),
+
+            // FIXME(#13998) ReSkolemized should probably print like
+            // ReFree rather than dumping Debug output on the user.
+            //
+            // We shouldn't really be having unification failures with ReVar
+            // and ReLateBound though.
+            ty::ReSkolemized(..) |
+            ty::ReVar(_) |
+            ty::ReLateBound(..) |
+            ty::ReErased => {
+                (format!("lifetime {:?}", region), None)
+            }
+        };
+        let message = format!("{}{}{}", prefix, description, suffix);
+        if let Some(span) = span {
+            err.span_note(span, &message);
+        } else {
+            err.note(&message);
+        }
+    }
+}
+
+impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+    pub fn report_region_errors(&self,
+                                errors: &Vec<RegionResolutionError<'tcx>>) {
+        debug!("report_region_errors(): {} errors to start", errors.len());
+
+        // try to pre-process the errors, which will group some of them
+        // together into a `ProcessedErrors` group:
+        let errors = self.process_errors(errors);
+
+        debug!("report_region_errors: {} errors after preprocessing", errors.len());
+
+        for error in errors {
+            debug!("report_region_errors: error = {:?}", error);
+            match error.clone() {
+                ConcreteFailure(origin, sub, sup) => {
+                    self.report_concrete_failure(origin, sub, sup).emit();
+                }
+
+                GenericBoundFailure(kind, param_ty, sub) => {
+                    self.report_generic_bound_failure(kind, param_ty, sub);
+                }
+
+                SubSupConflict(var_origin,
+                               sub_origin, sub_r,
+                               sup_origin, sup_r) => {
+                    self.report_sub_sup_conflict(var_origin,
+                                                 sub_origin, sub_r,
+                                                 sup_origin, sup_r);
+                }
+            }
+        }
+    }
+
+    // This method goes through all the errors and tries to group certain types
+    // of errors together, for the purpose of suggesting explicit lifetime
+    // parameters to the user. This is done so that we can have a more
+    // complete view of what lifetimes should be the same.
+    // If the return value is an empty vector, it means that processing
+    // failed (so the return value of this method should not be used).
+    //
+    // The method also attempts to weed out messages that seem like
+    // duplicates that will be unhelpful to the end-user. But
+    // obviously it never weeds out ALL errors.
+    fn process_errors(&self, errors: &Vec<RegionResolutionError<'tcx>>)
+                      -> Vec<RegionResolutionError<'tcx>> {
+        debug!("process_errors()");
+
+        // We want to avoid reporting generic-bound failures if we can
+        // avoid it: these have a very high rate of being unhelpful in
+        // practice. This is because they are basically secondary
+        // checks that test the state of the region graph after the
+        // rest of inference is done, and the other kinds of errors
+        // indicate that the region constraint graph is internally
+        // inconsistent, so these test results are likely to be
+        // meaningless.
+        //
+        // Therefore, we filter them out of the list unless they are
+        // the only thing in the list.
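+        //
+        // For example, given `[GenericBoundFailure, ConcreteFailure]`,
+        // only the `ConcreteFailure` is kept, while a list consisting
+        // solely of `GenericBoundFailure`s is returned unchanged.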
+
+        let is_bound_failure = |e: &RegionResolutionError<'tcx>| match *e {
+            ConcreteFailure(..) => false,
+            SubSupConflict(..) => false,
+            GenericBoundFailure(..) => true,
+        };
+
+        if errors.iter().all(|e| is_bound_failure(e)) {
+            errors.clone()
+        } else {
+            errors.iter().filter(|&e| !is_bound_failure(e)).cloned().collect()
+        }
+    }
+
+    /// Adds a note if the types come from similarly named crates
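+    /// (e.g. two distinct versions of the same crate in the dependency
+    /// graph, each exporting a type with the same path)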
+    fn check_and_note_conflicting_crates(&self,
+                                         err: &mut DiagnosticBuilder,
+                                         terr: &TypeError<'tcx>,
+                                         sp: Span) {
+        let report_path_match = |err: &mut DiagnosticBuilder, did1: DefId, did2: DefId| {
+            // Only check external crates: if either is from a local
+            // module we could have false positives
+            if !(did1.is_local() || did2.is_local()) && did1.krate != did2.krate {
+                let exp_path = self.tcx.item_path_str(did1);
+                let found_path = self.tcx.item_path_str(did2);
+                // We compare strings because DefPath can be different
+                // for imported and non-imported crates
+                if exp_path == found_path {
+                    let crate_name = self.tcx.sess.cstore.crate_name(did1.krate);
+                    err.span_note(sp, &format!("Perhaps two different versions \
+                                                of crate `{}` are being used?",
+                                               crate_name));
+                }
+            }
+        };
+        match *terr {
+            TypeError::Sorts(ref exp_found) => {
+                // if they are both "path types", there's a chance of ambiguity
+                // due to different versions of the same crate
+                match (&exp_found.expected.sty, &exp_found.found.sty) {
+                    (&ty::TyAdt(exp_adt, _), &ty::TyAdt(found_adt, _)) => {
+                        report_path_match(err, exp_adt.did, found_adt.did);
+                    },
+                    _ => ()
+                }
+            },
+            TypeError::Traits(ref exp_found) => {
+                report_path_match(err, exp_found.expected, exp_found.found);
+            },
+            _ => () // FIXME(#22750) handle traits and stuff
+        }
+    }
+
+    fn note_error_origin(&self,
+                         err: &mut DiagnosticBuilder<'tcx>,
+                         cause: &ObligationCause<'tcx>)
+    {
+        match cause.code {
+            ObligationCauseCode::MatchExpressionArm { arm_span, source } => match source {
+                hir::MatchSource::IfLetDesugar {..} => {
+                    err.span_note(arm_span, "`if let` arm with an incompatible type");
+                }
+                _ => {
+                    err.span_note(arm_span, "match arm with an incompatible type");
+                }
+            },
+            _ => ()
+        }
+    }
+
+    pub fn note_type_err(&self,
+                         diag: &mut DiagnosticBuilder<'tcx>,
+                         cause: &ObligationCause<'tcx>,
+                         secondary_span: Option<(Span, String)>,
+                         values: Option<ValuePairs<'tcx>>,
+                         terr: &TypeError<'tcx>)
+    {
+        let (expected_found, is_simple_error) = match values {
+            None => (None, false),
+            Some(values) => {
+                let is_simple_error = match values {
+                    ValuePairs::Types(exp_found) => {
+                        exp_found.expected.is_primitive() && exp_found.found.is_primitive()
+                    }
+                    _ => false,
+                };
+                let vals = match self.values_str(&values) {
+                    Some((expected, found)) => Some((expected, found)),
+                    None => {
+                        // Derived error. Cancel the emitter.
+                        self.tcx.sess.diagnostic().cancel(diag);
+                        return
+                    }
+                };
+                (vals, is_simple_error)
+            }
+        };
+
+        let span = cause.span;
+
+        if let Some((expected, found)) = expected_found {
+            match (terr, is_simple_error, expected == found) {
+                (&TypeError::Sorts(ref values), false, true) => {
+                    diag.note_expected_found_extra(
+                        &"type", &expected, &found,
+                        &format!(" ({})", values.expected.sort_string(self.tcx)),
+                        &format!(" ({})", values.found.sort_string(self.tcx)));
+                }
+                (_, false, _) => {
+                    diag.note_expected_found(&"type", &expected, &found);
+                }
+                _ => (),
+            }
+        }
+
+        diag.span_label(span, &terr);
+        if let Some((sp, msg)) = secondary_span {
+            diag.span_label(sp, &msg);
+        }
+
+        self.note_error_origin(diag, &cause);
+        self.check_and_note_conflicting_crates(diag, terr, span);
+        self.tcx.note_and_explain_type_err(diag, terr, span);
+    }
+
+    pub fn note_issue_32330(&self,
+                            diag: &mut DiagnosticBuilder<'tcx>,
+                            terr: &TypeError<'tcx>)
+    {
+        debug!("note_issue_32330: terr={:?}", terr);
+        match *terr {
+            TypeError::RegionsInsufficientlyPolymorphic(_, &Region::ReVar(vid)) |
+            TypeError::RegionsOverlyPolymorphic(_, &Region::ReVar(vid)) => {
+                match self.region_vars.var_origin(vid) {
+                    RegionVariableOrigin::EarlyBoundRegion(_, _, Some(Issue32330 {
+                        fn_def_id,
+                        region_name
+                    })) => {
+                        diag.note(
+                            &format!("lifetime parameter `{0}` declared on fn `{1}` \
+                                      appears only in the return type, \
+                                      but here is required to be higher-ranked, \
+                                      which means that `{0}` must appear in both \
+                                      argument and return types",
+                                     region_name,
+                                     self.tcx.item_path_str(fn_def_id)));
+                        diag.note(
+                            &format!("this error is the result of a recent bug fix; \
+                                      for more information, see issue #33685 \
+                                      <https://github.com/rust-lang/rust/issues/33685>"));
+                    }
+                    _ => { }
+                }
+            }
+            _ => { }
+        }
+    }
+
+    pub fn report_and_explain_type_error(&self,
+                                         trace: TypeTrace<'tcx>,
+                                         terr: &TypeError<'tcx>)
+                                         -> DiagnosticBuilder<'tcx>
+    {
+        let span = trace.cause.span;
+        let failure_str = trace.cause.as_failure_str();
+        let mut diag = match trace.cause.code {
+            ObligationCauseCode::IfExpressionWithNoElse => {
+                struct_span_err!(self.tcx.sess, span, E0317, "{}", failure_str)
+            }
+            ObligationCauseCode::MainFunctionType => {
+                struct_span_err!(self.tcx.sess, span, E0580, "{}", failure_str)
+            }
+            _ => {
+                struct_span_err!(self.tcx.sess, span, E0308, "{}", failure_str)
+            }
+        };
+        self.note_type_err(&mut diag, &trace.cause, None, Some(trace.values), terr);
+        self.note_issue_32330(&mut diag, terr);
+        diag
+    }
+
+    /// Returns the expected and found values as a pair of strings, for use in
+    /// messages of the form "expected `{}`, found `{}`".
+    fn values_str(&self, values: &ValuePairs<'tcx>) -> Option<(String, String)> {
+        match *values {
+            infer::Types(ref exp_found) => self.expected_found_str(exp_found),
+            infer::TraitRefs(ref exp_found) => self.expected_found_str(exp_found),
+            infer::PolyTraitRefs(ref exp_found) => self.expected_found_str(exp_found),
+        }
+    }
+
+    fn expected_found_str<T: fmt::Display + TypeFoldable<'tcx>>(
+        &self,
+        exp_found: &ty::error::ExpectedFound<T>)
+        -> Option<(String, String)>
+    {
+        let exp_found = self.resolve_type_vars_if_possible(exp_found);
+        if exp_found.references_error() {
+            return None;
+        }
+
+        Some((format!("{}", exp_found.expected), format!("{}", exp_found.found)))
+    }
+
+    fn report_generic_bound_failure(&self,
+                                    origin: SubregionOrigin<'tcx>,
+                                    bound_kind: GenericKind<'tcx>,
+                                    sub: &'tcx Region)
+    {
+        // FIXME: it would be better to report the first error message
+        // with the span of the parameter itself, rather than the span
+        // where the error was detected. But that span is not readily
+        // accessible.
+
+        let labeled_user_string = match bound_kind {
+            GenericKind::Param(ref p) =>
+                format!("the parameter type `{}`", p),
+            GenericKind::Projection(ref p) =>
+                format!("the associated type `{}`", p),
+        };
+
+        if let SubregionOrigin::CompareImplMethodObligation {
+            span, item_name, impl_item_def_id, trait_item_def_id, lint_id
+        } = origin {
+            self.report_extra_impl_obligation(span,
+                                              item_name,
+                                              impl_item_def_id,
+                                              trait_item_def_id,
+                                              &format!("`{}: {}`", bound_kind, sub),
+                                              lint_id)
+                .emit();
+            return;
+        }
+
+        let mut err = match *sub {
+            ty::ReFree(ty::FreeRegion {bound_region: ty::BrNamed(..), ..}) => {
+                // Does the required lifetime have a nice name we can print?
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               origin.span(),
+                                               E0309,
+                                               "{} may not live long enough",
+                                               labeled_user_string);
+                err.help(&format!("consider adding an explicit lifetime bound `{}: {}`...",
+                         bound_kind,
+                         sub));
+                err
+            }
+
+            ty::ReStatic => {
+                // Does the required lifetime have a nice name we can print?
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               origin.span(),
+                                               E0310,
+                                               "{} may not live long enough",
+                                               labeled_user_string);
+                err.help(&format!("consider adding an explicit lifetime \
+                                   bound `{}: 'static`...",
+                                  bound_kind));
+                err
+            }
+
+            _ => {
+                // If not, be less specific.
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               origin.span(),
+                                               E0311,
+                                               "{} may not live long enough",
+                                               labeled_user_string);
+                err.help(&format!("consider adding an explicit lifetime bound for `{}`",
+                                  bound_kind));
+                self.tcx.note_and_explain_region(
+                    &mut err,
+                    &format!("{} must be valid for ", labeled_user_string),
+                    sub,
+                    "...");
+                err
+            }
+        };
+
+        self.note_region_origin(&mut err, &origin);
+        err.emit();
+    }
+
+    fn report_sub_sup_conflict(&self,
+                               var_origin: RegionVariableOrigin,
+                               sub_origin: SubregionOrigin<'tcx>,
+                               sub_region: &'tcx Region,
+                               sup_origin: SubregionOrigin<'tcx>,
+                               sup_region: &'tcx Region) {
+        let mut err = self.report_inference_failure(var_origin);
+
+        self.tcx.note_and_explain_region(&mut err,
+            "first, the lifetime cannot outlive ",
+            sup_region,
+            "...");
+
+        self.note_region_origin(&mut err, &sup_origin);
+
+        self.tcx.note_and_explain_region(&mut err,
+            "but, the lifetime must be valid for ",
+            sub_region,
+            "...");
+
+        self.note_region_origin(&mut err, &sub_origin);
+        err.emit();
+    }
+}
+
+impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+    fn report_inference_failure(&self,
+                                var_origin: RegionVariableOrigin)
+                                -> DiagnosticBuilder<'tcx> {
+        let br_string = |br: ty::BoundRegion| {
+            let mut s = br.to_string();
+            if !s.is_empty() {
+                s.push_str(" ");
+            }
+            s
+        };
+        let var_description = match var_origin {
+            infer::MiscVariable(_) => "".to_string(),
+            infer::PatternRegion(_) => " for pattern".to_string(),
+            infer::AddrOfRegion(_) => " for borrow expression".to_string(),
+            infer::Autoref(_) => " for autoref".to_string(),
+            infer::Coercion(_) => " for automatic coercion".to_string(),
+            infer::LateBoundRegion(_, br, infer::FnCall) => {
+                format!(" for lifetime parameter {}in function call",
+                        br_string(br))
+            }
+            infer::LateBoundRegion(_, br, infer::HigherRankedType) => {
+                format!(" for lifetime parameter {}in generic type", br_string(br))
+            }
+            infer::LateBoundRegion(_, br, infer::AssocTypeProjection(type_name)) => {
+                format!(" for lifetime parameter {}in trait containing associated type `{}`",
+                        br_string(br), type_name)
+            }
+            infer::EarlyBoundRegion(_, name, _) => {
+                format!(" for lifetime parameter `{}`",
+                        name)
+            }
+            infer::BoundRegionInCoherence(name) => {
+                format!(" for lifetime parameter `{}` in coherence check",
+                        name)
+            }
+            infer::UpvarRegion(ref upvar_id, _) => {
+                format!(" for capture of `{}` by closure",
+                        self.tcx.local_var_name_str(upvar_id.var_id).to_string())
+            }
+        };
+
+        struct_span_err!(self.tcx.sess, var_origin.span(), E0495,
+                  "cannot infer an appropriate lifetime{} \
+                   due to conflicting requirements",
+                  var_description)
+    }
+}
+
+impl<'tcx> ObligationCause<'tcx> {
+    fn as_failure_str(&self) -> &'static str {
+        use traits::ObligationCauseCode::*;
+        match self.code {
+            CompareImplMethodObligation { .. } => "method not compatible with trait",
+            MatchExpressionArm { source, .. } => match source {
+                hir::MatchSource::IfLetDesugar{..} => "`if let` arms have incompatible types",
+                _ => "match arms have incompatible types",
+            },
+            IfExpression => "if and else have incompatible types",
+            IfExpressionWithNoElse => "if may be missing an else clause",
+            EquatePredicate => "equality predicate not satisfied",
+            MainFunctionType => "main function has wrong type",
+            StartFunctionType => "start function has wrong type",
+            IntrinsicType => "intrinsic has wrong type",
+            MethodReceiver => "mismatched method receiver",
+            _ => "mismatched types",
+        }
+    }
+
+    fn as_requirement_str(&self) -> &'static str {
+        use traits::ObligationCauseCode::*;
+        match self.code {
+            CompareImplMethodObligation { .. } => "method type is compatible with trait",
+            ExprAssignable => "expression is assignable",
+            MatchExpressionArm { source, .. } => match source {
+                hir::MatchSource::IfLetDesugar{..} => "`if let` arms have compatible types",
+                _ => "match arms have compatible types",
+            },
+            IfExpression => "if and else have compatible types",
+            IfExpressionWithNoElse => "if missing an else returns ()",
+            EquatePredicate => "equality where clause is satisfied",
+            MainFunctionType => "`main` function has the correct type",
+            StartFunctionType => "`start` function has the correct type",
+            IntrinsicType => "intrinsic has the correct type",
+            MethodReceiver => "method receiver has the correct type",
+            _ => "types are compatible",
+        }
+    }
+}
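
For reference, a minimal illustration (not part of the commit) of user code that reaches the `report_generic_bound_failure` path above, i.e. the classic E0310 case of a type parameter stored behind a `'static` reference:

```rust
// Illustrative only: `T` has no `T: 'static` bound, so rustc rejects
// this with E0310 ("the parameter type `T` may not live long enough")
// and the help built above suggests adding the bound `T: 'static`.
struct Foo<T> {
    foo: &'static T,
}

fn main() {}
```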
diff --git a/src/librustc/infer/error_reporting/note.rs b/src/librustc/infer/error_reporting/note.rs
new file mode 100644 (file)
index 0000000..8f8b260
--- /dev/null
@@ -0,0 +1,432 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::{self, InferCtxt, SubregionOrigin};
+use ty::Region;
+use ty::error::TypeError;
+use errors::DiagnosticBuilder;
+
+impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
+    pub(super) fn note_region_origin(&self,
+                                     err: &mut DiagnosticBuilder,
+                                     origin: &SubregionOrigin<'tcx>) {
+        match *origin {
+            infer::Subtype(ref trace) => {
+                if let Some((expected, found)) = self.values_str(&trace.values) {
+                    // FIXME: do we want a "the" here?
+                    err.span_note(trace.cause.span,
+                                  &format!("...so that {} (expected {}, found {})",
+                                           trace.cause.as_requirement_str(),
+                                           expected,
+                                           found));
+                } else {
+                    // FIXME: this really should be handled at some earlier stage. Our
+                    // handling of region checking when type errors are present is
+                    // *terrible*.
+
+                    err.span_note(trace.cause.span,
+                                  &format!("...so that {}", trace.cause.as_requirement_str()));
+                }
+            }
+            infer::Reborrow(span) => {
+                err.span_note(span,
+                              "...so that reference does not outlive borrowed content");
+            }
+            infer::ReborrowUpvar(span, ref upvar_id) => {
+                err.span_note(span,
+                              &format!("...so that closure can access `{}`",
+                                       self.tcx
+                                           .local_var_name_str(upvar_id.var_id)
+                                           .to_string()));
+            }
+            infer::InfStackClosure(span) => {
+                err.span_note(span, "...so that closure does not outlive its stack frame");
+            }
+            infer::InvokeClosure(span) => {
+                err.span_note(span,
+                              "...so that closure is not invoked outside its lifetime");
+            }
+            infer::DerefPointer(span) => {
+                err.span_note(span,
+                              "...so that pointer is not dereferenced outside its lifetime");
+            }
+            infer::FreeVariable(span, id) => {
+                err.span_note(span,
+                              &format!("...so that captured variable `{}` does not outlive the \
+                                        enclosing closure",
+                                       self.tcx.local_var_name_str(id)));
+            }
+            infer::IndexSlice(span) => {
+                err.span_note(span, "...so that slice is not indexed outside the lifetime");
+            }
+            infer::RelateObjectBound(span) => {
+                err.span_note(span, "...so that it can be closed over into an object");
+            }
+            infer::CallRcvr(span) => {
+                err.span_note(span,
+                              "...so that method receiver is valid for the method call");
+            }
+            infer::CallArg(span) => {
+                err.span_note(span, "...so that argument is valid for the call");
+            }
+            infer::CallReturn(span) => {
+                err.span_note(span, "...so that return value is valid for the call");
+            }
+            infer::Operand(span) => {
+                err.span_note(span, "...so that operand is valid for operation");
+            }
+            infer::AddrOf(span) => {
+                err.span_note(span, "...so that reference is valid at the time of borrow");
+            }
+            infer::AutoBorrow(span) => {
+                err.span_note(span,
+                              "...so that auto-reference is valid at the time of borrow");
+            }
+            infer::ExprTypeIsNotInScope(t, span) => {
+                err.span_note(span,
+                              &format!("...so type `{}` of expression is valid during the \
+                                        expression",
+                                       self.ty_to_string(t)));
+            }
+            infer::BindingTypeIsNotValidAtDecl(span) => {
+                err.span_note(span,
+                              "...so that variable is valid at time of its declaration");
+            }
+            infer::ParameterInScope(_, span) => {
+                err.span_note(span,
+                              "...so that a type/lifetime parameter is in scope here");
+            }
+            infer::DataBorrowed(ty, span) => {
+                err.span_note(span,
+                              &format!("...so that the type `{}` is not borrowed for too long",
+                                       self.ty_to_string(ty)));
+            }
+            infer::ReferenceOutlivesReferent(ty, span) => {
+                err.span_note(span,
+                              &format!("...so that the reference type `{}` does not outlive the \
+                                        data it points at",
+                                       self.ty_to_string(ty)));
+            }
+            infer::RelateParamBound(span, t) => {
+                err.span_note(span,
+                              &format!("...so that the type `{}` will meet its required \
+                                        lifetime bounds",
+                                       self.ty_to_string(t)));
+            }
+            infer::RelateDefaultParamBound(span, t) => {
+                err.span_note(span,
+                              &format!("...so that type parameter instantiated with `{}` will \
+                                        meet its declared lifetime bounds",
+                                       self.ty_to_string(t)));
+            }
+            infer::RelateRegionParamBound(span) => {
+                err.span_note(span,
+                              "...so that the declared lifetime parameter bounds are satisfied");
+            }
+            infer::SafeDestructor(span) => {
+                err.span_note(span,
+                              "...so that references are valid when the destructor runs");
+            }
+            infer::CompareImplMethodObligation { span, .. } => {
+                err.span_note(span,
+                              "...so that the definition in impl matches the definition from the \
+                               trait");
+            }
+        }
+    }
+
+    pub(super) fn report_concrete_failure(&self,
+                                          origin: SubregionOrigin<'tcx>,
+                                          sub: &'tcx Region,
+                                          sup: &'tcx Region)
+                                          -> DiagnosticBuilder<'tcx> {
+        match origin {
+            infer::Subtype(trace) => {
+                let terr = TypeError::RegionsDoesNotOutlive(sup, sub);
+                self.report_and_explain_type_error(trace, &terr)
+            }
+            infer::Reborrow(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0312,
+                                               "lifetime of reference outlives lifetime of \
+                                                borrowed content...");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "...the reference is valid for ",
+                                                 sub,
+                                                 "...");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "...but the borrowed content is only valid for ",
+                                                 sup,
+                                                 "");
+                err
+            }
+            infer::ReborrowUpvar(span, ref upvar_id) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0313,
+                                               "lifetime of borrowed pointer outlives lifetime \
+                                                of captured variable `{}`...",
+                                               self.tcx.local_var_name_str(upvar_id.var_id));
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "...the borrowed pointer is valid for ",
+                                                 sub,
+                                                 "...");
+                self.tcx
+                    .note_and_explain_region(&mut err,
+                                             &format!("...but `{}` is only valid for ",
+                                                      self.tcx
+                                                          .local_var_name_str(upvar_id.var_id)),
+                                             sup,
+                                             "");
+                err
+            }
+            infer::InfStackClosure(span) => {
+                let mut err =
+                    struct_span_err!(self.tcx.sess, span, E0314, "closure outlives stack frame");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "...the closure must be valid for ",
+                                                 sub,
+                                                 "...");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "...but the closure's stack frame is only valid \
+                                                  for ",
+                                                 sup,
+                                                 "");
+                err
+            }
+            infer::InvokeClosure(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0315,
+                                               "cannot invoke closure outside of its lifetime");
+                self.tcx
+                    .note_and_explain_region(&mut err, "the closure is only valid for ", sup, "");
+                err
+            }
+            infer::DerefPointer(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0473,
+                                               "dereference of reference outside its lifetime");
+                self.tcx
+                    .note_and_explain_region(&mut err, "the reference is only valid for ", sup, "");
+                err
+            }
+            infer::FreeVariable(span, id) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0474,
+                                               "captured variable `{}` does not outlive the \
+                                                enclosing closure",
+                                               self.tcx.local_var_name_str(id));
+                self.tcx
+                    .note_and_explain_region(&mut err, "captured variable is valid for ", sup, "");
+                self.tcx.note_and_explain_region(&mut err, "closure is valid for ", sub, "");
+                err
+            }
+            infer::IndexSlice(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0475,
+                                               "index of slice outside its lifetime");
+                self.tcx.note_and_explain_region(&mut err, "the slice is only valid for ", sup, "");
+                err
+            }
+            infer::RelateObjectBound(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0476,
+                                               "lifetime of the source pointer does not outlive \
+                                                lifetime bound of the object type");
+                self.tcx.note_and_explain_region(&mut err, "object type is valid for ", sub, "");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "source pointer is only valid for ",
+                                                 sup,
+                                                 "");
+                err
+            }
+            infer::RelateParamBound(span, ty) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0477,
+                                               "the type `{}` does not fulfill the required \
+                                                lifetime",
+                                               self.ty_to_string(ty));
+                self.tcx.note_and_explain_region(&mut err, "type must outlive ", sub, "");
+                err
+            }
+            infer::RelateRegionParamBound(span) => {
+                let mut err =
+                    struct_span_err!(self.tcx.sess, span, E0478, "lifetime bound not satisfied");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "lifetime parameter instantiated with ",
+                                                 sup,
+                                                 "");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "but lifetime parameter must outlive ",
+                                                 sub,
+                                                 "");
+                err
+            }
+            infer::RelateDefaultParamBound(span, ty) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0479,
+                                               "the type `{}` (provided as the value of a type \
+                                                parameter) is not valid at this point",
+                                               self.ty_to_string(ty));
+                self.tcx.note_and_explain_region(&mut err, "type must outlive ", sub, "");
+                err
+            }
+            infer::CallRcvr(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0480,
+                                               "lifetime of method receiver does not outlive the \
+                                                method call");
+                self.tcx
+                    .note_and_explain_region(&mut err, "the receiver is only valid for ", sup, "");
+                err
+            }
+            infer::CallArg(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0481,
+                                               "lifetime of function argument does not outlive \
+                                                the function call");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "the function argument is only valid for ",
+                                                 sup,
+                                                 "");
+                err
+            }
+            infer::CallReturn(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0482,
+                                               "lifetime of return value does not outlive the \
+                                                function call");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "the return value is only valid for ",
+                                                 sup,
+                                                 "");
+                err
+            }
+            infer::Operand(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0483,
+                                               "lifetime of operand does not outlive the \
+                                                operation");
+                self.tcx
+                    .note_and_explain_region(&mut err, "the operand is only valid for ", sup, "");
+                err
+            }
+            infer::AddrOf(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0484,
+                                               "reference is not valid at the time of borrow");
+                self.tcx
+                    .note_and_explain_region(&mut err, "the borrow is only valid for ", sup, "");
+                err
+            }
+            infer::AutoBorrow(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0485,
+                                               "auto-reference is not valid at the time \
+                                                of borrow");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "the automatic borrow is only valid for ",
+                                                 sup,
+                                                 "");
+                err
+            }
+            infer::ExprTypeIsNotInScope(t, span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0486,
+                                               "type of expression contains references that are \
+                                                not valid during the expression: `{}`",
+                                               self.ty_to_string(t));
+                self.tcx.note_and_explain_region(&mut err, "type is only valid for ", sup, "");
+                err
+            }
+            infer::SafeDestructor(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0487,
+                                               "unsafe use of destructor: destructor might be \
+                                                called while references are dead");
+                // FIXME (22171): terms "super/subregion" are suboptimal
+                self.tcx.note_and_explain_region(&mut err, "superregion: ", sup, "");
+                self.tcx.note_and_explain_region(&mut err, "subregion: ", sub, "");
+                err
+            }
+            infer::BindingTypeIsNotValidAtDecl(span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0488,
+                                               "lifetime of variable does not enclose its \
+                                                declaration");
+                self.tcx
+                    .note_and_explain_region(&mut err, "the variable is only valid for ", sup, "");
+                err
+            }
+            infer::ParameterInScope(_, span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0489,
+                                               "type/lifetime parameter not in scope here");
+                self.tcx
+                    .note_and_explain_region(&mut err, "the parameter is only valid for ", sub, "");
+                err
+            }
+            infer::DataBorrowed(ty, span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0490,
+                                               "a value of type `{}` is borrowed for too long",
+                                               self.ty_to_string(ty));
+                self.tcx.note_and_explain_region(&mut err, "the type is valid for ", sub, "");
+                self.tcx.note_and_explain_region(&mut err, "but the borrow lasts for ", sup, "");
+                err
+            }
+            infer::ReferenceOutlivesReferent(ty, span) => {
+                let mut err = struct_span_err!(self.tcx.sess,
+                                               span,
+                                               E0491,
+                                               "in type `{}`, reference has a longer lifetime \
+                                                than the data it references",
+                                               self.ty_to_string(ty));
+                self.tcx.note_and_explain_region(&mut err, "the pointer is valid for ", sub, "");
+                self.tcx.note_and_explain_region(&mut err,
+                                                 "but the referenced data is only valid for ",
+                                                 sup,
+                                                 "");
+                err
+            }
+            infer::CompareImplMethodObligation { span,
+                                                 item_name,
+                                                 impl_item_def_id,
+                                                 trait_item_def_id,
+                                                 lint_id } => {
+                self.report_extra_impl_obligation(span,
+                                                  item_name,
+                                                  impl_item_def_id,
+                                                  trait_item_def_id,
+                                                  &format!("`{}: {}`", sup, sub),
+                                                  lint_id)
+            }
+        }
+    }
+}
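
A hedged sketch (not from the commit) of code that exercises `report_concrete_failure`; on compilers of this era the `RelateParamBound` arm above produces E0477, though the exact error code and wording vary between versions:

```rust
// Illustrative only: `&'a i32` is required to satisfy a `'static`
// bound, so regionck reports that the type does not fulfill the
// required lifetime and `note_and_explain_region` describes the
// conflicting regions.
fn require_static<T: 'static>(_t: T) {}

fn call<'a>(x: &'a i32) {
    require_static(x);
}

fn main() {}
```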
index a929060cf98904e53d626677c179b43d20fce79a..b07ef4dfd448e45f1a3651110b4dd6b79838e836 100644 (file)
@@ -210,7 +210,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 /// region that each late-bound region was replaced with.
 pub type SkolemizationMap<'tcx> = FxHashMap<ty::BoundRegion, &'tcx ty::Region>;
 
-/// See `error_reporting.rs` for more details
+/// See `error_reporting` module for more details
 #[derive(Clone, Debug)]
 pub enum ValuePairs<'tcx> {
     Types(ExpectedFound<Ty<'tcx>>),
@@ -221,7 +221,7 @@ pub enum ValuePairs<'tcx> {
 /// The trace designates the path through inference that we took to
 /// encounter an error or subtyping constraint.
 ///
-/// See `error_reporting.rs` for more details.
+/// See `error_reporting` module for more details.
 #[derive(Clone)]
 pub struct TypeTrace<'tcx> {
     cause: ObligationCause<'tcx>,
@@ -230,7 +230,7 @@ pub struct TypeTrace<'tcx> {
 
 /// The origin of a `r1 <= r2` constraint.
 ///
-/// See `error_reporting.rs` for more details
+/// See `error_reporting` module for more details
 #[derive(Clone, Debug)]
 pub enum SubregionOrigin<'tcx> {
     // Arose from a subtyping relation
@@ -348,7 +348,7 @@ pub enum LateBoundRegionConversionTime {
 
 /// Reasons to create a region inference variable
 ///
-/// See `error_reporting.rs` for more details
+/// See `error_reporting` module for more details
 #[derive(Clone, Debug)]
 pub enum RegionVariableOrigin {
     // Region variables created for ill-categorized reasons,
@@ -1295,7 +1295,7 @@ pub fn resolve_regions_and_report_errors(&self,
             // this infcx was in use.  This is totally hokey but
             // otherwise we have a hard time separating legit region
             // errors from silly ones.
-            self.report_region_errors(&errors); // see error_reporting.rs
+            self.report_region_errors(&errors); // see error_reporting module
         }
     }
 
index 70f03e02f46d9b9c6a8dd7c22c7afaa2cd4f31ae..e681d55cf94b893db2fd37f65eb7e5a422bc4dda 100644 (file)
     "uses of #[derive] with raw pointers are rarely correct"
 }
 
-declare_lint! {
-    pub TRANSMUTE_FROM_FN_ITEM_TYPES,
-    Deny,
-    "transmute from function item type to pointer-sized type erroneously allowed"
-}
-
 declare_lint! {
     pub HR_LIFETIME_IN_ASSOC_TYPE,
     Deny,
     "detects use of struct constructors that would be invisible with new visibility rules"
 }
 
+declare_lint! {
+    pub MISSING_FRAGMENT_SPECIFIER,
+    Warn,
+    "detects missing fragment specifiers in unused `macro_rules!` patterns"
+}
+
 declare_lint! {
     pub DEPRECATED,
     Warn,
@@ -273,7 +273,6 @@ fn get_lints(&self) -> LintArray {
             ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN,
             CONST_ERR,
             RAW_POINTER_DERIVE,
-            TRANSMUTE_FROM_FN_ITEM_TYPES,
             OVERLAPPING_INHERENT_IMPLS,
             RENAMED_AND_REMOVED_LINTS,
             SUPER_OR_SELF_IN_GLOBAL_PATH,
@@ -286,6 +285,7 @@ fn get_lints(&self) -> LintArray {
             LEGACY_DIRECTORY_OWNERSHIP,
             LEGACY_IMPORTS,
             LEGACY_CONSTRUCTOR_VISIBILITY,
+            MISSING_FRAGMENT_SPECIFIER,
             DEPRECATED
         )
     }
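
A minimal illustration (not part of the commit) of the pattern the new MISSING_FRAGMENT_SPECIFIER lint is aimed at: a matcher variable with no fragment specifier in a macro arm that is never used, which now warns instead of failing outright:

```rust
// Illustrative only: `$e` is missing a fragment specifier such as
// `:expr`. Because the arm is never exercised, the warn-by-default
// MISSING_FRAGMENT_SPECIFIER lint fires instead of a hard error.
macro_rules! m {
    ($e) => {};
}

fn main() {}
```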
index cdbf92e93a4cb983bcd70c53dc849e7e9e15117d..c9722adc9510cc6f7eaa8f6a37e9295c4d983931 100644 (file)
@@ -17,7 +17,6 @@
 use ty::layout::{LayoutError, Pointer, SizeSkeleton};
 
 use syntax::abi::Abi::RustIntrinsic;
-use syntax::ast;
 use syntax_pos::Span;
 use hir::intravisit::{self, Visitor, NestedVisitorMap};
 use hir;
@@ -37,6 +36,35 @@ struct ExprVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
     infcx: &'a InferCtxt<'a, 'gcx, 'tcx>
 }
 
+/// If the type is `Option<T>`, this returns `T`; otherwise it returns
+/// the type itself. Works on most `Option`-like types.
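+/// For example, `Option<fn()>` unpacks to `fn()`.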
+fn unpack_option_like<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                ty: Ty<'tcx>)
+                                -> Ty<'tcx> {
+    let (def, substs) = match ty.sty {
+        ty::TyAdt(def, substs) => (def, substs),
+        _ => return ty
+    };
+
+    if def.variants.len() == 2 && !def.repr.c && def.repr.int.is_none() {
+        let data_idx;
+
+        if def.variants[0].fields.is_empty() {
+            data_idx = 1;
+        } else if def.variants[1].fields.is_empty() {
+            data_idx = 0;
+        } else {
+            return ty;
+        }
+
+        if def.variants[data_idx].fields.len() == 1 {
+            return def.variants[data_idx].fields[0].ty(tcx, substs);
+        }
+    }
+
+    ty
+}
+
 impl<'a, 'gcx, 'tcx> ExprVisitor<'a, 'gcx, 'tcx> {
     fn def_id_is_transmute(&self, def_id: DefId) -> bool {
         let intrinsic = match self.infcx.tcx.item_type(def_id).sty {
@@ -46,7 +74,7 @@ fn def_id_is_transmute(&self, def_id: DefId) -> bool {
         intrinsic && self.infcx.tcx.item_name(def_id) == "transmute"
     }
 
-    fn check_transmute(&self, span: Span, from: Ty<'gcx>, to: Ty<'gcx>, id: ast::NodeId) {
+    fn check_transmute(&self, span: Span, from: Ty<'gcx>, to: Ty<'gcx>) {
         let sk_from = SizeSkeleton::compute(from, self.infcx);
         let sk_to = SizeSkeleton::compute(to, self.infcx);
 
@@ -56,15 +84,17 @@ fn check_transmute(&self, span: Span, from: Ty<'gcx>, to: Ty<'gcx>, id: ast::Nod
                 return;
             }
 
+            // Special-case transmuting from `typeof(function)` and
+            // `Option<typeof(function)>` to present a clearer error.
+            let from = unpack_option_like(self.infcx.tcx.global_tcx(), from);
             match (&from.sty, sk_to) {
                 (&ty::TyFnDef(..), SizeSkeleton::Known(size_to))
                         if size_to == Pointer.size(&self.infcx.tcx.data_layout) => {
-                    // FIXME #19925 Remove this warning after a release cycle.
-                    let msg = format!("`{}` is now zero-sized and has to be cast \
-                                       to a pointer before transmuting to `{}`",
-                                      from, to);
-                    self.infcx.tcx.sess.add_lint(
-                        ::lint::builtin::TRANSMUTE_FROM_FN_ITEM_TYPES, id, span, msg);
+                    struct_span_err!(self.infcx.tcx.sess, span, E0591,
+                                     "`{}` is zero-sized and can't be transmuted to `{}`",
+                                     from, to)
+                        .span_note(span, "cast with `as` to a pointer instead")
+                        .emit();
                     return;
                 }
                 _ => {}
@@ -140,7 +170,7 @@ fn visit_expr(&mut self, expr: &'gcx hir::Expr) {
                     ty::TyFnDef(.., sig) if sig.abi() == RustIntrinsic => {
                         let from = sig.inputs().skip_binder()[0];
                         let to = *sig.output().skip_binder();
-                        self.check_transmute(expr.span, from, to, expr.id);
+                        self.check_transmute(expr.span, from, to);
                     }
                     _ => {
                         span_bug!(expr.span, "transmute wasn't a bare fn?!");
index 70ca5fe83a93299e3d529113426e60fef2af79b2..f850fd97727810ca3230bed219a0d9e2b579443b 100644 (file)
@@ -21,7 +21,6 @@
     SelectionContext,
     SelectionError,
     ObjectSafetyViolation,
-    MethodViolationCode,
 };
 
 use fmt_macros::{Parser, Piece, Position};
@@ -267,61 +266,63 @@ fn on_unimplemented_note(&self,
 
         let span = obligation.cause.span;
         let mut report = None;
-        for item in self.tcx.get_attrs(def_id).iter() {
-            if item.check_name("rustc_on_unimplemented") {
-                let err_sp = item.meta().span.substitute_dummy(span);
-                let trait_str = self.tcx.item_path_str(trait_ref.def_id);
-                if let Some(istring) = item.value_str() {
-                    let istring = &*istring.as_str();
-                    let generics = self.tcx.item_generics(trait_ref.def_id);
-                    let generic_map = generics.types.iter().map(|param| {
-                        (param.name.as_str().to_string(),
-                         trait_ref.substs.type_for_def(param).to_string())
-                    }).collect::<FxHashMap<String, String>>();
-                    let parser = Parser::new(istring);
-                    let mut errored = false;
-                    let err: String = parser.filter_map(|p| {
-                        match p {
-                            Piece::String(s) => Some(s),
-                            Piece::NextArgument(a) => match a.position {
-                                Position::ArgumentNamed(s) => match generic_map.get(s) {
-                                    Some(val) => Some(val),
-                                    None => {
-                                        span_err!(self.tcx.sess, err_sp, E0272,
-                                                       "the #[rustc_on_unimplemented] \
-                                                                attribute on \
-                                                                trait definition for {} refers to \
-                                                                non-existent type parameter {}",
-                                                               trait_str, s);
-                                        errored = true;
-                                        None
-                                    }
-                                },
-                                _ => {
-                                    span_err!(self.tcx.sess, err_sp, E0273,
-                                              "the #[rustc_on_unimplemented] attribute \
-                                               on trait definition for {} must have \
-                                               named format arguments, eg \
-                                               `#[rustc_on_unimplemented = \
-                                                \"foo {{T}}\"]`", trait_str);
+        if let Some(item) = self.tcx
+            .get_attrs(def_id)
+            .into_iter()
+            .filter(|a| a.check_name("rustc_on_unimplemented"))
+            .next()
+        {
+            let err_sp = item.meta().span.substitute_dummy(span);
+            let trait_str = self.tcx.item_path_str(trait_ref.def_id);
+            if let Some(istring) = item.value_str() {
+                let istring = &*istring.as_str();
+                let generics = self.tcx.item_generics(trait_ref.def_id);
+                let generic_map = generics.types.iter().map(|param| {
+                    (param.name.as_str().to_string(),
+                        trait_ref.substs.type_for_def(param).to_string())
+                }).collect::<FxHashMap<String, String>>();
+                let parser = Parser::new(istring);
+                let mut errored = false;
+                let err: String = parser.filter_map(|p| {
+                    match p {
+                        Piece::String(s) => Some(s),
+                        Piece::NextArgument(a) => match a.position {
+                            Position::ArgumentNamed(s) => match generic_map.get(s) {
+                                Some(val) => Some(val),
+                                None => {
+                                    span_err!(self.tcx.sess, err_sp, E0272,
+                                                    "the #[rustc_on_unimplemented] \
+                                                            attribute on \
+                                                            trait definition for {} refers to \
+                                                            non-existent type parameter {}",
+                                                            trait_str, s);
                                     errored = true;
                                     None
                                 }
+                            },
+                            _ => {
+                                span_err!(self.tcx.sess, err_sp, E0273,
+                                            "the #[rustc_on_unimplemented] attribute \
+                                            on trait definition for {} must have \
+                                            named format arguments, eg \
+                                            `#[rustc_on_unimplemented = \
+                                            \"foo {{T}}\"]`", trait_str);
+                                errored = true;
+                                None
                             }
                         }
-                    }).collect();
-                    // Report only if the format string checks out
-                    if !errored {
-                        report = Some(err);
                     }
-                } else {
-                    span_err!(self.tcx.sess, err_sp, E0274,
-                                            "the #[rustc_on_unimplemented] attribute on \
-                                                     trait definition for {} must have a value, \
-                                                     eg `#[rustc_on_unimplemented = \"foo\"]`",
-                                                     trait_str);
+                }).collect();
+                // Report only if the format string checks out
+                if !errored {
+                    report = Some(err);
                 }
-                break;
+            } else {
+                span_err!(self.tcx.sess, err_sp, E0274,
+                                        "the #[rustc_on_unimplemented] attribute on \
+                                                    trait definition for {} must have a value, \
+                                                    eg `#[rustc_on_unimplemented = \"foo\"]`",
+                                                    trait_str);
             }
         }
         report
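
For context, the attribute that these E0272/E0273/E0274 checks validate is written on a trait definition and may interpolate the trait's type parameters by name. A minimal sketch of the string form checked here (hypothetical trait; at the time the attribute was feature-gated, assuming the `on_unimplemented` feature name):

```Rust
#![feature(on_unimplemented)]

#[rustc_on_unimplemented = "`{Self}` cannot be frobnicated with `{T}`"]
trait Frobnicate<T> {
    fn frobnicate(&self, item: T);
}

fn main() {}
```

Referring to a parameter that does not exist (e.g. `{U}`) triggers E0272, an unnamed argument like `{}` triggers E0273, and omitting the string value entirely triggers E0274; the refactor above only replaces the `for … break` loop with `.filter(…).next()` over the attribute list, keeping that behaviour.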
@@ -359,34 +360,9 @@ fn find_similar_impl_candidates(&self,
     }
 
     fn report_similar_impl_candidates(&self,
-                                      trait_ref: ty::PolyTraitRef<'tcx>,
+                                      impl_candidates: Vec<ty::TraitRef<'tcx>>,
                                       err: &mut DiagnosticBuilder)
     {
-        let simp = fast_reject::simplify_type(self.tcx,
-                                              trait_ref.skip_binder().self_ty(),
-                                              true);
-        let mut impl_candidates = Vec::new();
-        let trait_def = self.tcx.lookup_trait_def(trait_ref.def_id());
-
-        match simp {
-            Some(simp) => trait_def.for_each_impl(self.tcx, |def_id| {
-                let imp = self.tcx.impl_trait_ref(def_id).unwrap();
-                let imp_simp = fast_reject::simplify_type(self.tcx,
-                                                          imp.self_ty(),
-                                                          true);
-                if let Some(imp_simp) = imp_simp {
-                    if simp != imp_simp {
-                        return;
-                    }
-                }
-                impl_candidates.push(imp);
-            }),
-            None => trait_def.for_each_impl(self.tcx, |def_id| {
-                impl_candidates.push(
-                    self.tcx.impl_trait_ref(def_id).unwrap());
-            })
-        };
-
         if impl_candidates.is_empty() {
             return;
         }
@@ -525,127 +501,118 @@ pub fn report_selection_error(&self,
                         lint_id)
                         .emit();
                     return;
-                } else {
-                    match obligation.predicate {
-                        ty::Predicate::Trait(ref trait_predicate) => {
-                            let trait_predicate =
-                                self.resolve_type_vars_if_possible(trait_predicate);
-
-                            if self.tcx.sess.has_errors() && trait_predicate.references_error() {
-                                return;
-                            } else {
-                                let trait_ref = trait_predicate.to_poly_trait_ref();
-                                let (post_message, pre_message) = match self.get_parent_trait_ref(
-                                    &obligation.cause.code)
-                                {
-                                    Some(t) => {
-                                        (format!(" in `{}`", t), format!("within `{}`, ", t))
-                                    }
-                                    None => (String::new(), String::new()),
-                                };
-                                let mut err = struct_span_err!(
-                                    self.tcx.sess,
-                                    span,
-                                    E0277,
-                                    "the trait bound `{}` is not satisfied{}",
-                                    trait_ref.to_predicate(),
-                                    post_message);
-                                err.span_label(span,
-                                               &format!("{}the trait `{}` is not \
-                                                         implemented for `{}`",
-                                                        pre_message,
-                                                        trait_ref,
-                                                        trait_ref.self_ty()));
-
-                                // Try to report a help message
-
-                                if !trait_ref.has_infer_types() &&
-                                    self.predicate_can_apply(trait_ref) {
-                                    // If a where-clause may be useful, remind the
-                                    // user that they can add it.
-                                    //
-                                    // don't display an on-unimplemented note, as
-                                    // these notes will often be of the form
-                                    //     "the type `T` can't be frobnicated"
-                                    // which is somewhat confusing.
-                                    err.help(&format!("consider adding a `where {}` bound",
-                                                      trait_ref.to_predicate()));
-                                } else if let Some(s) = self.on_unimplemented_note(trait_ref,
-                                                                                   obligation) {
-                                    // If it has a custom "#[rustc_on_unimplemented]"
-                                    // error message, let's display it!
-                                    err.note(&s);
-                                } else {
-                                    // If we can't show anything useful, try to find
-                                    // similar impls.
-                                    let impl_candidates =
-                                        self.find_similar_impl_candidates(trait_ref);
-                                    if impl_candidates.len() > 0 {
-                                        self.report_similar_impl_candidates(trait_ref, &mut err);
-                                    }
-                                }
-                                err
-                            }
-                        }
+                }
+                match obligation.predicate {
+                    ty::Predicate::Trait(ref trait_predicate) => {
+                        let trait_predicate =
+                            self.resolve_type_vars_if_possible(trait_predicate);
 
-                        ty::Predicate::Equate(ref predicate) => {
-                            let predicate = self.resolve_type_vars_if_possible(predicate);
-                            let err = self.equality_predicate(&obligation.cause,
-                                                              &predicate).err().unwrap();
-                            struct_span_err!(self.tcx.sess, span, E0278,
-                                "the requirement `{}` is not satisfied (`{}`)",
-                                predicate, err)
+                        if self.tcx.sess.has_errors() && trait_predicate.references_error() {
+                            return;
                         }
-
-                        ty::Predicate::RegionOutlives(ref predicate) => {
-                            let predicate = self.resolve_type_vars_if_possible(predicate);
-                            let err = self.region_outlives_predicate(&obligation.cause,
-                                                                     &predicate).err().unwrap();
-                            struct_span_err!(self.tcx.sess, span, E0279,
-                                "the requirement `{}` is not satisfied (`{}`)",
-                                predicate, err)
+                        let trait_ref = trait_predicate.to_poly_trait_ref();
+                        let (post_message, pre_message) =
+                            self.get_parent_trait_ref(&obligation.cause.code)
+                                .map(|t| (format!(" in `{}`", t), format!("within `{}`, ", t)))
+                                .unwrap_or((String::new(), String::new()));
+                        let mut err = struct_span_err!(
+                            self.tcx.sess,
+                            span,
+                            E0277,
+                            "the trait bound `{}` is not satisfied{}",
+                            trait_ref.to_predicate(),
+                            post_message);
+                        err.span_label(span,
+                                        &format!("{}the trait `{}` is not \
+                                                    implemented for `{}`",
+                                                pre_message,
+                                                trait_ref,
+                                                trait_ref.self_ty()));
+
+                        // Try to report a help message
+
+                        if !trait_ref.has_infer_types() &&
+                            self.predicate_can_apply(trait_ref) {
+                            // If a where-clause may be useful, remind the
+                            // user that they can add it.
+                            //
+                            // don't display an on-unimplemented note, as
+                            // these notes will often be of the form
+                            //     "the type `T` can't be frobnicated"
+                            // which is somewhat confusing.
+                            err.help(&format!("consider adding a `where {}` bound",
+                                                trait_ref.to_predicate()));
+                        } else if let Some(s) = self.on_unimplemented_note(trait_ref,
+                                                                            obligation) {
+                            // If it has a custom "#[rustc_on_unimplemented]"
+                            // error message, let's display it!
+                            err.note(&s);
+                        } else {
+                            // If we can't show anything useful, try to find
+                            // similar impls.
+                            let impl_candidates = self.find_similar_impl_candidates(trait_ref);
+                            self.report_similar_impl_candidates(impl_candidates, &mut err);
                         }
+                        err
+                    }
 
-                        ty::Predicate::Projection(..) | ty::Predicate::TypeOutlives(..) => {
-                            let predicate =
-                                self.resolve_type_vars_if_possible(&obligation.predicate);
-                            struct_span_err!(self.tcx.sess, span, E0280,
-                                "the requirement `{}` is not satisfied",
-                                predicate)
-                        }
+                    ty::Predicate::Equate(ref predicate) => {
+                        let predicate = self.resolve_type_vars_if_possible(predicate);
+                        let err = self.equality_predicate(&obligation.cause,
+                                                            &predicate).err().unwrap();
+                        struct_span_err!(self.tcx.sess, span, E0278,
+                            "the requirement `{}` is not satisfied (`{}`)",
+                            predicate, err)
+                    }
 
-                        ty::Predicate::ObjectSafe(trait_def_id) => {
-                            let violations = self.tcx.object_safety_violations(trait_def_id);
-                            self.tcx.report_object_safety_error(span,
-                                                                trait_def_id,
-                                                                violations)
-                        }
+                    ty::Predicate::RegionOutlives(ref predicate) => {
+                        let predicate = self.resolve_type_vars_if_possible(predicate);
+                        let err = self.region_outlives_predicate(&obligation.cause,
+                                                                    &predicate).err().unwrap();
+                        struct_span_err!(self.tcx.sess, span, E0279,
+                            "the requirement `{}` is not satisfied (`{}`)",
+                            predicate, err)
+                    }
 
-                        ty::Predicate::ClosureKind(closure_def_id, kind) => {
-                            let found_kind = self.closure_kind(closure_def_id).unwrap();
-                            let closure_span = self.tcx.hir.span_if_local(closure_def_id).unwrap();
-                            let mut err = struct_span_err!(
-                                self.tcx.sess, closure_span, E0525,
-                                "expected a closure that implements the `{}` trait, \
-                                 but this closure only implements `{}`",
-                                kind,
-                                found_kind);
-                            err.span_note(
-                                obligation.cause.span,
-                                &format!("the requirement to implement \
-                                          `{}` derives from here", kind));
-                            err.emit();
-                            return;
-                        }
+                    ty::Predicate::Projection(..) | ty::Predicate::TypeOutlives(..) => {
+                        let predicate =
+                            self.resolve_type_vars_if_possible(&obligation.predicate);
+                        struct_span_err!(self.tcx.sess, span, E0280,
+                            "the requirement `{}` is not satisfied",
+                            predicate)
+                    }
 
-                        ty::Predicate::WellFormed(ty) => {
-                            // WF predicates cannot themselves make
-                            // errors. They can only block due to
-                            // ambiguity; otherwise, they always
-                            // degenerate into other obligations
-                            // (which may fail).
-                            span_bug!(span, "WF predicate not satisfied for {:?}", ty);
-                        }
+                    ty::Predicate::ObjectSafe(trait_def_id) => {
+                        let violations = self.tcx.object_safety_violations(trait_def_id);
+                        self.tcx.report_object_safety_error(span,
+                                                            trait_def_id,
+                                                            violations)
+                    }
+
+                    ty::Predicate::ClosureKind(closure_def_id, kind) => {
+                        let found_kind = self.closure_kind(closure_def_id).unwrap();
+                        let closure_span = self.tcx.hir.span_if_local(closure_def_id).unwrap();
+                        let mut err = struct_span_err!(
+                            self.tcx.sess, closure_span, E0525,
+                            "expected a closure that implements the `{}` trait, \
+                                but this closure only implements `{}`",
+                            kind,
+                            found_kind);
+                        err.span_note(
+                            obligation.cause.span,
+                            &format!("the requirement to implement \
+                                        `{}` derives from here", kind));
+                        err.emit();
+                        return;
+                    }
+
+                    ty::Predicate::WellFormed(ty) => {
+                        // WF predicates cannot themselves make
+                        // errors. They can only block due to
+                        // ambiguity; otherwise, they always
+                        // degenerate into other obligations
+                        // (which may fail).
+                        span_bug!(span, "WF predicate not satisfied for {:?}", ty);
                     }
                 }
             }
@@ -713,38 +680,7 @@ pub fn report_object_safety_error(self,
             if !reported_violations.insert(violation.clone()) {
                 continue;
             }
-            let buf;
-            let note = match violation {
-                ObjectSafetyViolation::SizedSelf => {
-                    "the trait cannot require that `Self : Sized`"
-                }
-
-                ObjectSafetyViolation::SupertraitSelf => {
-                    "the trait cannot use `Self` as a type parameter \
-                         in the supertrait listing"
-                }
-
-                ObjectSafetyViolation::Method(name,
-                                              MethodViolationCode::StaticMethod) => {
-                    buf = format!("method `{}` has no receiver", name);
-                    &buf
-                }
-
-                ObjectSafetyViolation::Method(name,
-                                              MethodViolationCode::ReferencesSelf) => {
-                    buf = format!("method `{}` references the `Self` type \
-                                       in its arguments or return type",
-                                  name);
-                    &buf
-                }
-
-                ObjectSafetyViolation::Method(name,
-                                              MethodViolationCode::Generic) => {
-                    buf = format!("method `{}` has generic type parameters", name);
-                    &buf
-                }
-            };
-            err.note(note);
+            err.note(&violation.error_msg());
         }
         err
     }
@@ -774,46 +710,46 @@ fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>) {
                 let trait_ref = data.to_poly_trait_ref();
                 let self_ty = trait_ref.self_ty();
                 if predicate.references_error() {
-                } else {
-                    // Typically, this ambiguity should only happen if
-                    // there are unresolved type inference variables
-                    // (otherwise it would suggest a coherence
-                    // failure). But given #21974 that is not necessarily
-                    // the case -- we can have multiple where clauses that
-                    // are only distinguished by a region, which results
-                    // in an ambiguity even when all types are fully
-                    // known, since we don't dispatch based on region
-                    // relationships.
-
-                    // This is kind of a hack: it frequently happens that some earlier
-                    // error prevents types from being fully inferred, and then we get
-                    // a bunch of uninteresting errors saying something like "<generic
-                    // #0> doesn't implement Sized".  It may even be true that we
-                    // could just skip over all checks where the self-ty is an
-                    // inference variable, but I was afraid that there might be an
-                    // inference variable created, registered as an obligation, and
-                    // then never forced by writeback, and hence by skipping here we'd
-                    // be ignoring the fact that we don't KNOW the type works
-                    // out. Though even that would probably be harmless, given that
-                    // we're only talking about builtin traits, which are known to be
-                    // inhabited. But in any case I just threw in this check for
-                    // has_errors() to be sure that compilation isn't happening
-                    // anyway. In that case, why inundate the user.
-                    if !self.tcx.sess.has_errors() {
-                        if
-                            self.tcx.lang_items.sized_trait()
-                            .map_or(false, |sized_id| sized_id == trait_ref.def_id())
-                        {
-                            self.need_type_info(obligation, self_ty);
-                        } else {
-                            let mut err = struct_span_err!(self.tcx.sess,
-                                                           obligation.cause.span, E0283,
-                                                           "type annotations required: \
-                                                            cannot resolve `{}`",
-                                                           predicate);
-                            self.note_obligation_cause(&mut err, obligation);
-                            err.emit();
-                        }
+                    return;
+                }
+                // Typically, this ambiguity should only happen if
+                // there are unresolved type inference variables
+                // (otherwise it would suggest a coherence
+                // failure). But given #21974 that is not necessarily
+                // the case -- we can have multiple where clauses that
+                // are only distinguished by a region, which results
+                // in an ambiguity even when all types are fully
+                // known, since we don't dispatch based on region
+                // relationships.
+
+                // This is kind of a hack: it frequently happens that some earlier
+                // error prevents types from being fully inferred, and then we get
+                // a bunch of uninteresting errors saying something like "<generic
+                // #0> doesn't implement Sized".  It may even be true that we
+                // could just skip over all checks where the self-ty is an
+                // inference variable, but I was afraid that there might be an
+                // inference variable created, registered as an obligation, and
+                // then never forced by writeback, and hence by skipping here we'd
+                // be ignoring the fact that we don't KNOW the type works
+                // out. Though even that would probably be harmless, given that
+                // we're only talking about builtin traits, which are known to be
+                // inhabited. But in any case I just threw in this check for
+                // has_errors() to be sure that compilation isn't happening
+                // anyway. In that case, why inundate the user.
+                if !self.tcx.sess.has_errors() {
+                    if
+                        self.tcx.lang_items.sized_trait()
+                        .map_or(false, |sized_id| sized_id == trait_ref.def_id())
+                    {
+                        self.need_type_info(obligation, self_ty);
+                    } else {
+                        let mut err = struct_span_err!(self.tcx.sess,
+                                                        obligation.cause.span, E0283,
+                                                        "type annotations required: \
+                                                        cannot resolve `{}`",
+                                                        predicate);
+                        self.note_obligation_cause(&mut err, obligation);
+                        err.emit();
                     }
                 }
             }
index 60808fbc741fb9898bde442a1f568641256b68d9..2ebe0d459fab1453451b02f4bc7fd208224ac76b 100644 (file)
@@ -23,6 +23,7 @@
 use traits;
 use ty::{self, Ty, TyCtxt, TypeFoldable};
 use ty::subst::Substs;
+use std::borrow::Cow;
 use syntax::ast;
 
 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
@@ -38,6 +39,25 @@ pub enum ObjectSafetyViolation {
     Method(ast::Name, MethodViolationCode),
 }
 
+impl ObjectSafetyViolation {
+    pub fn error_msg(&self) -> Cow<'static, str> {
+        match *self {
+            ObjectSafetyViolation::SizedSelf =>
+                "the trait cannot require that `Self : Sized`".into(),
+            ObjectSafetyViolation::SupertraitSelf =>
+                "the trait cannot use `Self` as a type parameter \
+                 in the supertrait listing".into(),
+            ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod) =>
+                format!("method `{}` has no receiver", name).into(),
+            ObjectSafetyViolation::Method(name, MethodViolationCode::ReferencesSelf) =>
+                format!("method `{}` references the `Self` type \
+                         in its arguments or return type", name).into(),
+            ObjectSafetyViolation::Method(name, MethodViolationCode::Generic) =>
+                format!("method `{}` has generic type parameters", name).into(),
+        }
+    }
+}
+
 /// Reasons a method might not be object-safe.
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 pub enum MethodViolationCode {
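
The new `error_msg` helper returns `Cow<'static, str>` so the two fixed messages can borrow a `&'static str` while the per-method messages allocate a `String`, letting `report_object_safety_error` above call `err.note(&violation.error_msg())` uniformly. A minimal sketch of that pattern with hypothetical names:

```Rust
use std::borrow::Cow;

enum Violation {
    SizedSelf,
    Method(String),
}

impl Violation {
    // Borrow the constant message; allocate only for the dynamic one.
    fn error_msg(&self) -> Cow<'static, str> {
        match *self {
            Violation::SizedSelf =>
                "the trait cannot require that `Self : Sized`".into(),
            Violation::Method(ref name) =>
                format!("method `{}` has no receiver", name).into(),
        }
    }
}

fn main() {
    assert_eq!(Violation::SizedSelf.error_msg(),
               "the trait cannot require that `Self : Sized`");
    assert_eq!(Violation::Method("new".to_string()).error_msg(),
               "method `new` has no receiver");
}
```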
index e7895ca799012530852c0c49be85c9a95466f3bd..123db6e89476c52c0418ad4c3db34cdd8839da8d 100644 (file)
@@ -1201,7 +1201,8 @@ pub fn compute_uncached(ty: Ty<'gcx>,
                     });
                 }
 
-                if !def.is_enum() || def.variants.len() == 1 {
+                if !def.is_enum() || (def.variants.len() == 1 &&
+                                      !def.repr.inhibit_enum_layout_opt()) {
                     // Struct, or union, or univariant enum equivalent to a struct.
                     // (Typechecking will reject discriminant-sizing attrs.)
 
@@ -1250,7 +1251,7 @@ pub fn compute_uncached(ty: Ty<'gcx>,
                     v.fields.iter().map(|field| field.ty(tcx, substs)).collect::<Vec<_>>()
                 }).collect::<Vec<_>>();
 
-                if variants.len() == 2 && !def.repr.c {
+                if variants.len() == 2 && !def.repr.inhibit_enum_layout_opt() {
                     // Nullable pointer optimization
                     for discr in 0..2 {
                         let other_fields = variants[1 - discr].iter().map(|ty| {
index 55b6f61148d7715b947e7665d25942536f4b82f0..8cf8a839afbffaca5e62c42c532be118a1168509 100644 (file)
@@ -1097,24 +1097,6 @@ pub fn is_empty(&self) -> bool {
     }
 }
 
-impl<'tcx> TraitRef<'tcx> {
-    pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>) -> TraitRef<'tcx> {
-        TraitRef { def_id: def_id, substs: substs }
-    }
-
-    pub fn self_ty(&self) -> Ty<'tcx> {
-        self.substs.type_at(0)
-    }
-
-    pub fn input_types<'a>(&'a self) -> impl DoubleEndedIterator<Item=Ty<'tcx>> + 'a {
-        // Select only the "input types" from a trait-reference. For
-        // now this is all the types that appear in the
-        // trait-reference, but it should eventually exclude
-        // associated types.
-        self.substs.types()
-    }
-}
-
 /// When type checking, we use the `ParameterEnvironment` to track
 /// details about the type/lifetime parameters that are in scope.
 /// It primarily stores the bounds information.
@@ -1409,6 +1391,13 @@ pub fn new(tcx: TyCtxt, did: DefId) -> ReprOptions {
     pub fn discr_type(&self) -> attr::IntType {
         self.int.unwrap_or(attr::SignedInt(ast::IntTy::Is))
     }
+
+    /// Returns true if this `#[repr()]` should inhibit "smart enum
+    /// layout" optimizations, such as representing `Foo<&T>` as a
+    /// single pointer.
+    pub fn inhibit_enum_layout_opt(&self) -> bool {
+        self.c || self.int.is_some()
+    }
 }
 
 impl<'a, 'gcx, 'tcx> AdtDef {
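
`inhibit_enum_layout_opt` returns true for `#[repr(C)]` and for explicit integer reprs, and the layout code above now consults it both for univariant enums and before applying the nullable-pointer optimization. A minimal sketch of the observable effect on a current toolchain (the optimization itself is an implementation detail, so only the inequality matters):

```Rust
use std::mem::size_of;

#[allow(dead_code)]
enum Opt<T> { None, Some(T) }       // eligible for the single-pointer layout

#[allow(dead_code)]
#[repr(C)]
enum OptC<T> { None, Some(T) }      // #[repr(C)] inhibits the optimization

fn main() {
    // A reference is never null, so the `None` case can reuse the null value.
    assert_eq!(size_of::<Opt<&u8>>(), size_of::<&u8>());
    // With #[repr(C)] the enum keeps an explicit tag next to the pointer.
    assert!(size_of::<OptC<&u8>>() > size_of::<&u8>());
}
```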
index aa2990679b6acc27efbdae1f2a3718f8994cb7ed..e686d62019145bc13d7aaadb1d955a4116d0a86b 100644 (file)
@@ -389,6 +389,24 @@ pub struct TraitRef<'tcx> {
     pub substs: &'tcx Substs<'tcx>,
 }
 
+impl<'tcx> TraitRef<'tcx> {
+    pub fn new(def_id: DefId, substs: &'tcx Substs<'tcx>) -> TraitRef<'tcx> {
+        TraitRef { def_id: def_id, substs: substs }
+    }
+
+    pub fn self_ty(&self) -> Ty<'tcx> {
+        self.substs.type_at(0)
+    }
+
+    pub fn input_types<'a>(&'a self) -> impl DoubleEndedIterator<Item=Ty<'tcx>> + 'a {
+        // Select only the "input types" from a trait-reference. For
+        // now this is all the types that appear in the
+        // trait-reference, but it should eventually exclude
+        // associated types.
+        self.substs.types()
+    }
+}
+
 pub type PolyTraitRef<'tcx> = Binder<TraitRef<'tcx>>;
 
 impl<'tcx> PolyTraitRef<'tcx> {
index ddf09f5cfe0e08dd5cb61d10e2af4bcf7d6391d7..9619ba8472404bb70e0a1fffca1e620841dbe528 100644 (file)
@@ -688,6 +688,14 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
 
         let krate = ecx.monotonic_expander().expand_crate(krate);
 
+        let mut missing_fragment_specifiers: Vec<_> =
+            ecx.parse_sess.missing_fragment_specifiers.borrow().iter().cloned().collect();
+        missing_fragment_specifiers.sort();
+        for span in missing_fragment_specifiers {
+            let lint = lint::builtin::MISSING_FRAGMENT_SPECIFIER;
+            let msg = "missing fragment specifier".to_string();
+            sess.add_lint(lint, ast::CRATE_NODE_ID, span, msg);
+        }
         if ecx.parse_sess.span_diagnostic.err_count() - ecx.resolve_err_count > err_count {
             ecx.parse_sess.span_diagnostic.abort_if_errors();
         }
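
The new loop drains the spans the parser recorded in `missing_fragment_specifiers` and reports each one through the `MISSING_FRAGMENT_SPECIFIER` future-compatibility lint registered further down in this diff. The pattern that produces such a span is a `macro_rules!` matcher that names a metavariable without a fragment kind; a minimal sketch of offending input (how loudly it is reported depends on the toolchain):

```Rust
macro_rules! m {
    ($e) => { () };   // `$e` has no fragment specifier such as `:expr`
}

fn main() {}
```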
index 150a2c39db7a827e9796d5dca3edb992b88e39d5..b075fa599924996650b2571afa356f312f2484c7 100644 (file)
@@ -1044,26 +1044,6 @@ fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
                     self.hash_token_tree(sub_tt);
                 }
             }
-            tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
-                hash_span!(self, span);
-                let tokenstream::SequenceRepetition {
-                    ref tts,
-                    ref separator,
-                    op,
-                    num_captures,
-                } = **sequence_repetition;
-
-                tts.len().hash(self.st);
-                for sub_tt in tts {
-                    self.hash_token_tree(sub_tt);
-                }
-                self.hash_discriminant(separator);
-                if let Some(ref separator) = *separator {
-                    self.hash_token(separator, span);
-                }
-                op.hash(self.st);
-                num_captures.hash(self.st);
-            }
         }
     }
 
@@ -1129,10 +1109,6 @@ fn hash_token(&mut self,
             token::Token::Ident(ident) |
             token::Token::Lifetime(ident) |
             token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
-            token::Token::MatchNt(ident1, ident2) => {
-                ident1.name.as_str().hash(self.st);
-                ident2.name.as_str().hash(self.st);
-            }
 
             token::Token::Interpolated(ref non_terminal) => {
                 // FIXME(mw): This could be implemented properly. It's just a
index 8fb1740e66eacbfccf65cd8991bcd67aaea28130..443a219928f1c04229ed0f728fb9b2c436fafcd3 100644 (file)
@@ -195,10 +195,6 @@ macro_rules! add_lint_group {
             id: LintId::of(SUPER_OR_SELF_IN_GLOBAL_PATH),
             reference: "issue #36888 <https://github.com/rust-lang/rust/issues/36888>",
         },
-        FutureIncompatibleInfo {
-            id: LintId::of(TRANSMUTE_FROM_FN_ITEM_TYPES),
-            reference: "issue #19925 <https://github.com/rust-lang/rust/issues/19925>",
-        },
         FutureIncompatibleInfo {
             id: LintId::of(OVERLAPPING_INHERENT_IMPLS),
             reference: "issue #36889 <https://github.com/rust-lang/rust/issues/36889>",
@@ -247,6 +243,10 @@ macro_rules! add_lint_group {
             id: LintId::of(LEGACY_CONSTRUCTOR_VISIBILITY),
             reference: "issue #39207 <https://github.com/rust-lang/rust/issues/39207>",
         },
+        FutureIncompatibleInfo {
+            id: LintId::of(MISSING_FRAGMENT_SPECIFIER),
+            reference: "issue #40107 <https://github.com/rust-lang/rust/issues/40107>",
+        },
         ]);
 
     // Register renamed and removed lints
@@ -260,4 +260,6 @@ macro_rules! add_lint_group {
     store.register_removed("raw_pointer_deriving",
                            "using derive with raw pointers is ok");
     store.register_removed("drop_with_repr_extern", "drop flags have been removed");
+    store.register_removed("transmute_from_fn_item_types",
+        "always cast functions before transmuting them");
 }
index b5add6404fc9f0e37f953c77fba7583c7fa416d7..6c93744f014a3c2b678be69ef78b351c56e8bc2f 100644 (file)
@@ -287,7 +287,7 @@ pub fn signature_string_for_span(&self, span: Span) -> String {
         let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
         let mut prev = toks.next().unwrap();
 
-        let first_span = prev.get_span();
+        let first_span = prev.span();
         let mut angle_count = 0;
         for tok in toks {
             if let TokenTree::Token(_, ref tok) = prev {
@@ -305,10 +305,10 @@ pub fn signature_string_for_span(&self, span: Span) -> String {
                 continue;
             }
             if let TokenTree::Token(_, token::Semi) = tok {
-                return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
+                return self.snippet(mk_sp(first_span.lo, prev.span().hi));
             } else if let TokenTree::Delimited(_, ref d) = tok {
                 if d.delim == token::Brace {
-                    return self.snippet(mk_sp(first_span.lo, prev.get_span().hi));
+                    return self.snippet(mk_sp(first_span.lo, prev.span().hi));
                 }
             }
             prev = tok;
index 3cad2bc1d842369e9a9000412fbd272f5e581f69..34d8c6500b9263d325bb3251cff8202a9904caf4 100644 (file)
@@ -21,7 +21,7 @@
 use common::{self, Funclet};
 use common::{C_bool, C_str_slice, C_struct, C_u32, C_undef};
 use consts;
-use machine::{llalign_of_min, llbitsize_of_real};
+use machine::llalign_of_min;
 use meth;
 use type_of::{self, align_of};
 use glue;
@@ -869,24 +869,7 @@ fn trans_transmute(&mut self, bcx: &Builder<'a, 'tcx>,
     fn trans_transmute_into(&mut self, bcx: &Builder<'a, 'tcx>,
                             src: &mir::Operand<'tcx>,
                             dst: &LvalueRef<'tcx>) {
-        let mut val = self.trans_operand(bcx, src);
-        if let ty::TyFnDef(def_id, substs, _) = val.ty.sty {
-            let llouttype = type_of::type_of(bcx.ccx, dst.ty.to_ty(bcx.tcx()));
-            let out_type_size = llbitsize_of_real(bcx.ccx, llouttype);
-            if out_type_size != 0 {
-                // FIXME #19925 Remove this hack after a release cycle.
-                let f = Callee::def(bcx.ccx, def_id, substs);
-                let ty = match f.ty.sty {
-                    ty::TyFnDef(.., f) => bcx.tcx().mk_fn_ptr(f),
-                    _ => f.ty
-                };
-                val = OperandRef {
-                    val: Immediate(f.reify(bcx.ccx)),
-                    ty: ty
-                };
-            }
-        }
-
+        let val = self.trans_operand(bcx, src);
         let llty = type_of::type_of(bcx.ccx, val.ty);
         let cast_ptr = bcx.pointercast(dst.llval, llty.ptr_to());
         let in_type = val.ty;
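
The removed block was a hack (see FIXME #19925) that silently reified a zero-sized fn item into a function pointer inside `transmute`; with it gone, and with the `transmute_from_fn_item_types` lint retired above with the message "always cast functions before transmuting them", callers must do the cast themselves. A minimal sketch, assuming a platform where function pointers and `usize` have the same size:

```Rust
use std::mem::transmute;

fn answer() -> u32 { 42 }

fn main() {
    // Fn items are zero-sized; coerce to a function pointer before transmuting.
    let ptr: fn() -> u32 = answer;
    let raw: usize = unsafe { transmute(ptr) };
    let back: fn() -> u32 = unsafe { transmute(raw) };
    assert_eq!(back(), 42);
}
```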
index 73b82fbad5dfda02aa50b0fb975ddd442c6a7143..1294296840ebd75b329bd6df6c81e1309883caf3 100644 (file)
@@ -1476,7 +1476,7 @@ pub struct PolyTrait {
 /// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original
 /// type out of the AST/TyCtxt given one of these, if more information is needed. Most importantly
 /// it does not preserve mutability or boxes.
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
+#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq)]
 pub enum Type {
     /// structs/enums/traits (most that'd be an hir::TyPath)
     ResolvedPath {
index 6f8c6aa7094dde3b994234642e18748885f4bb73..23507dc889b710be084cb53985f9575d7ba87dc3 100644 (file)
@@ -90,6 +90,16 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
+impl<'a, T: fmt::Debug> fmt::Debug for CommaSep<'a, T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        for (i, item) in self.0.iter().enumerate() {
+            if i != 0 { write!(f, ", ")?; }
+            fmt::Debug::fmt(item, f)?;
+        }
+        Ok(())
+    }
+}
+
 impl<'a> fmt::Display for TyParamBounds<'a> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let &TyParamBounds(bounds) = self;
@@ -165,7 +175,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         if f.alternate() {
             clause.push_str(" where ");
         } else {
-            clause.push_str(" <span class='where fmt-newline'>where ");
+            clause.push_str(" <span class=\"where fmt-newline\">where ");
         }
         for (i, pred) in gens.where_predicates.iter().enumerate() {
             if i > 0 {
@@ -449,8 +459,8 @@ fn resolved_path(w: &mut fmt::Formatter, did: DefId, path: &clean::Path,
                     } else {
                         root.push_str(&seg.name);
                         root.push_str("/");
-                        write!(w, "<a class='mod'
-                                       href='{}index.html'>{}</a>::",
+                        write!(w, "<a class=\"mod\"
+                                       href=\"{}index.html\">{}</a>::",
                                  root,
                                  seg.name)?;
                     }
@@ -491,7 +501,7 @@ fn primitive_link(f: &mut fmt::Formatter,
             Some(&def_id) if def_id.is_local() => {
                 let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len());
                 let len = if len == 0 {0} else {len - 1};
-                write!(f, "<a class='primitive' href='{}primitive.{}.html'>",
+                write!(f, "<a class=\"primitive\" href=\"{}primitive.{}.html\">",
                        repeat("../").take(len).collect::<String>(),
                        prim.to_url_str())?;
                 needs_termination = true;
@@ -508,7 +518,7 @@ fn primitive_link(f: &mut fmt::Formatter,
                     (.., render::Unknown) => None,
                 };
                 if let Some((cname, root)) = loc {
-                    write!(f, "<a class='primitive' href='{}{}/primitive.{}.html'>",
+                    write!(f, "<a class=\"primitive\" href=\"{}{}/primitive.{}.html\">",
                            root,
                            cname,
                            prim.to_url_str())?;
@@ -550,7 +560,7 @@ impl<'a> fmt::Display for HRef<'a> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match href(self.did) {
             Some((url, shortty, fqp)) => if !f.alternate() {
-                write!(f, "<a class='{}' href='{}' title='{} {}'>{}</a>",
+                write!(f, "<a class=\"{}\" href=\"{}\" title=\"{} {}\">{}</a>",
                        shortty, url, shortty, fqp.join("::"), self.text)
             } else {
                 write!(f, "{}", self.text)
@@ -560,7 +570,8 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
-fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt::Result {
+fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool,
+            is_not_debug: bool) -> fmt::Result {
     match *t {
         clean::Generic(ref name) => {
             f.write_str(name)
@@ -571,7 +582,8 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt:
             tybounds(f, typarams)
         }
         clean::Infer => write!(f, "_"),
-        clean::Primitive(prim) => primitive_link(f, prim, prim.as_str()),
+        clean::Primitive(prim) if is_not_debug => primitive_link(f, prim, prim.as_str()),
+        clean::Primitive(prim) => write!(f, "{}", prim.as_str()),
         clean::BareFunction(ref decl) => {
             if f.alternate() {
                 write!(f, "{}{}fn{:#}{:#}",
@@ -589,26 +601,30 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt:
         }
         clean::Tuple(ref typs) => {
             match &typs[..] {
-                &[] => primitive_link(f, PrimitiveType::Tuple, "()"),
-                &[ref one] => {
+                &[] if is_not_debug => primitive_link(f, PrimitiveType::Tuple, "()"),
+                &[] => write!(f, "()"),
+                &[ref one] if is_not_debug => {
                     primitive_link(f, PrimitiveType::Tuple, "(")?;
                     //carry f.alternate() into this display w/o branching manually
                     fmt::Display::fmt(one, f)?;
                     primitive_link(f, PrimitiveType::Tuple, ",)")
                 }
-                many => {
+                &[ref one] => write!(f, "({:?},)", one),
+                many if is_not_debug => {
                     primitive_link(f, PrimitiveType::Tuple, "(")?;
                     fmt::Display::fmt(&CommaSep(&many), f)?;
                     primitive_link(f, PrimitiveType::Tuple, ")")
                 }
+                many => write!(f, "({:?})", &CommaSep(&many)),
             }
         }
-        clean::Vector(ref t) => {
+        clean::Vector(ref t) if is_not_debug => {
             primitive_link(f, PrimitiveType::Slice, &format!("["))?;
             fmt::Display::fmt(t, f)?;
             primitive_link(f, PrimitiveType::Slice, &format!("]"))
         }
-        clean::FixedVector(ref t, ref s) => {
+        clean::Vector(ref t) => write!(f, "[{:?}]", t),
+        clean::FixedVector(ref t, ref s) if is_not_debug => {
             primitive_link(f, PrimitiveType::Array, "[")?;
             fmt::Display::fmt(t, f)?;
             if f.alternate() {
@@ -619,10 +635,17 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt:
                                &format!("; {}]", Escape(s)))
             }
         }
+        clean::FixedVector(ref t, ref s) => {
+            if f.alternate() {
+                write!(f, "[{:?}; {}]", t, s)
+            } else {
+                write!(f, "[{:?}; {}]", t, Escape(s))
+            }
+        }
         clean::Never => f.write_str("!"),
         clean::RawPointer(m, ref t) => {
             match **t {
-                clean::Generic(_) | clean::ResolvedPath {is_generic: true, ..} => {
+                clean::Generic(_) | clean::ResolvedPath {is_generic: true, ..} if is_not_debug => {
                     if f.alternate() {
                         primitive_link(f, clean::PrimitiveType::RawPointer,
                                        &format!("*{}{:#}", RawMutableSpace(m), t))
@@ -631,11 +654,21 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt:
                                        &format!("*{}{}", RawMutableSpace(m), t))
                     }
                 }
-                _ => {
+                clean::Generic(_) | clean::ResolvedPath {is_generic: true, ..} => {
+                    if f.alternate() {
+                        write!(f, "*{}{:#?}", RawMutableSpace(m), t)
+                    } else {
+                        write!(f, "*{}{:?}", RawMutableSpace(m), t)
+                    }
+                }
+                _ if is_not_debug => {
                     primitive_link(f, clean::PrimitiveType::RawPointer,
                                    &format!("*{}", RawMutableSpace(m)))?;
                     fmt::Display::fmt(t, f)
                 }
+                _ => {
+                    write!(f, "*{}{:?}", RawMutableSpace(m), t)
+                }
             }
         }
         clean::BorrowedRef{ lifetime: ref l, mutability, type_: ref ty} => {
@@ -647,15 +680,23 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt:
             match **ty {
                 clean::Vector(ref bt) => { // BorrowedRef{ ... Vector(T) } is &[T]
                     match **bt {
-                        clean::Generic(_) =>
+                        clean::Generic(_) if is_not_debug => {
                             if f.alternate() {
                                 primitive_link(f, PrimitiveType::Slice,
                                     &format!("&{}{}[{:#}]", lt, m, **bt))
                             } else {
                                 primitive_link(f, PrimitiveType::Slice,
                                     &format!("&amp;{}{}[{}]", lt, m, **bt))
-                            },
-                        _ => {
+                            }
+                        }
+                        clean::Generic(_) => {
+                            if f.alternate() {
+                                write!(f, "&{}{}[{:#?}]", lt, m, **bt)
+                            } else {
+                                write!(f, "&{}{}[{:?}]", lt, m, **bt)
+                            }
+                        }
+                        _ if is_not_debug => {
                             if f.alternate() {
                                 primitive_link(f, PrimitiveType::Slice,
                                                &format!("&{}{}[", lt, m))?;
@@ -667,15 +708,26 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt:
                             }
                             primitive_link(f, PrimitiveType::Slice, "]")
                         }
+                        _ => {
+                            if f.alternate() {
+                                write!(f, "&{}{}[{:#?}]", lt, m, **bt)
+                            } else {
+                                write!(f, "&{}{}[{:?}]", lt, m, **bt)
+                            }
+                        }
                     }
                 }
                 _ => {
                     if f.alternate() {
                         write!(f, "&{}{}", lt, m)?;
-                        fmt_type(&ty, f, use_absolute)
+                        fmt_type(&ty, f, use_absolute, is_not_debug)
                     } else {
-                        write!(f, "&amp;{}{}", lt, m)?;
-                        fmt_type(&ty, f, use_absolute)
+                        if is_not_debug {
+                            write!(f, "&amp;{}{}", lt, m)?;
+                        } else {
+                            write!(f, "&{}{}", lt, m)?;
+                        }
+                        fmt_type(&ty, f, use_absolute, is_not_debug)
                     }
                 }
             }
@@ -723,9 +775,17 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt:
         }
         clean::QPath { ref name, ref self_type, ref trait_ } => {
             if f.alternate() {
-                write!(f, "<{:#} as {:#}>::{}", self_type, trait_, name)
+                if is_not_debug {
+                    write!(f, "<{:#} as {:#}>::{}", self_type, trait_, name)
+                } else {
+                    write!(f, "<{:#?} as {:#?}>::{}", self_type, trait_, name)
+                }
             } else {
-                write!(f, "&lt;{} as {}&gt;::{}", self_type, trait_, name)
+                if is_not_debug {
+                    write!(f, "&lt;{} as {}&gt;::{}", self_type, trait_, name)
+                } else {
+                    write!(f, "<{:?} as {:?}>::{}", self_type, trait_, name)
+                }
             }
         }
         clean::Unique(..) => {
@@ -736,7 +796,13 @@ fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool) -> fmt:
 
 impl fmt::Display for clean::Type {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt_type(self, f, false)
+        fmt_type(self, f, false, true)
+    }
+}
+
+impl fmt::Debug for clean::Type {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt_type(self, f, false, false)
     }
 }
 
@@ -777,7 +843,7 @@ fn fmt_impl(i: &clean::Impl,
         plain.push_str(" for ");
     }
 
-    fmt_type(&i.for_, f, use_absolute)?;
+    fmt_type(&i.for_, f, use_absolute, true)?;
     plain.push_str(&format!("{:#}", i.for_));
 
     fmt::Display::fmt(&WhereClause(&i.generics, plain.len() + 1), f)?;
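
The `is_not_debug` flag threads one shared formatter through both the existing `Display` impl (which emits hyperlinked HTML) and the new `Debug` impl (plain text, which the markdown rendering of associated consts later in this diff relies on). A minimal, self-contained sketch of that pattern with hypothetical names:

```Rust
use std::fmt;

struct Ty(&'static str);

// Shared formatter; `with_links` plays the role of `is_not_debug` above.
fn fmt_ty(t: &Ty, f: &mut fmt::Formatter, with_links: bool) -> fmt::Result {
    if with_links {
        write!(f, "<a href=\"{0}.html\">{0}</a>", t.0) // Display: HTML output
    } else {
        write!(f, "{}", t.0)                           // Debug: plain text
    }
}

impl fmt::Display for Ty {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt_ty(self, f, true) }
}

impl fmt::Debug for Ty {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt_ty(self, f, false) }
}

fn main() {
    let t = Ty("u32");
    assert_eq!(format!("{}", t), "<a href=\"u32.html\">u32</a>");
    assert_eq!(format!("{:?}", t), "u32");
}
```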
index 0629e93e7ef5d1f222dea269492fef4d150ad7e7..0dafc4225a3210653e6d9b445914c825db8c9a35 100644 (file)
@@ -144,12 +144,12 @@ fn string<T: Display>(&mut self,
                           -> io::Result<()> {
         match klass {
             Class::None => write!(self, "{}", text),
-            klass => write!(self, "<span class='{}'>{}</span>", klass.rustdoc_class(), text),
+            klass => write!(self, "<span class=\"{}\">{}</span>", klass.rustdoc_class(), text),
         }
     }
 
     fn enter_span(&mut self, klass: Class) -> io::Result<()> {
-        write!(self, "<span class='{}'>", klass.rustdoc_class())
+        write!(self, "<span class=\"{}\">", klass.rustdoc_class())
     }
 
     fn exit_span(&mut self) -> io::Result<()> {
@@ -315,7 +315,7 @@ fn write_token<W: Writer>(&mut self,
             token::Lifetime(..) => Class::Lifetime,
 
             token::Underscore | token::Eof | token::Interpolated(..) |
-            token::MatchNt(..) | token::SubstNt(..) | token::Tilde | token::At => Class::None,
+            token::SubstNt(..) | token::Tilde | token::At => Class::None,
         };
 
         // Anything that didn't return above is the simple case where we the
@@ -363,7 +363,7 @@ fn write_header(class: Option<&str>,
     if let Some(id) = id {
         write!(out, "id='{}' ", id)?;
     }
-    write!(out, "class='rust {}'>\n", class.unwrap_or(""))
+    write!(out, "class=\"rust {}\">\n", class.unwrap_or(""))
 }
 
 fn write_footer(out: &mut Write) -> io::Result<()> {
index ae4c94d4b38c096feb09e98b92b86a48c0528ae6..44f71d8952985b0098e482d561dad83d960969da 100644 (file)
@@ -1547,7 +1547,7 @@ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
                        component)?;
             }
         }
-        write!(fmt, "<a class='{}' href=''>{}</a>",
+        write!(fmt, "<a class=\"{}\" href=''>{}</a>",
                self.item.type_(), self.item.name.as_ref().unwrap())?;
 
         write!(fmt, "</span>")?; // in-band
@@ -1654,9 +1654,35 @@ fn document_short(w: &mut fmt::Formatter, item: &clean::Item, link: AssocItemLin
     Ok(())
 }
 
+fn md_render_assoc_item(item: &clean::Item) -> String {
+    match item.inner {
+        clean::AssociatedConstItem(ref ty, ref default) => {
+            if let Some(default) = default.as_ref() {
+                format!("```\n{}: {:?} = {}\n```\n\n", item.name.as_ref().unwrap(), ty, default)
+            } else {
+                format!("```\n{}: {:?}\n```\n\n", item.name.as_ref().unwrap(), ty)
+            }
+        }
+        _ => String::new(),
+    }
+}
+
+fn get_doc_value(item: &clean::Item) -> Option<&str> {
+    let x = item.doc_value();
+    if x.is_none() {
+        match item.inner {
+            clean::AssociatedConstItem(_, _) => Some(""),
+            _ => None,
+        }
+    } else {
+        x
+    }
+}
+
 fn document_full(w: &mut fmt::Formatter, item: &clean::Item) -> fmt::Result {
-    if let Some(s) = item.doc_value() {
-        write!(w, "<div class='docblock'>{}</div>", Markdown(s))?;
+    if let Some(s) = get_doc_value(item) {
+        write!(w, "<div class='docblock'>{}</div>",
+               Markdown(&format!("{}{}", md_render_assoc_item(item), s)))?;
     }
     Ok(())
 }
@@ -1817,7 +1843,7 @@ fn cmp(i1: &clean::Item, i2: &clean::Item, idx1: usize, idx2: usize) -> Ordering
                 let doc_value = myitem.doc_value().unwrap_or("");
                 write!(w, "
                        <tr class='{stab} module-item'>
-                           <td><a class='{class}' href='{href}'
+                           <td><a class=\"{class}\" href=\"{href}\"
                                   title='{title_type} {title}'>{name}</a>{unsafety_flag}</td>
                            <td class='docblock-short'>
                                {stab_docs} {docs}
@@ -1878,7 +1904,7 @@ fn short_stability(item: &clean::Item, cx: &Context, show_reason: bool) -> Vec<S
                                             &cx.shared.issue_tracker_base_url,
                                             stab.issue) {
                     (true, &Some(ref tracker_url), Some(issue_no)) if issue_no > 0 =>
-                        format!(" (<code>{}</code> <a href=\"{}{}\">#{}</a>)",
+                        format!(" (<code>{} </code><a href=\"{}{}\">#{}</a>)",
                                 Escape(&stab.feature), tracker_url, issue_no, issue_no),
                     (false, &Some(ref tracker_url), Some(issue_no)) if issue_no > 0 =>
                         format!(" (<a href=\"{}{}\">#{}</a>)", Escape(&tracker_url), issue_no,
@@ -1890,12 +1916,12 @@ fn short_stability(item: &clean::Item, cx: &Context, show_reason: bool) -> Vec<S
                 if stab.unstable_reason.is_empty() {
                     stability.push(format!("<div class='stab unstable'>\
                                             <span class=microscope>🔬</span> \
-                                            This is a nightly-only experimental API. &nbsp;{}\
+                                            This is a nightly-only experimental API. {}\
                                             </div>",
-                                   unstable_extra));
+                                           unstable_extra));
                 } else {
                     let text = format!("<summary><span class=microscope>🔬</span> \
-                                        This is a nightly-only experimental API. &nbsp;{}\
+                                        This is a nightly-only experimental API. {}\
                                         </summary>{}",
                                        unstable_extra, MarkdownHtml(&stab.unstable_reason));
                     stability.push(format!("<div class='stab unstable'><details>{}</details></div>",
@@ -2215,16 +2241,12 @@ fn naive_assoc_href(it: &clean::Item, link: AssocItemLink) -> String {
 fn assoc_const(w: &mut fmt::Formatter,
                it: &clean::Item,
                ty: &clean::Type,
-               default: Option<&String>,
+               _default: Option<&String>,
                link: AssocItemLink) -> fmt::Result {
-    write!(w, "const <a href='{}' class='constant'>{}</a>",
+    write!(w, "const <a href='{}' class=\"constant\"><b>{}</b></a>: {}",
            naive_assoc_href(it, link),
-           it.name.as_ref().unwrap())?;
-
-    write!(w, ": {}", ty)?;
-    if let Some(default) = default {
-        write!(w, " = {}", Escape(default))?;
-    }
+           it.name.as_ref().unwrap(),
+           ty)?;
     Ok(())
 }
 
@@ -2232,7 +2254,7 @@ fn assoc_type(w: &mut fmt::Formatter, it: &clean::Item,
               bounds: &Vec<clean::TyParamBound>,
               default: Option<&clean::Type>,
               link: AssocItemLink) -> fmt::Result {
-    write!(w, "type <a href='{}' class='type'>{}</a>",
+    write!(w, "type <a href='{}' class=\"type\">{}</a>",
            naive_assoc_href(it, link),
            it.name.as_ref().unwrap())?;
     if !bounds.is_empty() {
@@ -2375,7 +2397,7 @@ fn item_struct(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
                 let ns_id = derive_id(format!("{}.{}",
                                               field.name.as_ref().unwrap(),
                                               ItemType::StructField.name_space()));
-                write!(w, "<span id='{id}' class='{item_type}'>
+                write!(w, "<span id='{id}' class=\"{item_type}\">
                            <span id='{ns_id}' class='invisible'>
                            <code>{name}: {ty}</code>
                            </span></span>",
@@ -2417,7 +2439,7 @@ fn item_union(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
     if fields.peek().is_some() {
         write!(w, "<h2 class='fields'>Fields</h2>")?;
         for (field, ty) in fields {
-            write!(w, "<span id='{shortty}.{name}' class='{shortty}'><code>{name}: {ty}</code>
+            write!(w, "<span id='{shortty}.{name}' class=\"{shortty}\"><code>{name}: {ty}</code>
                        </span>",
                    shortty = ItemType::StructField,
                    name = field.name.as_ref().unwrap(),
@@ -2902,7 +2924,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
                 if render_method_item {
                     let id = derive_id(format!("{}.{}", item_type, name));
                     let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
-                    write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
+                    write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
                     write!(w, "<span id='{}' class='invisible'>", ns_id)?;
                     write!(w, "<code>")?;
                     render_assoc_item(w, item, link.anchor(&id), ItemType::Impl)?;
@@ -2914,7 +2936,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
             clean::TypedefItem(ref tydef, _) => {
                 let id = derive_id(format!("{}.{}", ItemType::AssociatedType, name));
                 let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
-                write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
+                write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
                 write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                 assoc_type(w, item, &Vec::new(), Some(&tydef.type_), link.anchor(&id))?;
                 write!(w, "</code></span></h4>\n")?;
@@ -2922,7 +2944,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
             clean::AssociatedConstItem(ref ty, ref default) => {
                 let id = derive_id(format!("{}.{}", item_type, name));
                 let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
-                write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
+                write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
                 write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                 assoc_const(w, item, ty, default.as_ref(), link.anchor(&id))?;
                 write!(w, "</code></span></h4>\n")?;
@@ -2930,7 +2952,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
             clean::ConstantItem(ref c) => {
                 let id = derive_id(format!("{}.{}", item_type, name));
                 let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
-                write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
+                write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
                 write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                 assoc_const(w, item, &c.type_, Some(&c.expr), link.anchor(&id))?;
                 write!(w, "</code></span></h4>\n")?;
@@ -2938,7 +2960,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
             clean::AssociatedTypeItem(ref bounds, ref default) => {
                 let id = derive_id(format!("{}.{}", item_type, name));
                 let ns_id = derive_id(format!("{}.{}", name, item_type.name_space()));
-                write!(w, "<h4 id='{}' class='{}'>", id, item_type)?;
+                write!(w, "<h4 id='{}' class=\"{}\">", id, item_type)?;
                 write!(w, "<span id='{}' class='invisible'><code>", ns_id)?;
                 assoc_type(w, item, bounds, default.as_ref(), link.anchor(&id))?;
                 write!(w, "</code></span></h4>\n")?;
@@ -2956,7 +2978,7 @@ fn doc_impl_item(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item,
                         // We need the stability of the item from the trait
                         // because impls can't have a stability.
                         document_stability(w, cx, it)?;
-                        if item.doc_value().is_some() {
+                        if get_doc_value(item).is_some() {
                             document_full(w, item)?;
                         } else {
                             // In case the item isn't documented,
index c12e1e7d6080db8739e07db05e9744fd57aa745b..200285862276a5f8e63ea479e02fde2268a9538c 100644 (file)
             .html("[<span class='inner'></span>]");
         toggle.children(".inner").text(labelForToggleButton(false));
 
-        $(".method").each(function() {
+        $(".method, .impl-items > .associatedconstant").each(function() {
             if ($(this).next().is(".docblock") ||
                 ($(this).next().is(".stability") && $(this).next().next().is(".docblock"))) {
                     $(this).children().last().after(toggle.clone());
index 681d2354056f67b7257a71c933cd978f05decd29..b0bf69b0181f22ae2de2705e1a333788bda169d5 100644 (file)
@@ -89,7 +89,7 @@ h2 {
 h3 {
        font-size: 1.3em;
 }
-h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
+h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod):not(.associatedconstant) {
        font-weight: 500;
        margin: 20px 0 15px 0;
        padding-bottom: 6px;
@@ -99,10 +99,10 @@ h1.fqn {
        margin-top: 0;
        position: relative;
 }
-h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
+h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod):not(.associatedconstant) {
        border-bottom: 1px solid;
 }
-h3.impl, h3.method, h4.method, h3.type, h4.type {
+h3.impl, h3.method, h4.method, h3.type, h4.type, h4.associatedconstant {
        font-weight: 600;
        margin-top: 10px;
        margin-bottom: 10px;
@@ -382,7 +382,7 @@ h4 > code, h3 > code, .invisible > code {
 .content .impl-items .docblock, .content .impl-items .stability {
        margin-left: 40px;
 }
-.content .impl-items .method, .content .impl-items > .type {
+.content .impl-items .method, .content .impl-items > .type, .impl-items > .associatedconstant {
        margin-left: 20px;
 }
 
index 64f37925a98e283c797fdc32de2dee5a03f15cf6..236d9f230b5d470718467b351ed6ce75d16fafa5 100644 (file)
@@ -211,7 +211,7 @@ pub fn visit_mod_contents(&mut self, span: Span, attrs: hir::HirVec<ast::Attribu
                     };
 
                     // FIXME(jseyfried) merge with `self.visit_macro()`
-                    let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect();
+                    let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
                     om.macros.push(Macro {
                         def_id: def_id,
                         attrs: def.attrs.clone().into(),
@@ -521,7 +521,7 @@ pub fn visit_item(&mut self, item: &hir::Item,
     // convert each exported_macro into a doc item
     fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
         // Extract the spans of all matchers. They represent the "interface" of the macro.
-        let matchers = def.body.chunks(4).map(|arm| arm[0].get_span()).collect();
+        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
 
         Macro {
             def_id: self.cx.tcx.hir.local_def_id(def.id),
index dc3855367ae271604b1d13935ca5cab8dc092b55..bc678fcb8385b0a9a640afdc285fe8de2eebf8e3 100644 (file)
@@ -455,6 +455,20 @@ fn from(_: NulError) -> io::Error {
     }
 }
 
+#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")]
+impl Error for FromBytesWithNulError {
+    fn description(&self) -> &str {
+        "data provided is not null terminated or contains an interior nul byte"
+    }
+}
+
+#[stable(feature = "frombyteswithnulerror_impls", since = "1.17.0")]
+impl fmt::Display for FromBytesWithNulError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.description().fmt(f)
+    }
+}
+
 impl IntoStringError {
     /// Consumes this error, returning original `CString` which generated the
     /// error.
index 4ff35738b50fbd085657154c517563fbab6e991c..f846ef3e69e093f24f0cb88422f62575c7bd327e 100644 (file)
 //!
 //! assert!(ecode.success());
 //! ```
+//!
+//! Calling a command with input and reading its output:
+//!
+//! ```no_run
+//! use std::process::{Command, Stdio};
+//! use std::io::Write;
+//!
+//! let mut child = Command::new("/bin/cat")
+//!     .stdin(Stdio::piped())
+//!     .stdout(Stdio::piped())
+//!     .spawn()
+//!     .expect("failed to execute child");
+//!
+//! {
+//!     // limited borrow of stdin
+//!     let stdin = child.stdin.as_mut().expect("failed to get stdin");
+//!     stdin.write_all(b"test").expect("failed to write to stdin");
+//! }
+//!
+//! let output = child
+//!     .wait_with_output()
+//!     .expect("failed to wait on child");
+//!
+//! assert_eq!(b"test", output.stdout.as_slice());
+//! ```
 
 #![stable(feature = "process", since = "1.0.0")]
 
index 7ee1c98565cfd5525fd3afba2cdfba2e4743ac69..68c7e88f67fc56218f5f8fd557fc2a088ce1fe46 100644 (file)
 
 /// A type indicating whether a timed wait on a condition variable returned
 /// due to a time out or not.
+///
+/// It is returned by the [`wait_timeout`] method.
+///
+/// [`wait_timeout`]: struct.Condvar.html#method.wait_timeout
 #[derive(Debug, PartialEq, Eq, Copy, Clone)]
 #[stable(feature = "wait_timeout", since = "1.5.0")]
 pub struct WaitTimeoutResult(bool);
 
 impl WaitTimeoutResult {
     /// Returns whether the wait was known to have timed out.
+    ///
+    /// # Examples
+    ///
+    /// This example spawns a thread which will update the boolean value and
+    /// then wait 20 milliseconds before notifying the condvar.
+    ///
+    /// The main thread will wait with a timeout on the condvar and then leave
+    /// once the boolean has been updated and notified.
+    ///
+    /// ```
+    /// use std::sync::{Arc, Mutex, Condvar};
+    /// use std::thread;
+    /// use std::time::Duration;
+    ///
+    /// let pair = Arc::new((Mutex::new(false), Condvar::new()));
+    /// let pair2 = pair.clone();
+    ///
+    /// thread::spawn(move|| {
+    ///     let &(ref lock, ref cvar) = &*pair2;
+    ///     let mut started = lock.lock().unwrap();
+    ///     // We update the boolean value.
+    ///     *started = true;
+    ///     // Let's wait 20 milliseconds before notifying the condvar.
+    ///     thread::sleep(Duration::from_millis(20));
+    ///     cvar.notify_one();
+    /// });
+    ///
+    /// // Wait for the thread to start up.
+    /// let &(ref lock, ref cvar) = &*pair;
+    /// let mut started = lock.lock().unwrap();
+    /// loop {
+    ///     // Let's put a timeout on the condvar's wait.
+    ///     let result = cvar.wait_timeout(started, Duration::from_millis(10)).unwrap();
+    ///     // 10 milliseconds have passed, or maybe the value changed!
+    ///     started = result.0;
+    ///     if *started {
+    ///         // We received the notification and the value has been updated, we can leave.
+    ///         break
+    ///     }
+    /// }
+    /// ```
     #[stable(feature = "wait_timeout", since = "1.5.0")]
     pub fn timed_out(&self) -> bool {
         self.0
@@ -55,15 +100,16 @@ pub fn timed_out(&self) -> bool {
 /// let pair = Arc::new((Mutex::new(false), Condvar::new()));
 /// let pair2 = pair.clone();
 ///
-/// // Inside of our lock, spawn a new thread, and then wait for it to start
+/// // Inside of our lock, spawn a new thread, and then wait for it to start.
 /// thread::spawn(move|| {
 ///     let &(ref lock, ref cvar) = &*pair2;
 ///     let mut started = lock.lock().unwrap();
 ///     *started = true;
+///     // We notify the condvar that the value has changed.
 ///     cvar.notify_one();
 /// });
 ///
-/// // wait for the thread to start up
+/// // Wait for the thread to start up.
 /// let &(ref lock, ref cvar) = &*pair;
 /// let mut started = lock.lock().unwrap();
 /// while !*started {
@@ -79,6 +125,14 @@ pub struct Condvar {
 impl Condvar {
     /// Creates a new condition variable which is ready to be waited on and
     /// notified.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Condvar;
+    ///
+    /// let condvar = Condvar::new();
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new() -> Condvar {
         let mut c = Condvar {
@@ -95,10 +149,10 @@ pub fn new() -> Condvar {
     /// notification.
     ///
     /// This function will atomically unlock the mutex specified (represented by
-    /// `mutex_guard`) and block the current thread. This means that any calls
-    /// to `notify_*()` which happen logically after the mutex is unlocked are
-    /// candidates to wake this thread up. When this function call returns, the
-    /// lock specified will have been re-acquired.
+    /// `guard`) and block the current thread. This means that any calls
+    /// to [`notify_one()`] or [`notify_all()`] which happen logically after the
+    /// mutex is unlocked are candidates to wake this thread up. When this
+    /// function call returns, the lock specified will have been re-acquired.
     ///
     /// Note that this function is susceptible to spurious wakeups. Condition
     /// variables normally have a boolean predicate associated with them, and
@@ -109,14 +163,46 @@ pub fn new() -> Condvar {
     ///
     /// This function will return an error if the mutex being waited on is
     /// poisoned when this thread re-acquires the lock. For more information,
-    /// see information about poisoning on the Mutex type.
+    /// see information about [poisoning] on the [`Mutex`] type.
     ///
     /// # Panics
     ///
-    /// This function will `panic!()` if it is used with more than one mutex
+    /// This function will [`panic!()`] if it is used with more than one mutex
     /// over time. Each condition variable is dynamically bound to exactly one
     /// mutex to ensure defined behavior across platforms. If this functionality
     /// is not desired, then unsafe primitives in `sys` are provided.
+    ///
+    /// [`notify_one()`]: #method.notify_one
+    /// [`notify_all()`]: #method.notify_all
+    /// [poisoning]: ../sync/struct.Mutex.html#poisoning
+    /// [`Mutex`]: ../sync/struct.Mutex.html
+    /// [`panic!()`]: ../../std/macro.panic.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Mutex, Condvar};
+    /// use std::thread;
+    ///
+    /// let pair = Arc::new((Mutex::new(false), Condvar::new()));
+    /// let pair2 = pair.clone();
+    ///
+    /// thread::spawn(move|| {
+    ///     let &(ref lock, ref cvar) = &*pair2;
+    ///     let mut started = lock.lock().unwrap();
+    ///     *started = true;
+    ///     // We notify the condvar that the value has changed.
+    ///     cvar.notify_one();
+    /// });
+    ///
+    /// // Wait for the thread to start up.
+    /// let &(ref lock, ref cvar) = &*pair;
+    /// let mut started = lock.lock().unwrap();
+    /// // As long as the value inside the `Mutex` is false, we wait.
+    /// while !*started {
+    ///     started = cvar.wait(started).unwrap();
+    /// }
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn wait<'a, T>(&self, guard: MutexGuard<'a, T>)
                        -> LockResult<MutexGuard<'a, T>> {
@@ -136,7 +222,7 @@ pub fn wait<'a, T>(&self, guard: MutexGuard<'a, T>)
     /// Waits on this condition variable for a notification, timing out after a
     /// specified duration.
     ///
-    /// The semantics of this function are equivalent to `wait()`
+    /// The semantics of this function are equivalent to [`wait`]
     /// except that the thread will be blocked for roughly no longer
     /// than `ms` milliseconds. This method should not be used for
     /// precise timing due to anomalies such as preemption or platform
@@ -150,8 +236,42 @@ pub fn wait<'a, T>(&self, guard: MutexGuard<'a, T>)
     /// The returned boolean is `false` only if the timeout is known
     /// to have elapsed.
     ///
-    /// Like `wait`, the lock specified will be re-acquired when this function
+    /// Like [`wait`], the lock specified will be re-acquired when this function
     /// returns, regardless of whether the timeout elapsed or not.
+    ///
+    /// [`wait`]: #method.wait
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Mutex, Condvar};
+    /// use std::thread;
+    ///
+    /// let pair = Arc::new((Mutex::new(false), Condvar::new()));
+    /// let pair2 = pair.clone();
+    ///
+    /// thread::spawn(move|| {
+    ///     let &(ref lock, ref cvar) = &*pair2;
+    ///     let mut started = lock.lock().unwrap();
+    ///     *started = true;
+    ///     // We notify the condvar that the value has changed.
+    ///     cvar.notify_one();
+    /// });
+    ///
+    /// // Wait for the thread to start up.
+    /// let &(ref lock, ref cvar) = &*pair;
+    /// let mut started = lock.lock().unwrap();
+    /// // As long as the value inside the `Mutex` is false, we wait.
+    /// loop {
+    ///     let result = cvar.wait_timeout_ms(started, 10).unwrap();
+    ///     // 10 milliseconds have passed, or maybe the value changed!
+    ///     started = result.0;
+    ///     if *started {
+    ///         // We received the notification and the value has been updated, we can leave.
+    ///         break
+    ///     }
+    /// }
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     #[rustc_deprecated(since = "1.6.0", reason = "replaced by `std::sync::Condvar::wait_timeout`")]
     pub fn wait_timeout_ms<'a, T>(&self, guard: MutexGuard<'a, T>, ms: u32)
@@ -165,7 +285,7 @@ pub fn wait_timeout_ms<'a, T>(&self, guard: MutexGuard<'a, T>, ms: u32)
     /// Waits on this condition variable for a notification, timing out after a
     /// specified duration.
     ///
-    /// The semantics of this function are equivalent to `wait()` except that
+    /// The semantics of this function are equivalent to [`wait`] except that
     /// the thread will be blocked for roughly no longer than `dur`. This
     /// method should not be used for precise timing due to anomalies such as
     /// preemption or platform differences that may not cause the maximum
@@ -175,11 +295,47 @@ pub fn wait_timeout_ms<'a, T>(&self, guard: MutexGuard<'a, T>, ms: u32)
     /// measured with a monotonic clock, and not affected by the changes made to
     /// the system time.
     ///
-    /// The returned `WaitTimeoutResult` value indicates if the timeout is
+    /// The returned [`WaitTimeoutResult`] value indicates if the timeout is
     /// known to have elapsed.
     ///
-    /// Like `wait`, the lock specified will be re-acquired when this function
+    /// Like [`wait`], the lock specified will be re-acquired when this function
     /// returns, regardless of whether the timeout elapsed or not.
+    ///
+    /// [`wait`]: #method.wait
+    /// [`WaitTimeoutResult`]: struct.WaitTimeoutResult.html
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Mutex, Condvar};
+    /// use std::thread;
+    /// use std::time::Duration;
+    ///
+    /// let pair = Arc::new((Mutex::new(false), Condvar::new()));
+    /// let pair2 = pair.clone();
+    ///
+    /// thread::spawn(move|| {
+    ///     let &(ref lock, ref cvar) = &*pair2;
+    ///     let mut started = lock.lock().unwrap();
+    ///     *started = true;
+    ///     // We notify the condvar that the value has changed.
+    ///     cvar.notify_one();
+    /// });
+    ///
+    /// // Wait for the thread to start up.
+    /// let &(ref lock, ref cvar) = &*pair;
+    /// let mut started = lock.lock().unwrap();
+    /// // As long as the value inside the `Mutex` is false, we wait.
+    /// loop {
+    ///     let result = cvar.wait_timeout(started, Duration::from_millis(10)).unwrap();
+    ///     // 10 milliseconds have passed, or maybe the value changed!
+    ///     started = result.0;
+    ///     if *started {
+    ///         // We received the notification and the value has been updated, we can leave.
+    ///         break
+    ///     }
+    /// }
+    /// ```
     #[stable(feature = "wait_timeout", since = "1.5.0")]
     pub fn wait_timeout<'a, T>(&self, guard: MutexGuard<'a, T>,
                                dur: Duration)
@@ -200,10 +356,40 @@ pub fn wait_timeout<'a, T>(&self, guard: MutexGuard<'a, T>,
     /// Wakes up one blocked thread on this condvar.
     ///
     /// If there is a blocked thread on this condition variable, then it will
-    /// be woken up from its call to `wait` or `wait_timeout`. Calls to
+    /// be woken up from its call to [`wait`] or [`wait_timeout`]. Calls to
     /// `notify_one` are not buffered in any way.
     ///
-    /// To wake up all threads, see `notify_all()`.
+    /// To wake up all threads, see [`notify_all()`].
+    ///
+    /// [`wait`]: #method.wait
+    /// [`wait_timeout`]: #method.wait_timeout
+    /// [`notify_all()`]: #method.notify_all
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Mutex, Condvar};
+    /// use std::thread;
+    ///
+    /// let pair = Arc::new((Mutex::new(false), Condvar::new()));
+    /// let pair2 = pair.clone();
+    ///
+    /// thread::spawn(move|| {
+    ///     let &(ref lock, ref cvar) = &*pair2;
+    ///     let mut started = lock.lock().unwrap();
+    ///     *started = true;
+    ///     // We notify the condvar that the value has changed.
+    ///     cvar.notify_one();
+    /// });
+    ///
+    /// // Wait for the thread to start up.
+    /// let &(ref lock, ref cvar) = &*pair;
+    /// let mut started = lock.lock().unwrap();
+    /// // As long as the value inside the `Mutex` is false, we wait.
+    /// while !*started {
+    ///     started = cvar.wait(started).unwrap();
+    /// }
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn notify_one(&self) {
         unsafe { self.inner.notify_one() }
@@ -215,7 +401,35 @@ pub fn notify_one(&self) {
     /// variable are awoken. Calls to `notify_all()` are not buffered in any
     /// way.
     ///
-    /// To wake up only one thread, see `notify_one()`.
+    /// To wake up only one thread, see [`notify_one()`].
+    ///
+    /// [`notify_one()`]: #method.notify_one
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Mutex, Condvar};
+    /// use std::thread;
+    ///
+    /// let pair = Arc::new((Mutex::new(false), Condvar::new()));
+    /// let pair2 = pair.clone();
+    ///
+    /// thread::spawn(move|| {
+    ///     let &(ref lock, ref cvar) = &*pair2;
+    ///     let mut started = lock.lock().unwrap();
+    ///     *started = true;
+    ///     // We notify the condvar that the value has changed.
+    ///     cvar.notify_all();
+    /// });
+    ///
+    /// // Wait for the thread to start up.
+    /// let &(ref lock, ref cvar) = &*pair;
+    /// let mut started = lock.lock().unwrap();
+    /// // As long as the value inside the `Mutex` is false, we wait.
+    /// while !*started {
+    ///     started = cvar.wait(started).unwrap();
+    /// }
+    /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn notify_all(&self) {
         unsafe { self.inner.notify_all() }
index bdc727f1dfcfe0f20536ce50054f7ec5e6cff3e7..d9d13240fcc3acc9a7ec9e4c27138182152cc215 100644 (file)
@@ -60,10 +60,13 @@ pub struct Guard {
 
 /// A type of error which can be returned whenever a lock is acquired.
 ///
-/// Both Mutexes and RwLocks are poisoned whenever a thread fails while the lock
+/// Both [`Mutex`]es and [`RwLock`]s are poisoned whenever a thread fails while the lock
 /// is held. The precise semantics for when a lock is poisoned are documented on
 /// each lock, but once a lock is poisoned then all future acquisitions will
 /// return this error.
+///
+/// [`Mutex`]: ../../std/sync/struct.Mutex.html
+/// [`RwLock`]: ../../std/sync/struct.RwLock.html
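+///
+/// # Examples
+///
+/// A minimal sketch of how a `PoisonError` arises and how the guarded data
+/// can still be recovered from it:
+///
+/// ```
+/// use std::sync::{Arc, Mutex};
+/// use std::thread;
+///
+/// let mutex = Arc::new(Mutex::new(1));
+///
+/// // Poison the mutex by panicking while the lock is held.
+/// let c_mutex = mutex.clone();
+/// let _ = thread::spawn(move || {
+///     let _lock = c_mutex.lock().unwrap();
+///     panic!();
+/// }).join();
+///
+/// // Every later acquisition returns a `PoisonError`, which still carries
+/// // the guard and therefore access to the data.
+/// match mutex.lock() {
+///     Ok(_) => unreachable!(),
+///     Err(poisoned) => assert_eq!(*poisoned.into_inner(), 1),
+/// }
+/// ```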
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct PoisonError<T> {
     guard: T,
@@ -85,19 +88,26 @@ pub enum TryLockError<T> {
 
 /// A type alias for the result of a lock method which can be poisoned.
 ///
-/// The `Ok` variant of this result indicates that the primitive was not
-/// poisoned, and the `Guard` is contained within. The `Err` variant indicates
-/// that the primitive was poisoned. Note that the `Err` variant *also* carries
-/// the associated guard, and it can be acquired through the `into_inner`
+/// The [`Ok`] variant of this result indicates that the primitive was not
+/// poisoned, and the `Guard` is contained within. The [`Err`] variant indicates
+/// that the primitive was poisoned. Note that the [`Err`] variant *also* carries
+/// the associated guard, and it can be acquired through the [`into_inner`]
 /// method.
+///
+/// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
+/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
+/// [`into_inner`]: ../../std/sync/struct.Mutex.html#method.into_inner
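+///
+/// # Examples
+///
+/// A short sketch of the usual way a `LockResult` is consumed, turning a
+/// poisoned lock into a panic:
+///
+/// ```
+/// use std::sync::Mutex;
+///
+/// let mutex = Mutex::new(42);
+///
+/// // `lock` returns a `LockResult`; `unwrap` panics if the lock is poisoned.
+/// let guard = mutex.lock().unwrap();
+/// assert_eq!(*guard, 42);
+/// ```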
 #[stable(feature = "rust1", since = "1.0.0")]
 pub type LockResult<Guard> = Result<Guard, PoisonError<Guard>>;
 
 /// A type alias for the result of a nonblocking locking method.
 ///
-/// For more information, see `LockResult`. A `TryLockResult` doesn't
-/// necessarily hold the associated guard in the `Err` type as the lock may not
+/// For more information, see [`LockResult`]. A `TryLockResult` doesn't
+/// necessarily hold the associated guard in the [`Err`] type as the lock may not
 /// have been acquired for other reasons.
+///
+/// [`LockResult`]: ../../std/sync/type.LockResult.html
+/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
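+///
+/// # Examples
+///
+/// A minimal sketch of the `WouldBlock` case, where the lock is simply busy
+/// rather than poisoned:
+///
+/// ```
+/// use std::sync::{Arc, Mutex, TryLockError};
+/// use std::thread;
+///
+/// let mutex = Arc::new(Mutex::new(0));
+///
+/// // Keep the lock held while another thread calls `try_lock`.
+/// let _guard = mutex.lock().unwrap();
+///
+/// let c_mutex = mutex.clone();
+/// let would_block = thread::spawn(move || {
+///     match c_mutex.try_lock() {
+///         Err(TryLockError::WouldBlock) => true,
+///         _ => false,
+///     }
+/// }).join().unwrap();
+///
+/// assert!(would_block);
+/// ```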
 #[stable(feature = "rust1", since = "1.0.0")]
 pub type TryLockResult<Guard> = Result<Guard, TryLockError<Guard>>;
 
@@ -124,6 +134,11 @@ fn description(&self) -> &str {
 
 impl<T> PoisonError<T> {
     /// Creates a `PoisonError`.
+    ///
+    /// This is generally created by methods like [`Mutex::lock`] or [`RwLock::read`].
+    ///
+    /// [`Mutex::lock`]: ../../std/sync/struct.Mutex.html#method.lock
+    /// [`RwLock::read`]: ../../std/sync/struct.RwLock.html#method.read
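+    ///
+    /// # Examples
+    ///
+    /// A contrived sketch constructing the error directly; in practice it is
+    /// produced by the locking methods above:
+    ///
+    /// ```
+    /// use std::sync::PoisonError;
+    ///
+    /// let error = PoisonError::new(String::from("guarded data"));
+    /// assert_eq!(error.into_inner(), "guarded data");
+    /// ```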
     #[stable(feature = "sync_poison", since = "1.2.0")]
     pub fn new(guard: T) -> PoisonError<T> {
         PoisonError { guard: guard }
index 6c46f90f3d4b95abdd791772314e5c8155218d8c..b1b69c80f4d0016c790c4554d0df6cc73350fd7b 100644 (file)
 use ext::base;
 use ext::build::AstBuilder;
 use parse::parser::{Parser, PathStyle};
-use parse::token::*;
 use parse::token;
 use ptr::P;
-use tokenstream::{self, TokenTree};
+use tokenstream::TokenTree;
 
 
 /// Quasiquoting works via token trees.
@@ -356,14 +355,35 @@ fn parse_expr(&self, s: String) -> P<ast::Expr> {
         }
 
         fn parse_tts(&self, s: String) -> Vec<TokenTree> {
-            panictry!(parse::parse_tts_from_source_str(
-                "<quote expansion>".to_string(),
-                s,
-                self.parse_sess()))
+            parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
         }
     }
 }
 
+// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
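+// For example, the flat sequence `(`, `a`, `)` becomes a single
+// `TokenTree::Delimited` whose `tts` contain `a`; nesting is handled by the
+// `results` stack below.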
+pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
+    use std::rc::Rc;
+    use tokenstream::Delimited;
+
+    let mut results = Vec::new();
+    let mut result = Vec::new();
+    for tree in tts {
+        match tree {
+            TokenTree::Token(_, token::OpenDelim(..)) => {
+                results.push(::std::mem::replace(&mut result, Vec::new()));
+            }
+            TokenTree::Token(span, token::CloseDelim(delim)) => {
+                let tree =
+                    TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
+                result = results.pop().unwrap();
+                result.push(tree);
+            }
+            tree @ _ => result.push(tree),
+        }
+    }
+    result
+}
+
 // These panicking parsing functions are used by the quote_*!() syntax extensions,
 // but shouldn't be used otherwise.
 pub fn parse_expr_panic(parser: &mut Parser) -> P<Expr> {
@@ -510,20 +530,6 @@ pub fn expand_quote_path(cx: &mut ExtCtxt,
     base::MacEager::expr(expanded)
 }
 
-pub fn expand_quote_matcher(cx: &mut ExtCtxt,
-                            sp: Span,
-                            tts: &[TokenTree])
-                            -> Box<base::MacResult+'static> {
-    let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
-    let mut vector = mk_stmts_let(cx, sp);
-    vector.extend(statements_mk_tts(cx, &tts[..], true));
-    vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
-    let block = cx.expr_block(cx.block(sp, vector));
-
-    let expanded = expand_wrapper(cx, sp, cx_expr, block, &[&["syntax", "ext", "quote", "rt"]]);
-    base::MacEager::expr(expanded)
-}
-
 fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
     strs.iter().map(|s| ast::Ident::from_str(s)).collect()
 }
@@ -669,12 +675,6 @@ macro_rules! mk_lit {
                                 vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]);
         }
 
-        token::MatchNt(name, kind) => {
-            return cx.expr_call(sp,
-                                mk_token_path(cx, sp, "MatchNt"),
-                                vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]);
-        }
-
         token::Interpolated(_) => panic!("quote! with interpolated token"),
 
         _ => ()
@@ -712,9 +712,9 @@ macro_rules! mk_lit {
     mk_token_path(cx, sp, name)
 }
 
-fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stmt> {
+fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
     match *tt {
-        TokenTree::Token(sp, SubstNt(ident)) => {
+        TokenTree::Token(sp, token::Ident(ident)) if quoted => {
             // tt.extend($ident.to_tokens(ext_cx))
 
             let e_to_toks =
@@ -733,13 +733,6 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
 
             vec![cx.stmt_expr(e_push)]
         }
-        ref tt @ TokenTree::Token(_, MatchNt(..)) if !matcher => {
-            let mut seq = vec![];
-            for i in 0..tt.len() {
-                seq.push(tt.get_tt(i));
-            }
-            statements_mk_tts(cx, &seq[..], matcher)
-        }
         TokenTree::Token(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,
@@ -753,77 +746,17 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
             vec![cx.stmt_expr(e_push)]
         },
         TokenTree::Delimited(span, ref delimed) => {
-            statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter()
-                .chain(delimed.tts.iter()
-                                  .flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
-                .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher))
-                .collect()
-        },
-        TokenTree::Sequence(sp, ref seq) => {
-            if !matcher {
-                panic!("TokenTree::Sequence in quote!");
-            }
-
-            let e_sp = cx.expr_ident(sp, id_ext("_sp"));
-
-            let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp));
-            let mut tts_stmts = vec![stmt_let_tt];
-            tts_stmts.extend(statements_mk_tts(cx, &seq.tts[..], matcher));
-            tts_stmts.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
-            let e_tts = cx.expr_block(cx.block(sp, tts_stmts));
-
-            let e_separator = match seq.separator {
-                Some(ref sep) => cx.expr_some(sp, expr_mk_token(cx, sp, sep)),
-                None => cx.expr_none(sp),
-            };
-            let e_op = match seq.op {
-                tokenstream::KleeneOp::ZeroOrMore => "ZeroOrMore",
-                tokenstream::KleeneOp::OneOrMore => "OneOrMore",
-            };
-            let e_op_idents = vec![
-                id_ext("syntax"),
-                id_ext("tokenstream"),
-                id_ext("KleeneOp"),
-                id_ext(e_op),
-            ];
-            let e_op = cx.expr_path(cx.path_global(sp, e_op_idents));
-            let fields = vec![cx.field_imm(sp, id_ext("tts"), e_tts),
-                              cx.field_imm(sp, id_ext("separator"), e_separator),
-                              cx.field_imm(sp, id_ext("op"), e_op),
-                              cx.field_imm(sp, id_ext("num_captures"),
-                                               cx.expr_usize(sp, seq.num_captures))];
-            let seq_path = vec![id_ext("syntax"),
-                                id_ext("tokenstream"),
-                                id_ext("SequenceRepetition")];
-            let e_seq_struct = cx.expr_struct(sp, cx.path_global(sp, seq_path), fields);
-            let e_rc_new = cx.expr_call_global(sp, vec![id_ext("std"),
-                                                        id_ext("rc"),
-                                                        id_ext("Rc"),
-                                                        id_ext("new")],
-                                                   vec![e_seq_struct]);
-            let e_tok = cx.expr_call(sp,
-                                     mk_tt_path(cx, sp, "Sequence"),
-                                     vec![e_sp, e_rc_new]);
-            let e_push =
-                cx.expr_method_call(sp,
-                                    cx.expr_ident(sp, id_ext("tt")),
-                                    id_ext("push"),
-                                    vec![e_tok]);
-            vec![cx.stmt_expr(e_push)]
+            let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
+            stmts.extend(statements_mk_tts(cx, &delimed.tts));
+            stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
+            stmts
         }
     }
 }
 
 fn parse_arguments_to_quote(cx: &ExtCtxt, tts: &[TokenTree])
                             -> (P<ast::Expr>, Vec<TokenTree>) {
-    // NB: It appears that the main parser loses its mind if we consider
-    // $foo as a SubstNt during the main parse, so we have to re-parse
-    // under quote_depth > 0. This is silly and should go away; the _guess_ is
-    // it has to do with transition away from supporting old-style macros, so
-    // try removing it when enough of them are gone.
-
     let mut p = cx.new_parser_from_tts(tts);
-    p.quote_depth += 1;
 
     let cx_expr = panictry!(p.parse_expr());
     if !p.eat(&token::Comma) {
@@ -877,24 +810,31 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
     vec![stmt_let_sp, stmt_let_tt]
 }
 
-fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec<ast::Stmt> {
+fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
     let mut ss = Vec::new();
+    let mut quoted = false;
     for tt in tts {
-        ss.extend(statements_mk_tt(cx, tt, matcher));
+        quoted = match *tt {
+            TokenTree::Token(_, token::Dollar) if !quoted => true,
+            _ => {
+                ss.extend(statements_mk_tt(cx, tt, quoted));
+                false
+            }
+        }
     }
     ss
 }
 
-fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree])
-              -> (P<ast::Expr>, P<ast::Expr>) {
+fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast::Expr>) {
     let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
 
     let mut vector = mk_stmts_let(cx, sp);
-    vector.extend(statements_mk_tts(cx, &tts[..], false));
+    vector.extend(statements_mk_tts(cx, &tts[..]));
     vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
     let block = cx.expr_block(cx.block(sp, vector));
+    let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];
 
-    (cx_expr, block)
+    (cx_expr, cx.expr_call_global(sp, unflatten, vec![block]))
 }
 
 fn expand_wrapper(cx: &ExtCtxt,
index 089c35c694a78e3eb9e9c79f0268a80d27a59dc2..6ab5123bc87b16b799aa6630843dfd08accd9af1 100644 (file)
 use syntax_pos::{self, BytePos, mk_sp, Span};
 use codemap::Spanned;
 use errors::FatalError;
+use ext::tt::quoted;
 use parse::{Directory, ParseSess};
 use parse::parser::{PathStyle, Parser};
-use parse::token::{DocComment, MatchNt, SubstNt};
-use parse::token::{Token, Nonterminal};
-use parse::token;
+use parse::token::{self, DocComment, Token, Nonterminal};
 use print::pprust;
-use tokenstream::{self, TokenTree};
+use symbol::keywords;
+use tokenstream::TokenTree;
 use util::small_vector::SmallVector;
 
 use std::mem;
 
 #[derive(Clone)]
 enum TokenTreeOrTokenTreeVec {
-    Tt(tokenstream::TokenTree),
-    TtSeq(Vec<tokenstream::TokenTree>),
+    Tt(quoted::TokenTree),
+    TtSeq(Vec<quoted::TokenTree>),
 }
 
 impl TokenTreeOrTokenTreeVec {
@@ -113,7 +113,7 @@ fn len(&self) -> usize {
         }
     }
 
-    fn get_tt(&self, index: usize) -> TokenTree {
+    fn get_tt(&self, index: usize) -> quoted::TokenTree {
         match *self {
             TtSeq(ref v) => v[index].clone(),
             Tt(ref tt) => tt.get_tt(index),
@@ -144,7 +144,9 @@ struct MatcherPos {
 
 pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
 
-pub fn count_names(ms: &[TokenTree]) -> usize {
+pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
+    use self::quoted::TokenTree;
+
     ms.iter().fold(0, |count, elt| {
         count + match *elt {
             TokenTree::Sequence(_, ref seq) => {
@@ -153,7 +155,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
             TokenTree::Delimited(_, ref delim) => {
                 count_names(&delim.tts)
             }
-            TokenTree::Token(_, MatchNt(..)) => {
+            TokenTree::MetaVarDecl(..) => {
                 1
             }
             TokenTree::Token(..) => 0,
@@ -161,7 +163,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
     })
 }
 
-fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
+fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     let match_idx_hi = count_names(&ms[..]);
     let matches = create_matches(match_idx_hi);
     Box::new(MatcherPos {
@@ -200,22 +202,30 @@ pub enum NamedMatch {
     MatchedNonterminal(Rc<Nonterminal>)
 }
 
-fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[TokenTree], mut res: I) -> NamedParseResult {
-    fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I,
+fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[quoted::TokenTree], mut res: I)
+                                             -> NamedParseResult {
+    use self::quoted::TokenTree;
+
+    fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I,
              ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
              -> Result<(), (syntax_pos::Span, String)> {
         match *m {
             TokenTree::Sequence(_, ref seq) => {
                 for next_m in &seq.tts {
-                    n_rec(next_m, res.by_ref(), ret_val)?
+                    n_rec(sess, next_m, res.by_ref(), ret_val)?
                 }
             }
             TokenTree::Delimited(_, ref delim) => {
                 for next_m in &delim.tts {
-                    n_rec(next_m, res.by_ref(), ret_val)?;
+                    n_rec(sess, next_m, res.by_ref(), ret_val)?;
                 }
             }
-            TokenTree::Token(sp, MatchNt(bind_name, _)) => {
+            TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
+                if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
+                    return Err((span, "missing fragment specifier".to_string()));
+                }
+            }
+            TokenTree::MetaVarDecl(sp, bind_name, _) => {
                 match ret_val.entry(bind_name) {
                     Vacant(spot) => {
                         spot.insert(res.next().unwrap());
@@ -225,9 +235,6 @@ fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I,
                     }
                 }
             }
-            TokenTree::Token(sp, SubstNt(..)) => {
-                return Err((sp, "missing fragment specifier".to_string()))
-            }
             TokenTree::Token(..) => (),
         }
 
@@ -236,7 +243,7 @@ fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(m: &TokenTree, mut res: &mut I,
 
     let mut ret_val = HashMap::new();
     for m in ms {
-        match n_rec(m, res.by_ref(), &mut ret_val) {
+        match n_rec(sess, m, res.by_ref(), &mut ret_val) {
             Ok(_) => {},
             Err((sp, msg)) => return Error(sp, msg),
         }
@@ -276,11 +283,15 @@ fn create_matches(len: usize) -> Vec<Vec<Rc<NamedMatch>>> {
     (0..len).into_iter().map(|_| Vec::new()).collect()
 }
 
-fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
+fn inner_parse_loop(sess: &ParseSess,
+                    cur_eis: &mut SmallVector<Box<MatcherPos>>,
                     next_eis: &mut Vec<Box<MatcherPos>>,
                     eof_eis: &mut SmallVector<Box<MatcherPos>>,
                     bb_eis: &mut SmallVector<Box<MatcherPos>>,
-                    token: &Token, span: &syntax_pos::Span) -> ParseResult<()> {
+                    token: &Token,
+                    span: &syntax_pos::Span) -> ParseResult<()> {
+    use self::quoted::TokenTree;
+
     while let Some(mut ei) = cur_eis.pop() {
         // When unzipped trees end, remove them
         while ei.idx >= ei.top_elts.len() {
@@ -346,7 +357,7 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
             match ei.top_elts.get_tt(idx) {
                 /* need to descend into sequence */
                 TokenTree::Sequence(sp, seq) => {
-                    if seq.op == tokenstream::KleeneOp::ZeroOrMore {
+                    if seq.op == quoted::KleeneOp::ZeroOrMore {
                         // Examine the case where there are 0 matches of this sequence
                         let mut new_ei = ei.clone();
                         new_ei.match_cur += seq.num_captures;
@@ -372,7 +383,12 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
                         top_elts: Tt(TokenTree::Sequence(sp, seq)),
                     }));
                 }
-                TokenTree::Token(_, MatchNt(..)) => {
+                TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
+                    if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
+                        return Error(span, "missing fragment specifier".to_string());
+                    }
+                }
+                TokenTree::MetaVarDecl(..) => {
                     // Built-in nonterminals never start with these tokens,
                     // so we can eliminate them from consideration.
                     match *token {
@@ -380,9 +396,6 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
                         _ => bb_eis.push(ei),
                     }
                 }
-                TokenTree::Token(sp, SubstNt(..)) => {
-                    return Error(sp, "missing fragment specifier".to_string())
-                }
                 seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
                     let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
                     let idx = ei.idx;
@@ -406,8 +419,13 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
     Success(())
 }
 
-pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: Option<Directory>)
+pub fn parse(sess: &ParseSess,
+             tts: Vec<TokenTree>,
+             ms: &[quoted::TokenTree],
+             directory: Option<Directory>)
              -> NamedParseResult {
+    use self::quoted::TokenTree;
+
     let mut parser = Parser::new(sess, tts, directory, true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
     let mut next_eis = Vec::new(); // or proceed normally
@@ -417,7 +435,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
         let mut eof_eis = SmallVector::new();
         assert!(next_eis.is_empty());
 
-        match inner_parse_loop(&mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis,
+        match inner_parse_loop(sess, &mut cur_eis, &mut next_eis, &mut eof_eis, &mut bb_eis,
                                &parser.token, &parser.span) {
             Success(_) => {},
             Failure(sp, tok) => return Failure(sp, tok),
@@ -430,7 +448,8 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
         /* error messages here could be improved with links to orig. rules */
         if token_name_eq(&parser.token, &token::Eof) {
             if eof_eis.len() == 1 {
-                return nameize(ms, eof_eis[0].matches.iter_mut().map(|mut dv| dv.pop().unwrap()));
+                let matches = eof_eis[0].matches.iter_mut().map(|mut dv| dv.pop().unwrap());
+                return nameize(sess, ms, matches);
             } else if eof_eis.len() > 1 {
                 return Error(parser.span, "ambiguity: multiple successful parses".to_string());
             } else {
@@ -438,7 +457,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
             }
         } else if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 {
             let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
-                TokenTree::Token(_, MatchNt(bind, name)) => {
+                TokenTree::MetaVarDecl(_, bind, name) => {
                     format!("{} ('{}')", name, bind)
                 }
                 _ => panic!()
@@ -460,7 +479,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
             parser.bump();
         } else /* bb_eis.len() == 1 */ {
             let mut ei = bb_eis.pop().unwrap();
-            if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) {
+            if let TokenTree::MetaVarDecl(span, _, ident) = ei.top_elts.get_tt(ei.idx) {
                 let match_cur = ei.match_cur;
                 ei.matches[match_cur].push(Rc::new(MatchedNonterminal(
                             Rc::new(parse_nt(&mut parser, span, &ident.name.as_str())))));
@@ -479,10 +498,7 @@ pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory:
 fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
     match name {
         "tt" => {
-            p.quote_depth += 1; //but in theory, non-quoted tts might be useful
-            let tt = panictry!(p.parse_token_tree());
-            p.quote_depth -= 1;
-            return token::NtTT(tt);
+            return token::NtTT(panictry!(p.parse_token_tree()));
         }
         _ => {}
     }
index d0c1c0efea7a399e4496e8b24c9e732d5a06e6dc..193c06707c7a6860a2bb794aa01946f2fcc94442 100644 (file)
 use ext::tt::macro_parser::{Success, Error, Failure};
 use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
 use ext::tt::macro_parser::{parse, parse_failure_msg};
+use ext::tt::quoted;
 use ext::tt::transcribe::transcribe;
 use parse::{Directory, ParseSess};
 use parse::parser::Parser;
-use parse::token::{self, NtTT, Token};
+use parse::token::{self, NtTT};
 use parse::token::Token::*;
 use print;
 use symbol::Symbol;
-use tokenstream::{self, TokenTree};
+use tokenstream::TokenTree;
 
 use std::collections::{HashMap};
 use std::collections::hash_map::{Entry};
@@ -58,8 +59,8 @@ pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: ExpansionKind) -> Expansion
 
 struct MacroRulesMacroExpander {
     name: ast::Ident,
-    lhses: Vec<TokenTree>,
-    rhses: Vec<TokenTree>,
+    lhses: Vec<quoted::TokenTree>,
+    rhses: Vec<quoted::TokenTree>,
     valid: bool,
 }
 
@@ -86,8 +87,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                           sp: Span,
                           name: ast::Ident,
                           arg: &[TokenTree],
-                          lhses: &[TokenTree],
-                          rhses: &[TokenTree])
+                          lhses: &[quoted::TokenTree],
+                          rhses: &[quoted::TokenTree])
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
         println!("{}! {{ {} }}",
@@ -101,7 +102,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
 
     for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
         let lhs_tt = match *lhs {
-            TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+            quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
             _ => cx.span_bug(sp, "malformed macro lhs")
         };
 
@@ -109,7 +110,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             Success(named_matches) => {
                 let rhs = match rhses[i] {
                     // ignore delimiters
-                    TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
+                    quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
                     _ => cx.span_bug(sp, "malformed macro rhs"),
                 };
                 // rhs has holes ( `$id` and `$(...)` that need filled)
@@ -164,24 +165,22 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     // $( $lhs:tt => $rhs:tt );+
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
-    let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt"));
-    let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt"));
     let argument_gram = vec![
-        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
+        quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition {
             tts: vec![
-                TokenTree::Token(DUMMY_SP, match_lhs_tok),
-                TokenTree::Token(DUMMY_SP, token::FatArrow),
-                TokenTree::Token(DUMMY_SP, match_rhs_tok),
+                quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
+                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(token::Semi),
-            op: tokenstream::KleeneOp::OneOrMore,
+            op: quoted::KleeneOp::OneOrMore,
             num_captures: 2,
         })),
         // to phase into semicolon-termination instead of semicolon-separation
-        TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
-            tts: vec![TokenTree::Token(DUMMY_SP, token::Semi)],
+        quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition {
+            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
             separator: None,
-            op: tokenstream::KleeneOp::ZeroOrMore,
+            op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
         })),
     ];
@@ -206,12 +205,13 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        valid &= check_lhs_nt_follows(sess, tt);
-                        return (*tt).clone();
+                        let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap();
+                        valid &= check_lhs_nt_follows(sess, &tt);
+                        return tt;
                     }
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-            }).collect::<Vec<TokenTree>>()
+            }).collect::<Vec<quoted::TokenTree>>()
         }
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
     };
@@ -221,11 +221,11 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        return (*tt).clone();
+                        return quoted::parse(&[tt.clone()], false, sess).pop().unwrap();
                     }
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
-            }).collect()
+            }).collect::<Vec<quoted::TokenTree>>()
         }
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs")
     };
@@ -249,14 +249,14 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable"))
 }
 
-fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool {
+fn check_lhs_nt_follows(sess: &ParseSess, lhs: &quoted::TokenTree) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
     match lhs {
-        &TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts),
+        &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts),
         _ => {
             let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
-            sess.span_diagnostic.span_err(lhs.get_span(), msg);
+            sess.span_diagnostic.span_err(lhs.span(), msg);
             false
         }
     }
@@ -266,10 +266,11 @@ fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool {
 
 /// Check that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
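 ///
 /// For example, a matcher such as `$()*`, whose repetition can only ever match
 /// an empty token tree, is rejected here.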
-fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
+fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
+    use self::quoted::TokenTree;
     for tt in tts {
         match *tt {
-            TokenTree::Token(_, _) => (),
+            TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => (),
             TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) {
                 return false;
             },
@@ -278,7 +279,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
                     if seq.tts.iter().all(|seq_tt| {
                         match *seq_tt {
                             TokenTree::Sequence(_, ref sub_seq) =>
-                                sub_seq.op == tokenstream::KleeneOp::ZeroOrMore,
+                                sub_seq.op == quoted::KleeneOp::ZeroOrMore,
                             _ => false,
                         }
                     }) {
@@ -296,15 +297,15 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool {
     true
 }
 
-fn check_rhs(sess: &ParseSess, rhs: &TokenTree) -> bool {
+fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
     match *rhs {
-        TokenTree::Delimited(..) => return true,
-        _ => sess.span_diagnostic.span_err(rhs.get_span(), "macro rhs must be delimited")
+        quoted::TokenTree::Delimited(..) => return true,
+        _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited")
     }
     false
 }
 
-fn check_matcher(sess: &ParseSess, matcher: &[TokenTree]) -> bool {
+fn check_matcher(sess: &ParseSess, matcher: &[quoted::TokenTree]) -> bool {
     let first_sets = FirstSets::new(matcher);
     let empty_suffix = TokenSet::empty();
     let err = sess.span_diagnostic.err_count();
@@ -335,7 +336,9 @@ struct FirstSets {
 }
 
 impl FirstSets {
-    fn new(tts: &[TokenTree]) -> FirstSets {
+    fn new(tts: &[quoted::TokenTree]) -> FirstSets {
+        use self::quoted::TokenTree;
+
         let mut sets = FirstSets { first: HashMap::new() };
         build_recur(&mut sets, tts);
         return sets;
@@ -347,13 +350,12 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
                 match *tt {
-                    TokenTree::Token(sp, ref tok) => {
-                        first.replace_with((sp, tok.clone()));
+                    TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                        first.replace_with(tt.clone());
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts[..]);
-                        first.replace_with((delimited.open_tt(span).span(),
-                                            Token::OpenDelim(delimited.delim)));
+                        first.replace_with(delimited.open_tt(span));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts[..]);
@@ -378,11 +380,11 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
 
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe((sp, sep.clone()));
+                            first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
-                        if subfirst.maybe_empty || seq_rep.op == tokenstream::KleeneOp::ZeroOrMore {
+                        if subfirst.maybe_empty || seq_rep.op == quoted::KleeneOp::ZeroOrMore {
                             // If sequence is potentially empty, then
                             // union them (preserving first emptiness).
                             first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
@@ -401,18 +403,19 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
 
     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
-    fn first(&self, tts: &[TokenTree]) -> TokenSet {
+    fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
+        use self::quoted::TokenTree;
+
         let mut first = TokenSet::empty();
         for tt in tts.iter() {
             assert!(first.maybe_empty);
             match *tt {
-                TokenTree::Token(sp, ref tok) => {
-                    first.add_one((sp, tok.clone()));
+                TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                    first.add_one(tt.clone());
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one((delimited.open_tt(span).span(),
-                                   Token::OpenDelim(delimited.delim)));
+                    first.add_one(delimited.open_tt(span));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
@@ -424,13 +427,13 @@ fn first(&self, tts: &[TokenTree]) -> TokenSet {
 
                             if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                             subfirst.maybe_empty) {
-                                first.add_one_maybe((sp, sep.clone()));
+                                first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
                             }
 
                             assert!(first.maybe_empty);
                             first.add_all(subfirst);
                             if subfirst.maybe_empty ||
-                               seq_rep.op == tokenstream::KleeneOp::ZeroOrMore {
+                               seq_rep.op == quoted::KleeneOp::ZeroOrMore {
                                 // continue scanning for more first
                                 // tokens, but also make sure we
                                 // restore empty-tracking state
@@ -460,8 +463,8 @@ fn first(&self, tts: &[TokenTree]) -> TokenSet {
     }
 }
 
-// A set of Tokens, which may include MatchNt tokens (for
-// macro-by-example syntactic variables). It also carries the
+// A set of `quoted::TokenTree`s, which may include `TokenTree::MetaVarDecl`s
+// (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
 // match an empty token sequence.
 //
@@ -472,7 +475,7 @@ fn first(&self, tts: &[TokenTree]) -> TokenSet {
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
 struct TokenSet {
-    tokens: Vec<(Span, Token)>,
+    tokens: Vec<quoted::TokenTree>,
     maybe_empty: bool,
 }
 
@@ -482,13 +485,13 @@ fn empty() -> Self { TokenSet { tokens: Vec::new(), maybe_empty: true } }
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: (Span, Token)) -> Self {
+    fn singleton(tok: quoted::TokenTree) -> Self {
         TokenSet { tokens: vec![tok], maybe_empty: false }
     }
 
     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: (Span, Token)) {
+    fn replace_with(&mut self, tok: quoted::TokenTree) {
         self.tokens.clear();
         self.tokens.push(tok);
         self.maybe_empty = false;
@@ -503,7 +506,7 @@ fn replace_with_irrelevant(&mut self) {
     }
 
     // Adds `tok` to the set for `self`, marking the sequence as non-empty.
-    fn add_one(&mut self, tok: (Span, Token)) {
+    fn add_one(&mut self, tok: quoted::TokenTree) {
         if !self.tokens.contains(&tok) {
             self.tokens.push(tok);
         }
@@ -511,7 +514,7 @@ fn add_one(&mut self, tok: (Span, Token)) {
     }
 
     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: (Span, Token)) {
+    fn add_one_maybe(&mut self, tok: quoted::TokenTree) {
         if !self.tokens.contains(&tok) {
             self.tokens.push(tok);
         }
@@ -549,9 +552,9 @@ fn add_all(&mut self, other: &Self) {
 // see `FirstSets::new`.
 fn check_matcher_core(sess: &ParseSess,
                       first_sets: &FirstSets,
-                      matcher: &[TokenTree],
+                      matcher: &[quoted::TokenTree],
                       follow: &TokenSet) -> TokenSet {
-    use print::pprust::token_to_string;
+    use self::quoted::TokenTree;
 
     let mut last = TokenSet::empty();
 
@@ -576,11 +579,11 @@ fn check_matcher_core(sess: &ParseSess,
         // First, update `last` so that it corresponds to the set
         // of NT tokens that might end the sequence `... token`.
         match *token {
-            TokenTree::Token(sp, ref tok) => {
+            TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
                 let can_be_followed_by_any;
-                if let Err(bad_frag) = has_legal_fragment_specifier(tok) {
+                if let Err(bad_frag) = has_legal_fragment_specifier(token) {
                     let msg = format!("invalid fragment specifier `{}`", bad_frag);
-                    sess.span_diagnostic.struct_span_err(sp, &msg)
+                    sess.span_diagnostic.struct_span_err(token.span(), &msg)
                         .help("valid fragment specifiers are `ident`, `block`, \
                                `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
                                and `item`")
@@ -589,7 +592,7 @@ fn check_matcher_core(sess: &ParseSess,
                     // from error messages.)
                     can_be_followed_by_any = true;
                 } else {
-                    can_be_followed_by_any = token_can_be_followed_by_any(tok);
+                    can_be_followed_by_any = token_can_be_followed_by_any(token);
                 }
 
                 if can_be_followed_by_any {
@@ -599,13 +602,12 @@ fn check_matcher_core(sess: &ParseSess,
                     // followed by anything against SUFFIX.
                     continue 'each_token;
                 } else {
-                    last.replace_with((sp, tok.clone()));
+                    last.replace_with(token.clone());
                     suffix_first = build_suffix_first();
                 }
             }
             TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton((d.close_tt(span).span(),
-                                                     Token::CloseDelim(d.delim)));
+                let my_suffix = TokenSet::singleton(d.close_tt(span));
                 check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
@@ -629,7 +631,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe((sp, u.clone()));
+                    new.add_one_maybe(TokenTree::Token(sp, u.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -655,12 +657,13 @@ fn check_matcher_core(sess: &ParseSess,
 
         // Now `last` holds the complete set of NT tokens that could
         // end the sequence before SUFFIX. Check that every one works with `suffix`.
-        'each_last: for &(_sp, ref t) in &last.tokens {
-            if let MatchNt(ref name, ref frag_spec) = *t {
-                for &(sp, ref next_token) in &suffix_first.tokens {
+        'each_last: for token in &last.tokens {
+            if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token {
+                for next_token in &suffix_first.tokens {
                     match is_in_follow(next_token, &frag_spec.name.as_str()) {
                         Err((msg, help)) => {
-                            sess.span_diagnostic.struct_span_err(sp, &msg).help(help).emit();
+                            sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
+                                .help(help).emit();
                             // don't bother reporting every source of
                             // conflict for a particular element of `last`.
                             continue 'each_last;
@@ -676,12 +679,12 @@ fn check_matcher_core(sess: &ParseSess,
                             };
 
                             sess.span_diagnostic.span_err(
-                                sp,
+                                next_token.span(),
                                 &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
                                           is not allowed for `{frag}` fragments",
                                          name=name,
                                          frag=frag_spec,
-                                         next=token_to_string(next_token),
+                                         next=quoted_tt_to_string(next_token),
                                          may_be=may_be)
                             );
                         }
@@ -693,8 +696,8 @@ fn check_matcher_core(sess: &ParseSess,
     last
 }
 
-fn token_can_be_followed_by_any(tok: &Token) -> bool {
-    if let &MatchNt(_, ref frag_spec) = tok {
+fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
+    if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
         frag_can_be_followed_by_any(&frag_spec.name.as_str())
     } else {
         // (Non NT's can always be followed by anything in matchers.)
@@ -732,8 +735,10 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool {
 /// break macros that were relying on that binary operator as a
 /// separator.
 // when changing this do not forget to update doc/book/macros.md!
-fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> {
-    if let &CloseDelim(_) = tok {
+fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'static str)> {
+    use self::quoted::TokenTree;
+
+    if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         Ok(true)
@@ -749,27 +754,30 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
                 // maintain
                 Ok(true)
             },
-            "stmt" | "expr"  => {
-                match *tok {
+            "stmt" | "expr"  => match *tok {
+                TokenTree::Token(_, ref tok) => match *tok {
                     FatArrow | Comma | Semi => Ok(true),
                     _ => Ok(false)
-                }
+                },
+                _ => Ok(false),
             },
-            "pat" => {
-                match *tok {
+            "pat" => match *tok {
+                TokenTree::Token(_, ref tok) => match *tok {
                     FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
                     Ident(i) if i.name == "if" || i.name == "in" => Ok(true),
                     _ => Ok(false)
-                }
+                },
+                _ => Ok(false),
             },
-            "path" | "ty" => {
-                match *tok {
+            "path" | "ty" => match *tok {
+                TokenTree::Token(_, ref tok) => match *tok {
                     OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
                     Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
-                    MatchNt(_, ref frag) if frag.name == "block" => Ok(true),
                     Ident(i) if i.name == "as" || i.name == "where" => Ok(true),
                     _ => Ok(false)
-                }
+                },
+                TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
+                _ => Ok(false),
             },
             "ident" => {
                 // being a single token, idents are harmless
@@ -780,6 +788,7 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
                 // harmless
                 Ok(true)
             },
+            "" => Ok(true), // keywords::Invalid
             _ => Err((format!("invalid fragment specifier `{}`", frag),
                      "valid fragment specifiers are `ident`, `block`, \
                       `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
@@ -788,9 +797,9 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
     }
 }
 
-fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
+fn has_legal_fragment_specifier(tok: &quoted::TokenTree) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
-    if let &MatchNt(_, ref frag_spec) = tok {
+    if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
         let s = &frag_spec.name.as_str();
         if !is_legal_fragment_specifier(s) {
             return Err(s.to_string());
@@ -802,7 +811,15 @@ fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
 fn is_legal_fragment_specifier(frag: &str) -> bool {
     match frag {
         "item" | "block" | "stmt" | "expr" | "pat" |
-        "path" | "ty" | "ident" | "meta" | "tt" => true,
+        "path" | "ty" | "ident" | "meta" | "tt" | "" => true,
         _ => false,
     }
 }
+
+fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
+    match *tt {
+        quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
+        quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
+        _ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
+    }
+}
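
The follow-set rule that `check_matcher_core` and `is_in_follow` enforce above restricts what may come after a fragment matcher; for `expr` (and `stmt`) the only legal followers are `=>`, `,`, and `;`. A minimal sketch of a definition this check rejects and one it accepts (the macro names are illustrative, not part of this patch):

```rust
// A `$x:expr` matcher may only be followed by `=>`, `,`, or `;`, so this
// definition is rejected by the follow-set check:
//
//     macro_rules! bad { ($a:expr $b:expr) => { () }; }
//
// Inserting a separator that is in FOLLOW(expr) makes it legal:
macro_rules! ok {
    ($a:expr, $b:expr) => { ($a, $b) };
}

fn main() {
    assert_eq!(ok!(1 + 1, 2), (2, 2));
}
```
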
diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs
new file mode 100644 (file)
index 0000000..530824b
--- /dev/null
@@ -0,0 +1,234 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use ast;
+use ext::tt::macro_parser;
+use parse::{ParseSess, token};
+use print::pprust;
+use symbol::{keywords, Symbol};
+use syntax_pos::{DUMMY_SP, Span, BytePos};
+use tokenstream;
+
+use std::rc::Rc;
+
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub struct Delimited {
+    pub delim: token::DelimToken,
+    pub tts: Vec<TokenTree>,
+}
+
+impl Delimited {
+    pub fn open_token(&self) -> token::Token {
+        token::OpenDelim(self.delim)
+    }
+
+    pub fn close_token(&self) -> token::Token {
+        token::CloseDelim(self.delim)
+    }
+
+    pub fn open_tt(&self, span: Span) -> TokenTree {
+        let open_span = match span {
+            DUMMY_SP => DUMMY_SP,
+            _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span },
+        };
+        TokenTree::Token(open_span, self.open_token())
+    }
+
+    pub fn close_tt(&self, span: Span) -> TokenTree {
+        let close_span = match span {
+            DUMMY_SP => DUMMY_SP,
+            _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span },
+        };
+        TokenTree::Token(close_span, self.close_token())
+    }
+}
+
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
+pub struct SequenceRepetition {
+    /// The sequence of token trees
+    pub tts: Vec<TokenTree>,
+    /// The optional separator
+    pub separator: Option<token::Token>,
+    /// Whether the sequence can be repeated zero (*), or one or more times (+)
+    pub op: KleeneOp,
+    /// The number of metavariable declarations (`MetaVarDecl`s) in the sequence (and subsequences)
+    pub num_captures: usize,
+}
+
+/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
+/// for token sequences.
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+pub enum KleeneOp {
+    ZeroOrMore,
+    OneOrMore,
+}
+
+/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
+/// are "first-class" token trees.
+#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
+pub enum TokenTree {
+    Token(Span, token::Token),
+    Delimited(Span, Rc<Delimited>),
+    /// A kleene-style repetition sequence with a span
+    Sequence(Span, Rc<SequenceRepetition>),
+    /// Matches a nonterminal. This is only used in the left hand side of MBE macros.
+    MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */),
+}
+
+impl TokenTree {
+    pub fn len(&self) -> usize {
+        match *self {
+            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+                token::NoDelim => delimed.tts.len(),
+                _ => delimed.tts.len() + 2,
+            },
+            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+            _ => 0,
+        }
+    }
+
+    pub fn get_tt(&self, index: usize) -> TokenTree {
+        match (self, index) {
+            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
+                delimed.tts[index].clone()
+            }
+            (&TokenTree::Delimited(span, ref delimed), _) => {
+                if index == 0 {
+                    return delimed.open_tt(span);
+                }
+                if index == delimed.tts.len() + 1 {
+                    return delimed.close_tt(span);
+                }
+                delimed.tts[index - 1].clone()
+            }
+            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+            _ => panic!("Cannot expand a token tree"),
+        }
+    }
+
+    /// Retrieve the TokenTree's span.
+    pub fn span(&self) -> Span {
+        match *self {
+            TokenTree::Token(sp, _) |
+            TokenTree::MetaVarDecl(sp, _, _) |
+            TokenTree::Delimited(sp, _) |
+            TokenTree::Sequence(sp, _) => sp,
+        }
+    }
+}
+
+pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess)
+             -> Vec<TokenTree> {
+    let mut result = Vec::new();
+    let mut trees = input.iter().cloned();
+    while let Some(tree) = trees.next() {
+        let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
+        match tree {
+            TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
+                let span = match trees.next() {
+                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
+                        Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => {
+                            let span = Span { lo: start_sp.lo, ..end_sp };
+                            result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                            continue
+                        }
+                        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+                    },
+                    tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
+                };
+                sess.missing_fragment_specifiers.borrow_mut().insert(span);
+                result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident()));
+            }
+            _ => result.push(tree),
+        }
+    }
+    result
+}
+
+fn parse_tree<I>(tree: tokenstream::TokenTree,
+                 trees: &mut I,
+                 expect_matchers: bool,
+                 sess: &ParseSess)
+                 -> TokenTree
+    where I: Iterator<Item = tokenstream::TokenTree>,
+{
+    match tree {
+        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+            Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => {
+                if delimited.delim != token::Paren {
+                    let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
+                    let msg = format!("expected `(`, found `{}`", tok);
+                    sess.span_diagnostic.span_err(span, &msg);
+                }
+                let sequence = parse(&delimited.tts, expect_matchers, sess);
+                let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
+                let name_captures = macro_parser::count_names(&sequence);
+                TokenTree::Sequence(span, Rc::new(SequenceRepetition {
+                    tts: sequence,
+                    separator: separator,
+                    op: op,
+                    num_captures: name_captures,
+                }))
+            }
+            Some(tokenstream::TokenTree::Token(ident_span, token::Ident(ident))) => {
+                let span = Span { lo: span.lo, ..ident_span };
+                if ident.name == keywords::Crate.name() {
+                    let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident };
+                    TokenTree::Token(span, token::Ident(ident))
+                } else {
+                    TokenTree::Token(span, token::SubstNt(ident))
+                }
+            }
+            Some(tokenstream::TokenTree::Token(span, tok)) => {
+                let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok));
+                sess.span_diagnostic.span_err(span, &msg);
+                TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident()))
+            }
+            None => TokenTree::Token(span, token::Dollar),
+        },
+        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+        tokenstream::TokenTree::Delimited(span, delimited) => {
+            TokenTree::Delimited(span, Rc::new(Delimited {
+                delim: delimited.delim,
+                tts: parse(&delimited.tts, expect_matchers, sess),
+            }))
+        }
+    }
+}
+
+fn parse_sep_and_kleene_op<I>(input: &mut I, span: Span, sess: &ParseSess)
+                              -> (Option<token::Token>, KleeneOp)
+    where I: Iterator<Item = tokenstream::TokenTree>,
+{
+    fn kleene_op(token: &token::Token) -> Option<KleeneOp> {
+        match *token {
+            token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
+            token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
+            _ => None,
+        }
+    }
+
+    let span = match input.next() {
+        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
+            Some(op) => return (None, op),
+            None => match input.next() {
+                Some(tokenstream::TokenTree::Token(span, tok2)) => match kleene_op(&tok2) {
+                    Some(op) => return (Some(tok), op),
+                    None => span,
+                },
+                tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+            }
+        },
+        tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
+    };
+
+    sess.span_diagnostic.span_err(span, "expected `*` or `+`");
+    (None, KleeneOp::ZeroOrMore)
+}
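
The new `quoted::parse` above lifts `$name:kind` matchers and `$( ... )` repetitions out of the plain token stream into first-class `MetaVarDecl` and `Sequence` trees. A rough sketch of the resulting shape, using simplified stand-in types rather than the real `ext::tt::quoted` API (which also carries spans, `Rc` wrappers, separators, and capture counts):

```rust
// Simplified stand-ins for the shapes defined in quoted.rs above.
#[derive(Debug)]
#[allow(dead_code)]
enum KleeneOp { ZeroOrMore, OneOrMore }

#[derive(Debug)]
enum TokenTree {
    Token(&'static str),                     // an ordinary token such as `,`
    MetaVarDecl(&'static str, &'static str), // `$name:kind`, LHS only
    Sequence(Vec<TokenTree>, KleeneOp),      // `$( ... )*` or `$( ... )+`
}

fn main() {
    // Roughly what `quoted::parse` produces for the matcher `$e:expr, $($rest:tt)*`:
    let lhs = vec![
        TokenTree::MetaVarDecl("e", "expr"),
        TokenTree::Token(","),
        TokenTree::Sequence(
            vec![TokenTree::MetaVarDecl("rest", "tt")],
            KleeneOp::ZeroOrMore,
        ),
    ];
    println!("{:?}", lhs);
}
```
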
index 38becbe7b1d30a31a5ba0869f065adfd1fcad786..90f64a5208f75cc006830fc74b5abf51a68f2dff 100644 (file)
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
-use self::LockstepIterSize::*;
 
 use ast::Ident;
 use errors::Handler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
-use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT};
+use ext::tt::quoted;
+use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{self, TokenTree};
+use tokenstream::{TokenTree, Delimited};
 use util::small_vector::SmallVector;
 
 use std::rc::Rc;
+use std::mem;
 use std::ops::Add;
 use std::collections::HashMap;
 
-///an unzipping of `TokenTree`s
-#[derive(Clone)]
-struct TtFrame {
-    forest: TokenTree,
-    idx: usize,
-    dotdotdoted: bool,
-    sep: Option<Token>,
+// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
+enum Frame {
+    Delimited {
+        forest: Rc<quoted::Delimited>,
+        idx: usize,
+        span: Span,
+    },
+    Sequence {
+        forest: Rc<quoted::SequenceRepetition>,
+        idx: usize,
+        sep: Option<Token>,
+    },
 }
 
-#[derive(Clone)]
-struct TtReader<'a> {
-    sp_diag: &'a Handler,
-    /// the unzipped tree:
-    stack: SmallVector<TtFrame>,
-    /* for MBE-style macro transcription */
-    interpolations: HashMap<Ident, Rc<NamedMatch>>,
+impl Frame {
+    fn new(tts: Vec<quoted::TokenTree>) -> Frame {
+        let forest = Rc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
+        Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP }
+    }
+}
+
+impl Iterator for Frame {
+    type Item = quoted::TokenTree;
 
-    repeat_idx: Vec<usize>,
-    repeat_len: Vec<usize>,
+    fn next(&mut self) -> Option<quoted::TokenTree> {
+        match *self {
+            Frame::Delimited { ref forest, ref mut idx, .. } => {
+                *idx += 1;
+                forest.tts.get(*idx - 1).cloned()
+            }
+            Frame::Sequence { ref forest, ref mut idx, .. } => {
+                *idx += 1;
+                forest.tts.get(*idx - 1).cloned()
+            }
+        }
+    }
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::{Sequence, MetaVarDecl}`s or `SubstNt`s, `interp` can
 /// (and should) be None.
 pub fn transcribe(sp_diag: &Handler,
                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                  src: Vec<tokenstream::TokenTree>)
+                  src: Vec<quoted::TokenTree>)
                   -> Vec<TokenTree> {
-    let mut r = TtReader {
-        sp_diag: sp_diag,
-        stack: SmallVector::one(TtFrame {
-            forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
-                tts: src,
-                // doesn't matter. This merely holds the root unzipping.
-                separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
-            })),
-            idx: 0,
-            dotdotdoted: false,
-            sep: None,
-        }),
-        interpolations: match interp { /* just a convenience */
-            None => HashMap::new(),
-            Some(x) => x,
-        },
-        repeat_idx: Vec::new(),
-        repeat_len: Vec::new(),
-    };
+    let mut stack = SmallVector::one(Frame::new(src));
+    let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
+    let mut repeats = Vec::new();
+    let mut result = Vec::new();
+    let mut result_stack = Vec::new();
 
-    let mut tts = Vec::new();
-    let mut prev_span = DUMMY_SP;
-    while let Some(tt) = tt_next_token(&mut r, prev_span) {
-        prev_span = tt.span();
-        tts.push(tt);
-    }
-    tts
-}
-
-fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
-    r.repeat_idx.iter().fold(start, |ad, idx| {
-        match *ad {
-            MatchedNonterminal(_) => {
-                // end of the line; duplicate henceforth
-                ad.clone()
+    loop {
+        let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
+            tree
+        } else {
+            if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
+                let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
+                *repeat_idx += 1;
+                if *repeat_idx < repeat_len {
+                    *idx = 0;
+                    if let Some(sep) = sep.clone() {
+                        // repeat same span, I guess
+                        let prev_span = result.last().map(TokenTree::span).unwrap_or(DUMMY_SP);
+                        result.push(TokenTree::Token(prev_span, sep));
+                    }
+                    continue
+                }
             }
-            MatchedSeq(ref ads, _) => ads[*idx].clone()
-        }
-    })
-}
-
-fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
-    let matched_opt = r.interpolations.get(&name).cloned();
-    matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
-}
-
-#[derive(Clone)]
-enum LockstepIterSize {
-    LisUnconstrained,
-    LisConstraint(usize, Ident),
-    LisContradiction(String),
-}
-
-impl Add for LockstepIterSize {
-    type Output = LockstepIterSize;
 
-    fn add(self, other: LockstepIterSize) -> LockstepIterSize {
-        match self {
-            LisUnconstrained => other,
-            LisContradiction(_) => self,
-            LisConstraint(l_len, ref l_id) => match other {
-                LisUnconstrained => self.clone(),
-                LisContradiction(_) => other,
-                LisConstraint(r_len, _) if l_len == r_len => self.clone(),
-                LisConstraint(r_len, r_id) => {
-                    LisContradiction(format!("inconsistent lockstep iteration: \
-                                              '{}' has {} items, but '{}' has {}",
-                                              l_id, l_len, r_id, r_len))
+            match stack.pop().unwrap() {
+                Frame::Sequence { .. } => {
+                    repeats.pop();
+                }
+                Frame::Delimited { forest, span, .. } => {
+                    if result_stack.is_empty() {
+                        return result;
+                    }
+                    let tree = TokenTree::Delimited(span, Rc::new(Delimited {
+                        delim: forest.delim,
+                        tts: result,
+                    }));
+                    result = result_stack.pop().unwrap();
+                    result.push(tree);
                 }
-            },
-        }
-    }
-}
-
-fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
-    match *t {
-        TokenTree::Delimited(_, ref delimed) => {
-            delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
-                size + lockstep_iter_size(tt, r)
-            })
-        },
-        TokenTree::Sequence(_, ref seq) => {
-            seq.tts.iter().fold(LisUnconstrained, |size, tt| {
-                size + lockstep_iter_size(tt, r)
-            })
-        },
-        TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
-            match lookup_cur_matched(r, name) {
-                Some(matched) => match *matched {
-                    MatchedNonterminal(_) => LisUnconstrained,
-                    MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
-                },
-                _ => LisUnconstrained
-            },
-        TokenTree::Token(..) => LisUnconstrained,
-    }
-}
-
-/// Return the next token from the TtReader.
-/// EFFECT: advances the reader's token field
-fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
-    loop {
-        let should_pop = if let Some(frame) = r.stack.last() {
-            if frame.idx < frame.forest.len() {
-                break;
             }
-            !frame.dotdotdoted || *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
-        } else {
-            return None;
+            continue
         };
 
-        /* done with this set; pop or repeat? */
-        if should_pop {
-            let prev = r.stack.pop().unwrap();
-            if let Some(frame) = r.stack.last_mut() {
-                frame.idx += 1;
-            } else {
-                return None;
-            }
-            if prev.dotdotdoted {
-                r.repeat_idx.pop();
-                r.repeat_len.pop();
-            }
-        } else { /* repeat */
-            *r.repeat_idx.last_mut().unwrap() += 1;
-            r.stack.last_mut().unwrap().idx = 0;
-            if let Some(tk) = r.stack.last().unwrap().sep.clone() {
-                return Some(TokenTree::Token(prev_span, tk)); // repeat same span, I guess
-            }
-        }
-    }
-    loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting
-              with a `TokenTree::Token`, even though it won't happen */
-        let t = {
-            let frame = r.stack.last().unwrap();
-            // FIXME(pcwalton): Bad copy.
-            frame.forest.get_tt(frame.idx)
-        };
-        match t {
-            TokenTree::Sequence(sp, seq) => {
+        match tree {
+            quoted::TokenTree::Sequence(sp, seq) => {
                 // FIXME(pcwalton): Bad copy.
-                match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
-                                         r) {
-                    LisUnconstrained => {
-                        panic!(r.sp_diag.span_fatal(
+                match lockstep_iter_size(&quoted::TokenTree::Sequence(sp, seq.clone()),
+                                         &interpolations,
+                                         &repeats) {
+                    LockstepIterSize::Unconstrained => {
+                        panic!(sp_diag.span_fatal(
                             sp.clone(), /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
                              variables matched as repeating at this depth"));
                     }
-                    LisContradiction(ref msg) => {
+                    LockstepIterSize::Contradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
+                        panic!(sp_diag.span_fatal(sp.clone(), &msg[..]));
                     }
-                    LisConstraint(len, _) => {
+                    LockstepIterSize::Constraint(len, _) => {
                         if len == 0 {
-                            if seq.op == tokenstream::KleeneOp::OneOrMore {
+                            if seq.op == quoted::KleeneOp::OneOrMore {
                                 // FIXME #2887 blame invoker
-                                panic!(r.sp_diag.span_fatal(sp.clone(),
-                                                     "this must repeat at least once"));
+                                panic!(sp_diag.span_fatal(sp.clone(),
+                                                          "this must repeat at least once"));
                             }
-
-                            r.stack.last_mut().unwrap().idx += 1;
-                            return tt_next_token(r, prev_span);
+                        } else {
+                            repeats.push((0, len));
+                            stack.push(Frame::Sequence {
+                                idx: 0,
+                                sep: seq.separator.clone(),
+                                forest: seq,
+                            });
                         }
-                        r.repeat_len.push(len);
-                        r.repeat_idx.push(0);
-                        r.stack.push(TtFrame {
-                            idx: 0,
-                            dotdotdoted: true,
-                            sep: seq.separator.clone(),
-                            forest: TokenTree::Sequence(sp, seq),
-                        });
                     }
                 }
             }
             // FIXME #2887: think about span stuff here
-            TokenTree::Token(sp, SubstNt(ident)) => {
-                r.stack.last_mut().unwrap().idx += 1;
-                match lookup_cur_matched(r, ident) {
-                    None => {
-                        return Some(TokenTree::Token(sp, SubstNt(ident)));
-                        // this can't be 0 length, just like TokenTree::Delimited
-                    }
+            quoted::TokenTree::Token(sp, SubstNt(ident)) => {
+                match lookup_cur_matched(ident, &interpolations, &repeats) {
+                    None => result.push(TokenTree::Token(sp, SubstNt(ident))),
                     Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                         match **nt {
                             // sidestep the interpolation tricks for ident because
                             // (a) idents can be in lots of places, so it'd be a pain
                             // (b) we actually can, since it's a token.
                             NtIdent(ref sn) => {
-                                return Some(TokenTree::Token(sn.span, token::Ident(sn.node)));
+                                result.push(TokenTree::Token(sn.span, token::Ident(sn.node)));
                             }
-                            NtTT(ref tt) => return Some(tt.clone()),
+                            NtTT(ref tt) => result.push(tt.clone()),
                             _ => {
                                 // FIXME(pcwalton): Bad copy
-                                return Some(TokenTree::Token(sp, token::Interpolated(nt.clone())));
+                                result.push(TokenTree::Token(sp, token::Interpolated(nt.clone())));
                             }
                         }
                     } else {
-                        panic!(r.sp_diag.span_fatal(
+                        panic!(sp_diag.span_fatal(
                             sp, /* blame the macro writer */
                             &format!("variable '{}' is still repeating at this depth", ident)));
                     }
                 }
             }
-            // TokenTree::Delimited or any token that can be unzipped
-            seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
-                // do not advance the idx yet
-                r.stack.push(TtFrame {
-                   forest: seq,
-                   idx: 0,
-                   dotdotdoted: false,
-                   sep: None
-                });
-                // if this could be 0-length, we'd need to potentially recur here
+            quoted::TokenTree::Delimited(span, delimited) => {
+                stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
+                result_stack.push(mem::replace(&mut result, Vec::new()));
             }
-            tt @ TokenTree::Token(..) => {
-                r.stack.last_mut().unwrap().idx += 1;
-                return Some(tt);
+            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
+            quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
+        }
+    }
+}
+
+fn lookup_cur_matched(ident: Ident,
+                      interpolations: &HashMap<Ident, Rc<NamedMatch>>,
+                      repeats: &[(usize, usize)])
+                      -> Option<Rc<NamedMatch>> {
+    interpolations.get(&ident).map(|matched| {
+        repeats.iter().fold(matched.clone(), |ad, &(idx, _)| {
+            match *ad {
+                MatchedNonterminal(_) => {
+                    // end of the line; duplicate henceforth
+                    ad.clone()
+                }
+                MatchedSeq(ref ads, _) => ads[idx].clone()
             }
+        })
+    })
+}
+
+#[derive(Clone)]
+enum LockstepIterSize {
+    Unconstrained,
+    Constraint(usize, Ident),
+    Contradiction(String),
+}
+
+impl Add for LockstepIterSize {
+    type Output = LockstepIterSize;
+
+    fn add(self, other: LockstepIterSize) -> LockstepIterSize {
+        match self {
+            LockstepIterSize::Unconstrained => other,
+            LockstepIterSize::Contradiction(_) => self,
+            LockstepIterSize::Constraint(l_len, ref l_id) => match other {
+                LockstepIterSize::Unconstrained => self.clone(),
+                LockstepIterSize::Contradiction(_) => other,
+                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self.clone(),
+                LockstepIterSize::Constraint(r_len, r_id) => {
+                    let msg = format!("inconsistent lockstep iteration: \
+                                       '{}' has {} items, but '{}' has {}",
+                                      l_id, l_len, r_id, r_len);
+                    LockstepIterSize::Contradiction(msg)
+                }
+            },
         }
     }
 }
+
+fn lockstep_iter_size(tree: &quoted::TokenTree,
+                      interpolations: &HashMap<Ident, Rc<NamedMatch>>,
+                      repeats: &[(usize, usize)])
+                      -> LockstepIterSize {
+    use self::quoted::TokenTree;
+    match *tree {
+        TokenTree::Delimited(_, ref delimed) => {
+            delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
+                size + lockstep_iter_size(tt, interpolations, repeats)
+            })
+        },
+        TokenTree::Sequence(_, ref seq) => {
+            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
+                size + lockstep_iter_size(tt, interpolations, repeats)
+            })
+        },
+        TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) =>
+            match lookup_cur_matched(name, interpolations, repeats) {
+                Some(matched) => match *matched {
+                    MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
+                    MatchedSeq(ref ads, _) => LockstepIterSize::Constraint(ads.len(), name),
+                },
+                _ => LockstepIterSize::Unconstrained
+            },
+        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
+    }
+}
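
The `LockstepIterSize` arithmetic above is what forces every metavariable used inside one `$( ... )` transcription to have captured the same number of items; a mismatch becomes the "inconsistent lockstep iteration" error. An illustrative macro (not from this patch) that relies on lockstep expansion:

```rust
// Both repetitions expand in lockstep: `$a` and `$b` must capture the same
// number of items, otherwise expansion aborts with the "inconsistent
// lockstep iteration" error built by `LockstepIterSize::add`.
macro_rules! zip {
    ($($a:expr),*; $($b:expr),*) => { [ $( ($a, $b) ),* ] };
}

fn main() {
    let pairs = zip!(1, 2, 3; 4, 5, 6); // 3 items on each side: OK
    assert_eq!(pairs, [(1, 4), (2, 5), (3, 6)]);
    // zip!(1, 2; 3) would fail: 'a' has 2 items, but 'b' has 1.
}
```
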
index 1ee070cb92d9fe03de2a6bbcf2b9fb6a96d5be7f..257b7efba5c8e8d2f49cf96eef59d93a56a125f1 100644 (file)
@@ -551,13 +551,6 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
                             }
                         ))
         },
-        TokenTree::Sequence(span, ref seq) =>
-            TokenTree::Sequence(fld.new_span(span),
-                       Rc::new(SequenceRepetition {
-                           tts: fld.fold_tts(&seq.tts),
-                           separator: seq.separator.clone().map(|tok| fld.fold_token(tok)),
-                           ..**seq
-                       })),
     }
 }
 
@@ -578,7 +571,6 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
             token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
         }
         token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
-        token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
         _ => t
     }
 }
index 87a03adf6b77c018784aa535a3939f0ba1db4a2c..39a9aff48bf27163d0b8dcea76916863b54bd7de 100644 (file)
@@ -139,6 +139,7 @@ pub mod tt {
         pub mod transcribe;
         pub mod macro_parser;
         pub mod macro_rules;
+        pub mod quoted;
     }
 }
 
index b7f6e6a2384f71d56435e3f3b0b12711dfd25c15..de8a87e3a2b3293c9531d75b6dc53b7fe1cf3833 100644 (file)
@@ -1693,6 +1693,7 @@ mod tests {
     use feature_gate::UnstableFeatures;
     use parse::token;
     use std::cell::RefCell;
+    use std::collections::HashSet;
     use std::io;
     use std::rc::Rc;
 
@@ -1704,6 +1705,7 @@ fn mk_sess(cm: Rc<CodeMap>) -> ParseSess {
             config: CrateConfig::new(),
             included_mod_stack: RefCell::new(Vec::new()),
             code_map: cm,
+            missing_fragment_specifiers: RefCell::new(HashSet::new()),
         }
     }
 
index 20e80afc115f5676ffc746377dd6920f2f2ae7ec..6fec49b229abeeb315d37c608d7c033381619f7a 100644 (file)
@@ -46,6 +46,7 @@ pub struct ParseSess {
     pub span_diagnostic: Handler,
     pub unstable_features: UnstableFeatures,
     pub config: CrateConfig,
+    pub missing_fragment_specifiers: RefCell<HashSet<Span>>,
     /// Used to determine and report recursive mod inclusions
     included_mod_stack: RefCell<Vec<PathBuf>>,
     code_map: Rc<CodeMap>,
@@ -66,6 +67,7 @@ pub fn with_span_handler(handler: Handler, code_map: Rc<CodeMap>) -> ParseSess {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
             config: HashSet::new(),
+            missing_fragment_specifiers: RefCell::new(HashSet::new()),
             included_mod_stack: RefCell::new(vec![]),
             code_map: code_map
         }
@@ -139,13 +141,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-// Warning: This parses with quote_depth > 0, which is not the default.
 pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                     -> PResult<'a, Vec<tokenstream::TokenTree>> {
-    let mut p = new_parser_from_source_str(sess, name, source);
-    p.quote_depth += 1;
-    // right now this is re-creating the token trees from ... token trees.
-    p.parse_all_token_trees()
+                                     -> Vec<tokenstream::TokenTree> {
+    filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source))
 }
 
 // Create a new parser from a source string
@@ -986,7 +984,7 @@ fn ttdelim_span() {
             _ => panic!("not a macro"),
         };
 
-        let span = tts.iter().rev().next().unwrap().get_span();
+        let span = tts.iter().rev().next().unwrap().span();
 
         match sess.codemap().span_to_snippet(span) {
             Ok(s) => assert_eq!(&s[..], "{ body }"),
index b5b8a6bc0ef64c80f08a32ed04739606146392a4..71274c4fdaa4ead88e9512baac8a1ec75d7b065c 100644 (file)
 use codemap::{self, CodeMap, Spanned, spanned, respan};
 use syntax_pos::{self, Span, Pos, BytePos, mk_sp};
 use errors::{self, DiagnosticBuilder};
-use ext::tt::macro_parser;
-use parse;
-use parse::classify;
+use parse::{self, classify, token};
 use parse::common::SeqSep;
 use parse::lexer::TokenAndSpan;
 use parse::obsolete::ObsoleteSyntax;
-use parse::token::{self, MatchNt, SubstNt};
 use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
 use ptr::P;
 use parse::PResult;
-use tokenstream::{self, Delimited, SequenceRepetition, TokenTree};
+use tokenstream::{Delimited, TokenTree};
 use symbol::{Symbol, keywords};
 use util::ThinVec;
 
@@ -168,8 +165,6 @@ pub struct Parser<'a> {
     /// the previous token kind
     prev_token_kind: PrevTokenKind,
     pub restrictions: Restrictions,
-    pub quote_depth: usize, // not (yet) related to the quasiquoter
-    parsing_token_tree: bool,
     /// The set of seen errors about obsolete syntax. Used to suppress
     /// extra detail when the same error is seen twice
     pub obsolete_set: HashSet<ObsoleteSyntax>,
@@ -329,8 +324,6 @@ pub fn new(sess: &'a ParseSess,
             prev_span: syntax_pos::DUMMY_SP,
             prev_token_kind: PrevTokenKind::Other,
             restrictions: Restrictions::empty(),
-            quote_depth: 0,
-            parsing_token_tree: false,
             obsolete_set: HashSet::new(),
             directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
             root_module_name: None,
@@ -359,20 +352,11 @@ fn next_tok(&mut self) -> TokenAndSpan {
                 if i + 1 < tts.len() {
                     self.tts.push((tts, i + 1));
                 }
-                // FIXME(jseyfried): remove after fixing #39390 in #39419.
-                if self.quote_depth > 0 {
-                    if let TokenTree::Sequence(sp, _) = tt {
-                        self.span_err(sp, "attempted to repeat an expression containing no \
-                                           syntax variables matched as repeating at this depth");
-                    }
-                }
-                match tt {
-                    TokenTree::Token(sp, tok) => TokenAndSpan { tok: tok, sp: sp },
-                    _ if tt.len() > 0 => {
-                        self.tts.push((tt, 0));
-                        continue
-                    }
-                    _ => continue,
+                if let TokenTree::Token(sp, tok) = tt {
+                    TokenAndSpan { tok: tok, sp: sp }
+                } else {
+                    self.tts.push((tt, 0));
+                    continue
                 }
             } else {
                 TokenAndSpan { tok: token::Eof, sp: self.span }
@@ -997,7 +981,6 @@ pub fn look_ahead<R, F>(&mut self, dist: usize, f: F) -> R where
                 tok = match tts.get_tt(i) {
                     TokenTree::Token(_, tok) => tok,
                     TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
-                    TokenTree::Sequence(..) => token::Dollar,
                 };
             }
         }
@@ -1187,10 +1170,7 @@ pub fn parse_trait_item(&mut self) -> PResult<'a, TraitItem> {
             self.expect(&token::Not)?;
 
             // eat a matched-delimiter token tree:
-            let delim = self.expect_open_delim()?;
-            let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
-                                            SeqSep::none(),
-                                            |pp| pp.parse_token_tree())?;
+            let (delim, tts) = self.expect_delimited_token_tree()?;
             if delim != token::Brace {
                 self.expect(&token::Semi)?
             }
@@ -1448,10 +1428,7 @@ pub fn parse_ty_no_plus(&mut self) -> PResult<'a, P<Ty>> {
             let path = self.parse_path(PathStyle::Type)?;
             if self.eat(&token::Not) {
                 // MACRO INVOCATION
-                let delim = self.expect_open_delim()?;
-                let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
-                                                SeqSep::none(),
-                                                |p| p.parse_token_tree())?;
+                let (_, tts) = self.expect_delimited_token_tree()?;
                 let hi = self.span.hi;
                 TyKind::Mac(spanned(lo, hi, Mac_ { path: path, tts: tts }))
             } else {
@@ -2045,13 +2022,12 @@ pub fn mk_lit_u32(&mut self, i: u32, attrs: ThinVec<Attribute>) -> P<Expr> {
         })
     }
 
-    fn expect_open_delim(&mut self) -> PResult<'a, token::DelimToken> {
-        self.expected_tokens.push(TokenType::Token(token::Gt));
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, Vec<TokenTree>)> {
         match self.token {
-            token::OpenDelim(delim) => {
-                self.bump();
-                Ok(delim)
-            },
+            token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
+                TokenTree::Delimited(_, delimited) => (delim, delimited.tts.clone()),
+                _ => unreachable!(),
+            }),
             _ => Err(self.fatal("expected open delimiter")),
         }
     }
@@ -2261,10 +2237,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
                     // `!`, as an operator, is prefix, so we know this isn't that
                     if self.eat(&token::Not) {
                         // MACRO INVOCATION expression
-                        let delim = self.expect_open_delim()?;
-                        let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
-                                                        SeqSep::none(),
-                                                        |p| p.parse_token_tree())?;
+                        let (_, tts) = self.expect_delimited_token_tree()?;
                         let hi = self.prev_span.hi;
                         return Ok(self.mk_mac_expr(lo, hi, Mac_ { path: pth, tts: tts }, attrs));
                     }
@@ -2586,139 +2559,22 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: BytePos) -> PResult<
         return Ok(e);
     }
 
-    // Parse unquoted tokens after a `$` in a token tree
-    fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
-        let mut sp = self.span;
-        let name = match self.token {
-            token::Dollar => {
-                self.bump();
-
-                if self.token == token::OpenDelim(token::Paren) {
-                    let Spanned { node: seq, span: seq_span } = self.parse_seq(
-                        &token::OpenDelim(token::Paren),
-                        &token::CloseDelim(token::Paren),
-                        SeqSep::none(),
-                        |p| p.parse_token_tree()
-                    )?;
-                    let (sep, repeat) = self.parse_sep_and_kleene_op()?;
-                    let name_num = macro_parser::count_names(&seq);
-                    return Ok(TokenTree::Sequence(mk_sp(sp.lo, seq_span.hi),
-                                      Rc::new(SequenceRepetition {
-                                          tts: seq,
-                                          separator: sep,
-                                          op: repeat,
-                                          num_captures: name_num
-                                      })));
-                } else if self.token.is_keyword(keywords::Crate) {
-                    let ident = match self.token {
-                        token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id },
-                        _ => unreachable!(),
-                    };
-                    self.bump();
-                    return Ok(TokenTree::Token(sp, token::Ident(ident)));
-                } else {
-                    sp = mk_sp(sp.lo, self.span.hi);
-                    self.parse_ident().unwrap_or_else(|mut e| {
-                        e.emit();
-                        keywords::Invalid.ident()
-                    })
-                }
-            }
-            token::SubstNt(name) => {
-                self.bump();
-                name
-            }
-            _ => unreachable!()
-        };
-        // continue by trying to parse the `:ident` after `$name`
-        if self.token == token::Colon &&
-                self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) {
-            self.bump();
-            sp = mk_sp(sp.lo, self.span.hi);
-            let nt_kind = self.parse_ident()?;
-            Ok(TokenTree::Token(sp, MatchNt(name, nt_kind)))
-        } else {
-            Ok(TokenTree::Token(sp, SubstNt(name)))
-        }
-    }
-
     pub fn check_unknown_macro_variable(&mut self) {
-        if self.quote_depth == 0 && !self.parsing_token_tree {
-            match self.token {
-                token::SubstNt(name) =>
-                    self.fatal(&format!("unknown macro variable `{}`", name)).emit(),
-                _ => {}
-            }
-        }
-    }
-
-    /// Parse an optional separator followed by a Kleene-style
-    /// repetition token (+ or *).
-    pub fn parse_sep_and_kleene_op(&mut self)
-                                   -> PResult<'a, (Option<token::Token>, tokenstream::KleeneOp)> {
-        fn parse_kleene_op<'a>(parser: &mut Parser<'a>) ->
-          PResult<'a,  Option<tokenstream::KleeneOp>> {
-            match parser.token {
-                token::BinOp(token::Star) => {
-                    parser.bump();
-                    Ok(Some(tokenstream::KleeneOp::ZeroOrMore))
-                },
-                token::BinOp(token::Plus) => {
-                    parser.bump();
-                    Ok(Some(tokenstream::KleeneOp::OneOrMore))
-                },
-                _ => Ok(None)
-            }
-        };
-
-        if let Some(kleene_op) = parse_kleene_op(self)? {
-            return Ok((None, kleene_op));
-        }
-
-        let separator = match self.token {
-            token::CloseDelim(..) => None,
-            _ => Some(self.bump_and_get()),
-        };
-        match parse_kleene_op(self)? {
-            Some(zerok) => Ok((separator, zerok)),
-            None => return Err(self.fatal("expected `*` or `+`"))
+        if let token::SubstNt(name) = self.token {
+            self.fatal(&format!("unknown macro variable `{}`", name)).emit()
         }
     }
 
     /// parse a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
-        // FIXME #6994: currently, this is too eager. It
-        // parses token trees but also identifies TokenType::Sequence's
-        // and token::SubstNt's; it's too early to know yet
-        // whether something will be a nonterminal or a seq
-        // yet.
         match self.token {
-            token::OpenDelim(delim) => {
-                if self.quote_depth == 0 && self.tts.last().map(|&(_, i)| i == 1).unwrap_or(false) {
-                    let tt = self.tts.pop().unwrap().0;
-                    self.bump();
-                    return Ok(tt);
-                }
-
-                let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
-                let lo = self.span.lo;
-                self.bump();
-                let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace),
-                                                            &token::CloseDelim(token::Paren),
-                                                            &token::CloseDelim(token::Bracket)],
-                                                          SeqSep::none(),
-                                                          |p| p.parse_token_tree(),
-                                                          |mut e| e.emit());
-                self.parsing_token_tree = parsing_token_tree;
+            token::OpenDelim(..) => {
+                let tt = self.tts.pop().unwrap().0;
+                self.span = tt.span();
                 self.bump();
-
-                Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited {
-                    delim: delim,
-                    tts: tts,
-                })))
+                return Ok(tt);
             },
-            token::CloseDelim(..) | token::Eof => Ok(TokenTree::Token(self.span, token::Eof)),
-            token::Dollar | token::SubstNt(..) if self.quote_depth > 0 => self.parse_unquoted(),
+            token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
         }
     }
@@ -3528,10 +3384,7 @@ pub fn parse_pat(&mut self) -> PResult<'a, P<Pat>> {
                     token::Not if qself.is_none() => {
                         // Parse macro invocation
                         self.bump();
-                        let delim = self.expect_open_delim()?;
-                        let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
-                                                        SeqSep::none(),
-                                                        |p| p.parse_token_tree())?;
+                        let (_, tts) = self.expect_delimited_token_tree()?;
                         let mac = spanned(lo, self.prev_span.hi, Mac_ { path: path, tts: tts });
                         pat = PatKind::Mac(mac);
                     }
@@ -3831,12 +3684,7 @@ fn parse_stmt_without_recovery(&mut self,
                 },
             };
 
-            let tts = self.parse_unspanned_seq(
-                &token::OpenDelim(delim),
-                &token::CloseDelim(delim),
-                SeqSep::none(),
-                |p| p.parse_token_tree()
-            )?;
+            let (_, tts) = self.expect_delimited_token_tree()?;
             let hi = self.prev_span.hi;
 
             let style = if delim == token::Brace {
@@ -4744,10 +4592,7 @@ fn parse_impl_method(&mut self, vis: &Visibility)
             self.expect(&token::Not)?;
 
             // eat a matched-delimiter token tree:
-            let delim = self.expect_open_delim()?;
-            let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
-                                            SeqSep::none(),
-                                            |p| p.parse_token_tree())?;
+            let (delim, tts) = self.expect_delimited_token_tree()?;
             if delim != token::Brace {
                 self.expect(&token::Semi)?
             }
@@ -5893,10 +5738,7 @@ fn parse_macro_use_or_failure(
                 keywords::Invalid.ident() // no special identifier
             };
             // eat a matched-delimiter token tree:
-            let delim = self.expect_open_delim()?;
-            let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
-                                            SeqSep::none(),
-                                            |p| p.parse_token_tree())?;
+            let (delim, tts) = self.expect_delimited_token_tree()?;
             if delim != token::Brace {
                 if !self.eat(&token::Semi) {
                     let prev_span = self.prev_span;
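
The parser hunks above fold every macro-invocation site (types, expressions, patterns, statements, items) onto the single `expect_delimited_token_tree` helper. As a surface-level illustration only (ordinary user code, not compiler internals), the invariant it enforces is that `name!` must be followed by one delimited token tree, with any of the three delimiter kinds accepted:

```rust
// Not compiler code: the parser consumes the whole delimited token tree
// after `show!`, whichever delimiter is used.
macro_rules! show {
    ($($t:tt)*) => { stringify!($($t)*) };
}

fn main() {
    let a = show!(1 + 1);
    let b = show![1 + 1];
    let c = show! { 1 + 1 };
    assert_eq!(a, b);
    assert_eq!(b, c);
}
```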
index 0f0c6d0ca83f5f01a333de4f542a9755f7966786..5b65aac92b81c2d28ad6efe19d6ba38e1d0c59bc 100644 (file)
@@ -50,8 +50,8 @@ pub enum DelimToken {
 }
 
 impl DelimToken {
-    pub fn len(&self) -> u32 {
-        if *self == NoDelim { 0 } else { 1 }
+    pub fn len(self) -> usize {
+        if self == NoDelim { 0 } else { 1 }
     }
 }
 
@@ -152,9 +152,6 @@ pub enum Token {
     // Can be expanded into several tokens.
     /// Doc comment
     DocComment(ast::Name),
-    // In left-hand-sides of MBE macros:
-    /// Parse a nonterminal (name to bind, name of NT)
-    MatchNt(ast::Ident, ast::Ident),
     // In right-hand-sides of MBE macros:
     /// A syntactic variable that will be filled in by macro expansion.
     SubstNt(ast::Ident),
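
For orientation, the removed `MatchNt` token encoded matcher fragments of the form `$name:kind`; after this change they are modeled only in `ext::tt::quoted` rather than as ordinary tokens. A hedged illustration of the surface syntax such fragments describe (plain user code, unrelated to the compiler's internal representation):

```rust
// `$e:expr` on the left-hand side is the kind of matcher fragment that
// token::MatchNt used to represent.
macro_rules! double {
    ($e:expr) => { $e * 2 };
}

fn main() {
    assert_eq!(double!(21), 42);
}
```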
index f8f1820d0b97ea47f3f566e49d877c8c13c54ae8..ec962d03458d1753c905e7cffd90269efb092f7d 100644 (file)
@@ -271,7 +271,6 @@ pub fn token_to_string(tok: &Token) -> String {
         /* Other */
         token::DocComment(s)        => s.to_string(),
         token::SubstNt(s)           => format!("${}", s),
-        token::MatchNt(s, t)        => format!("${}:{}", s, t),
         token::Eof                  => "<eof>".to_string(),
         token::Whitespace           => " ".to_string(),
         token::Comment              => "/* */".to_string(),
@@ -1475,20 +1474,6 @@ pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> {
                 space(&mut self.s)?;
                 word(&mut self.s, &token_to_string(&delimed.close_token()))
             },
-            TokenTree::Sequence(_, ref seq) => {
-                word(&mut self.s, "$(")?;
-                for tt_elt in &seq.tts {
-                    self.print_tt(tt_elt)?;
-                }
-                word(&mut self.s, ")")?;
-                if let Some(ref tk) = seq.separator {
-                    word(&mut self.s, &token_to_string(tk))?;
-                }
-                match seq.op {
-                    tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"),
-                    tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"),
-                }
-            }
         }
     }
 
index 86b0fcebeb21ecf0d37548bf007e12ea3d4c378b..666540467213326d13bc18a0ac7a2691793897e6 100644 (file)
@@ -12,9 +12,7 @@
 //!
 //! TokenStreams represent syntactic objects before they are converted into ASTs.
 //! A `TokenStream` is, roughly speaking, a sequence (eg stream) of `TokenTree`s,
-//! which are themselves either a single Token, a Delimited subsequence of tokens,
-//! or a SequenceRepetition specifier (for the purpose of sequence generation during macro
-//! expansion).
+//! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
 //!
 //! ## Ownership
 //! TokenStreams are persistent data structures constructed as ropes with reference
 use syntax_pos::{BytePos, Span, DUMMY_SP};
 use codemap::Spanned;
 use ext::base;
-use ext::tt::macro_parser;
+use ext::tt::{macro_parser, quoted};
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::{self, Directory};
-use parse::token::{self, Token, Lit, Nonterminal};
+use parse::token::{self, Token, Lit};
 use print::pprust;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
 use symbol::Symbol;
@@ -64,7 +62,7 @@ pub fn close_token(&self) -> token::Token {
     pub fn open_tt(&self, span: Span) -> TokenTree {
         let open_span = match span {
             DUMMY_SP => DUMMY_SP,
-            _ => Span { hi: span.lo + BytePos(self.delim.len()), ..span },
+            _ => Span { hi: span.lo + BytePos(self.delim.len() as u32), ..span },
         };
         TokenTree::Token(open_span, self.open_token())
     }
@@ -73,7 +71,7 @@ pub fn open_tt(&self, span: Span) -> TokenTree {
     pub fn close_tt(&self, span: Span) -> TokenTree {
         let close_span = match span {
             DUMMY_SP => DUMMY_SP,
-            _ => Span { lo: span.hi - BytePos(self.delim.len()), ..span },
+            _ => Span { lo: span.hi - BytePos(self.delim.len() as u32), ..span },
         };
         TokenTree::Token(close_span, self.close_token())
     }
@@ -84,27 +82,6 @@ pub fn subtrees(&self) -> &[TokenTree] {
     }
 }
 
-/// A sequence of token trees
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct SequenceRepetition {
-    /// The sequence of token trees
-    pub tts: Vec<TokenTree>,
-    /// The optional separator
-    pub separator: Option<token::Token>,
-    /// Whether the sequence can be repeated zero (*), or one or more times (+)
-    pub op: KleeneOp,
-    /// The number of `MatchNt`s that appear in the sequence (and subsequences)
-    pub num_captures: usize,
-}
-
-/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
-/// for token sequences.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum KleeneOp {
-    ZeroOrMore,
-    OneOrMore,
-}
-
 /// When the main rust parser encounters a syntax-extension invocation, it
 /// parses the arguments to the invocation as a token-tree. This is a very
 /// loose structure, such that all sorts of different AST-fragments can
@@ -123,10 +100,6 @@ pub enum TokenTree {
     Token(Span, token::Token),
     /// A delimited sequence of token trees
     Delimited(Span, Rc<Delimited>),
-
-    // This only makes sense in MBE macros.
-    /// A kleene-style repetition sequence with a span
-    Sequence(Span, Rc<SequenceRepetition>),
 }
 
 impl TokenTree {
@@ -138,15 +111,10 @@ pub fn len(&self) -> usize {
                     AttrStyle::Inner => 3,
                 }
             }
-            TokenTree::Token(_, token::Interpolated(ref nt)) => {
-                if let Nonterminal::NtTT(..) = **nt { 1 } else { 0 }
-            },
-            TokenTree::Token(_, token::MatchNt(..)) => 3,
             TokenTree::Delimited(_, ref delimed) => match delimed.delim {
                 token::NoDelim => delimed.tts.len(),
                 _ => delimed.tts.len() + 2,
             },
-            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
             TokenTree::Token(..) => 0,
         }
     }
@@ -197,30 +165,12 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
                 }
                 delimed.tts[index - 1].clone()
             }
-            (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
-                let v = [TokenTree::Token(sp, token::SubstNt(name)),
-                         TokenTree::Token(sp, token::Colon),
-                         TokenTree::Token(sp, token::Ident(kind))];
-                v[index].clone()
-            }
-            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
             _ => panic!("Cannot expand a token tree"),
         }
     }
 
-    /// Returns the `Span` corresponding to this token tree.
-    pub fn get_span(&self) -> Span {
-        match *self {
-            TokenTree::Token(span, _) => span,
-            TokenTree::Delimited(span, _) => span,
-            TokenTree::Sequence(span, _) => span,
-        }
-    }
-
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt,
-                 mtch: &[TokenTree],
-                 tts: &[TokenTree])
+    pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree])
                  -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let directory = Directory {
@@ -252,9 +202,7 @@ pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
     /// Retrieve the TokenTree's span.
     pub fn span(&self) -> Span {
         match *self {
-            TokenTree::Token(sp, _) |
-            TokenTree::Delimited(sp, _) |
-            TokenTree::Sequence(sp, _) => sp,
+            TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp,
         }
     }
 
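
Per the updated module docs above, a `TokenTree` is now either a single `Token` or a `Delimited` subsequence; the `Sequence`/`SequenceRepetition` machinery is gone from this type. A simplified, standalone stand-in (names and field types assumed, not the real compiler definitions) that mirrors the resulting two-variant shape:

```rust
// A minimal sketch of the post-change shape of TokenTree, for orientation
// only; the real types live in syntax::tokenstream.
use std::rc::Rc;

#[derive(Clone, Debug)]
struct Span { lo: u32, hi: u32 }

#[derive(Clone, Debug)]
enum Token { Ident(String), Plus }

#[derive(Clone, Debug)]
struct Delimited { delim: char, tts: Vec<TokenTree> }

#[derive(Clone, Debug)]
enum TokenTree {
    // A single token with its span.
    Token(Span, Token),
    // A delimited subsequence of token trees.
    Delimited(Span, Rc<Delimited>),
}

fn main() {
    // `( a + a )` as a delimited tree of single-token trees.
    let inner = vec![
        TokenTree::Token(Span { lo: 1, hi: 2 }, Token::Ident("a".into())),
        TokenTree::Token(Span { lo: 3, hi: 4 }, Token::Plus),
        TokenTree::Token(Span { lo: 5, hi: 6 }, Token::Ident("a".into())),
    ];
    let tree = TokenTree::Delimited(
        Span { lo: 0, hi: 7 },
        Rc::new(Delimited { delim: '(', tts: inner }),
    );
    println!("{:?}", tree);
}
```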
index 7533171b08556df082f2fcabfd27dbcb11e3e3d0..f92cde4019f67b06e9147c6264badd7e4e41cfac 100644 (file)
@@ -79,7 +79,6 @@ macro_rules! register {
             quote_pat: expand_quote_pat,
             quote_arm: expand_quote_arm,
             quote_stmt: expand_quote_stmt,
-            quote_matcher: expand_quote_matcher,
             quote_attr: expand_quote_attr,
             quote_arg: expand_quote_arg,
             quote_block: expand_quote_block,
index 726af9864b48215022022f436320d276c500a516..63e1c6f16b3e6be4b8ac4727833ef6f6cf182d9c 100644 (file)
@@ -54,8 +54,6 @@ pub fn main() {
     //~^ ERROR cannot find macro `quote_arm!` in this scope
     let x = quote_stmt!(ecx, 3);
     //~^ ERROR cannot find macro `quote_stmt!` in this scope
-    let x = quote_matcher!(ecx, 3);
-    //~^ ERROR cannot find macro `quote_matcher!` in this scope
     let x = quote_attr!(ecx, 3);
     //~^ ERROR cannot find macro `quote_attr!` in this scope
     let x = quote_arg!(ecx, 3);
diff --git a/src/test/compile-fail/feature-gate-cfg-target-has-atomic.rs b/src/test/compile-fail/feature-gate-cfg-target-has-atomic.rs
new file mode 100644 (file)
index 0000000..aa27f89
--- /dev/null
@@ -0,0 +1,86 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type="rlib"]
+#![no_core]
+
+extern "rust-intrinsic" {
+    fn atomic_xadd<T>(dst: *mut T, src: T) -> T;
+}
+
+#[lang = "sized"]
+trait Sized {}
+#[lang = "copy"]
+trait Copy {}
+
+#[cfg(target_has_atomic = "8")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_u8(x: *mut u8) {
+    atomic_xadd(x, 1);
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "8")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_i8(x: *mut i8) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "16")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_u16(x: *mut u16) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "16")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_i16(x: *mut i16) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "32")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_u32(x: *mut u32) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "32")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_i32(x: *mut i32) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "64")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_u64(x: *mut u64) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "64")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_i64(x: *mut i64) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "ptr")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_usize(x: *mut usize) {
+    atomic_xadd(x, 1);
+}
+#[cfg(target_has_atomic = "ptr")]
+//~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+pub unsafe fn atomic_isize(x: *mut isize) {
+    atomic_xadd(x, 1);
+}
+
+fn main() {
+    cfg!(target_has_atomic = "8");
+    //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+    cfg!(target_has_atomic = "16");
+    //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+    cfg!(target_has_atomic = "32");
+    //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+    cfg!(target_has_atomic = "64");
+    //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+    cfg!(target_has_atomic = "ptr");
+    //~^ ERROR `cfg(target_has_atomic)` is experimental and subject to change (see issue #32976)
+}
diff --git a/src/test/compile-fail/feature-gate-unboxed-closures.rs b/src/test/compile-fail/feature-gate-unboxed-closures.rs
new file mode 100644 (file)
index 0000000..4005021
--- /dev/null
@@ -0,0 +1,24 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Test;
+
+impl FnOnce<(u32, u32)> for Test {
+    type Output = u32;
+
+    extern "rust-call" fn call_once(self, (a, b): (u32, u32)) -> u32 {
+        a + b
+    }
+    //~^^^ ERROR rust-call ABI is subject to change (see issue #29625)
+}
+
+fn main() {
+    assert_eq!(Test(1u32, 2u32), 3u32);
+}
index d890d02a910478f978cdca71137bbd28ef05bedc..5f54f269c6c55694fb7596043f44abf457c81683 100644 (file)
@@ -8,9 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-macro_rules! m { ($t:tt) => { $t } }
+macro_rules! m { ($($t:tt)*) => { $($t)* } }
 
 fn main() {
-    m!($t); //~ ERROR unknown macro variable
-            //~| ERROR expected expression
+    m!($t); //~ ERROR expected expression
 }
diff --git a/src/test/compile-fail/issue-39404.rs b/src/test/compile-fail/issue-39404.rs
new file mode 100644 (file)
index 0000000..0168ae7
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(missing_fragment_specifier)] //~ NOTE lint level defined here
+
+macro_rules! m { ($i) => {} }
+//~^ ERROR missing fragment specifier
+//~| WARN previously accepted
+//~| NOTE issue #40107
+
+fn main() {}
diff --git a/src/test/compile-fail/issue-39709.rs b/src/test/compile-fail/issue-39709.rs
deleted file mode 100644 (file)
index 0f66fe8..0000000
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
-    println!("{}", { macro_rules! x { ($()*) => {} } 33 });
-    //~^ ERROR no syntax variables matched as repeating at this depth
-}
-
index 78f95e365c44b7ef8510c649a35b1fe0ebf0d038..82a5aa487291320cd4bc27dda7c77b30e7a9b459 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 macro_rules! foo {
-    ($a:expr) => $a; //~ ERROR macro rhs must be delimited
+    ($a:expr) => a; //~ ERROR macro rhs must be delimited
 }
 
 fn main() {
index 969f1500717d73f41cabba776782a741105b204f..7255e7d00b6115c1a60caa9f279a969fb3c9cfe1 100644 (file)
@@ -17,16 +17,5 @@ macro_rules! foo {
 
 foo!(Box);
 
-macro_rules! bar {
-    ($x:tt) => {
-        macro_rules! baz {
-            ($x:tt, $y:tt) => { ($x, $y) }
-        }
-    }
-}
-
 #[rustc_error]
-fn main() { //~ ERROR compilation successful
-    bar!($y);
-    let _: (i8, i16) = baz!(0i8, 0i16);
-}
+fn main() {} //~ ERROR compilation successful
index 5d81e21f05684e1f816522c214a289bc287458b7..0b437be5393edac4aafdf4b3f4b36dc0c57382a2 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 macro_rules! my_precioooous {
-    $($t:tt)* => (1); //~ ERROR invalid macro matcher
+    t => (1); //~ ERROR invalid macro matcher
 }
 
 fn main() {
index 50bcd53ecb82c55e6777dfc7471ddf535354848b..c3fe1de895df9dcf6ef9e3dfa4bccc596662eecd 100644 (file)
 
 use std::mem;
 
+unsafe fn foo() -> (isize, *const (), Option<fn()>) {
+    let i = mem::transmute(bar);
+    //~^ ERROR is zero-sized and can't be transmuted
+    //~^^ NOTE cast with `as` to a pointer instead
+
+    let p = mem::transmute(foo);
+    //~^ ERROR is zero-sized and can't be transmuted
+    //~^^ NOTE cast with `as` to a pointer instead
+
+    let of = mem::transmute(main);
+    //~^ ERROR is zero-sized and can't be transmuted
+    //~^^ NOTE cast with `as` to a pointer instead
+
+    (i, p, of)
+}
+
 unsafe fn bar() {
-    // Error, still, if the resulting type is not pointer-sized.
+    // Error as usual if the resulting type is not pointer-sized.
     mem::transmute::<_, u8>(main);
     //~^ ERROR transmute called with differently sized types
+    //~^^ NOTE transmuting between 0 bits and 8 bits
+
+    mem::transmute::<_, *mut ()>(foo);
+    //~^ ERROR is zero-sized and can't be transmuted
+    //~^^ NOTE cast with `as` to a pointer instead
+
+    mem::transmute::<_, fn()>(bar);
+    //~^ ERROR is zero-sized and can't be transmuted
+    //~^^ NOTE cast with `as` to a pointer instead
+
+    // No error if a coercion would otherwise occur.
+    mem::transmute::<fn(), usize>(main);
+}
+
+unsafe fn baz() {
+    mem::transmute::<_, *mut ()>(Some(foo));
+    //~^ ERROR is zero-sized and can't be transmuted
+    //~^^ NOTE cast with `as` to a pointer instead
+
+    mem::transmute::<_, fn()>(Some(bar));
+    //~^ ERROR is zero-sized and can't be transmuted
+    //~^^ NOTE cast with `as` to a pointer instead
+
+    mem::transmute::<_, Option<fn()>>(Some(baz));
+    //~^ ERROR is zero-sized and can't be transmuted
+    //~^^ NOTE cast with `as` to a pointer instead
+
+    // No error if a coercion would otherwise occur.
+    mem::transmute::<Option<fn()>, usize>(Some(main));
 }
 
 fn main() {
     unsafe {
+        foo();
         bar();
+        baz();
     }
 }
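
The new NOTEs above suggest casting with `as` instead of transmuting a zero-sized fn item. A hedged sketch of that fix in ordinary user code:

```rust
// Cast the zero-sized fn item to a function pointer with `as`; no
// transmute is needed to reach a pointer or an address afterwards.
fn answer() -> u32 { 42 }

fn main() {
    let p = answer as fn() -> u32; // fn item -> fn pointer via `as`
    let addr = p as usize;         // fn pointer -> integer, still no transmute
    assert_eq!(p(), 42);
    assert!(addr != 0);
}
```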
diff --git a/src/test/compile-fail/transmute-from-fn-item-types-lint.rs b/src/test/compile-fail/transmute-from-fn-item-types-lint.rs
deleted file mode 100644 (file)
index 08e660e..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![deny(transmute_from_fn_item_types)]
-
-use std::mem;
-
-unsafe fn foo() -> (isize, *const (), Option<fn()>) {
-    let i = mem::transmute(bar);
-    //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting
-    //~^^ WARNING was previously accepted
-
-    let p = mem::transmute(foo);
-    //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting
-    //~^^ WARNING was previously accepted
-
-    let of = mem::transmute(main);
-    //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting
-    //~^^ WARNING was previously accepted
-
-    (i, p, of)
-}
-
-unsafe fn bar() {
-    mem::transmute::<_, *mut ()>(foo);
-    //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting
-    //~^^ WARNING was previously accepted
-
-    mem::transmute::<_, fn()>(bar);
-    //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting
-    //~^^ WARNING was previously accepted
-
-    // No error if a coercion would otherwise occur.
-    mem::transmute::<fn(), usize>(main);
-}
-
-fn main() {
-    unsafe {
-        foo();
-        bar();
-    }
-}
index e3c17af82aab403d2eda1ce51af24d796447e050..15d491719a6d58dd8edf2350edd32d3309d98fee 100644 (file)
@@ -12,7 +12,9 @@
 
 macro_rules! foo {
     { $+ } => { //~ ERROR expected identifier, found `+`
+                //~^ ERROR missing fragment specifier
         $(x)(y) //~ ERROR expected `*` or `+`
-                //~^ ERROR no rules expected the token `)`
     }
 }
+
+foo!();
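
For contrast with the malformed matchers this test exercises (a bare `$+` and a repetition without `*` or `+`), a hedged sketch of the well-formed shape: a repetition names a variable, gives it a fragment specifier, and closes the `$( ... )` group with `*` or `+`, optionally after a separator token:

```rust
// A well-formed matcher with a fragment specifier and a `,`-separated
// one-or-more repetition.
macro_rules! sum {
    ($($x:expr),+) => { 0 $(+ $x)+ };
}

fn main() {
    assert_eq!(sum!(1, 2, 3), 6);
}
```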
diff --git a/src/test/run-make/fpic/Makefile b/src/test/run-make/fpic/Makefile
new file mode 100644 (file)
index 0000000..6de58c2
--- /dev/null
@@ -0,0 +1,13 @@
+-include ../tools.mk
+
+# Test for #39529.
+# `-z text` causes ld to error if there are any non-PIC sections
+
+ifeq ($(UNAME),Darwin)
+all:
+else ifdef IS_WINDOWS
+all:
+else
+all:
+       $(RUSTC) hello.rs -C link-args=-Wl,-z,text
+endif
diff --git a/src/test/run-make/fpic/hello.rs b/src/test/run-make/fpic/hello.rs
new file mode 100644 (file)
index 0000000..a9e231b
--- /dev/null
@@ -0,0 +1,11 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() { }
index 2b3857048f36732acc33f5c524aa8cd6f7fa978c..3db69f2167cc6e4ba7bc5fb89456a7761c532004 100644 (file)
@@ -23,6 +23,7 @@
 use syntax::tokenstream::{TokenTree};
 use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
 use syntax::ext::build::AstBuilder;
+use syntax::ext::tt::quoted;
 use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
 use syntax::ext::tt::macro_parser::{Success, Failure, Error};
 use syntax::ext::tt::macro_parser::parse_failure_msg;
@@ -33,7 +34,8 @@
 fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
         -> Box<MacResult + 'static> {
 
-    let mbe_matcher = quote_matcher!(cx, $matched:expr, $($pat:pat)|+);
+    let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+);
+    let mbe_matcher = quoted::parse(&mbe_matcher, true, cx.parse_sess);
     let map = match TokenTree::parse(cx, &mbe_matcher, args) {
         Success(map) => map,
         Failure(_, tok) => {
index 5383b11cf5363d720858fb93248c2d19a4eacea7..822b2c9b93b4a070d921f681fec3c50818b0c8e8 100644 (file)
 #![feature(plugin)]
 #![plugin(procedural_mbe_matching)]
 
-#[no_link]
-extern crate procedural_mbe_matching;
-
 pub fn main() {
-    let abc = 123u32;
     assert_eq!(matches!(Some(123), None | Some(0)), false);
     assert_eq!(matches!(Some(123), None | Some(123)), true);
     assert_eq!(matches!(true, true), true);
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-39889.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/issue-39889.rs
new file mode 100644 (file)
index 0000000..9094310
--- /dev/null
@@ -0,0 +1,27 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// force-host
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+use proc_macro::TokenStream;
+
+#[proc_macro_derive(Issue39889)]
+pub fn f(_input: TokenStream) -> TokenStream {
+    let rules = r#"
+        macro_rules! id {
+            ($($tt:tt)*) => { $($tt)* };
+        }
+    "#;
+    rules.parse().unwrap()
+}
diff --git a/src/test/run-pass-fulldeps/proc-macro/issue-39889.rs b/src/test/run-pass-fulldeps/proc-macro/issue-39889.rs
new file mode 100644 (file)
index 0000000..0561011
--- /dev/null
@@ -0,0 +1,22 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:issue-39889.rs
+
+#![feature(proc_macro)]
+#![allow(unused)]
+
+extern crate issue_39889;
+use issue_39889::Issue39889;
+
+#[derive(Issue39889)]
+struct S;
+
+fn main() {}
index 9e9b7ce5bf29d2f80de3631ea1b6da9624991272..8e6a69cb58479e1af384cb19729e1554ca4d12dc 100644 (file)
@@ -37,7 +37,6 @@ fn syntax_extension(cx: &ExtCtxt) {
 
     let _l: P<syntax::ast::Ty> = quote_ty!(cx, &isize);
 
-    let _m: Vec<syntax::tokenstream::TokenTree> = quote_matcher!(cx, $($foo:tt,)* bar);
     let _n: syntax::ast::Attribute = quote_attr!(cx, #![cfg(foo, bar = "baz")]);
 
     let _o: Option<P<syntax::ast::Item>> = quote_item!(cx, fn foo<T: ?Sized>() {});
diff --git a/src/test/run-pass/enum-layout-optimization.rs b/src/test/run-pass/enum-layout-optimization.rs
new file mode 100644 (file)
index 0000000..a562761
--- /dev/null
@@ -0,0 +1,59 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that we will do various size optimizations to enum layout, but
+// *not* if `#[repr(u8)]` or `#[repr(C)]` is passed. See also #40029.
+
+#![allow(dead_code)]
+
+use std::mem;
+
+enum Nullable<T> {
+    Alive(T),
+    Dropped,
+}
+
+#[repr(u8)]
+enum NullableU8<T> {
+    Alive(T),
+    Dropped,
+}
+
+#[repr(C)]
+enum NullableC<T> {
+    Alive(T),
+    Dropped,
+}
+
+struct StructNewtype<T>(T);
+
+#[repr(C)]
+struct StructNewtypeC<T>(T);
+
+enum EnumNewtype<T> { Variant(T) }
+
+#[repr(u8)]
+enum EnumNewtypeU8<T> { Variant(T) }
+
+#[repr(C)]
+enum EnumNewtypeC<T> { Variant(T) }
+
+fn main() {
+    assert!(mem::size_of::<Box<i32>>() == mem::size_of::<Nullable<Box<i32>>>());
+    assert!(mem::size_of::<Box<i32>>() < mem::size_of::<NullableU8<Box<i32>>>());
+    assert!(mem::size_of::<Box<i32>>() < mem::size_of::<NullableC<Box<i32>>>());
+
+    assert!(mem::size_of::<i32>() == mem::size_of::<StructNewtype<i32>>());
+    assert!(mem::size_of::<i32>() == mem::size_of::<StructNewtypeC<i32>>());
+
+    assert!(mem::size_of::<i32>() == mem::size_of::<EnumNewtype<i32>>());
+    assert!(mem::size_of::<i32>() < mem::size_of::<EnumNewtypeU8<i32>>());
+    assert!(mem::size_of::<i32>() < mem::size_of::<EnumNewtypeC<i32>>());
+}
diff --git a/src/test/run-pass/issue-39709.rs b/src/test/run-pass/issue-39709.rs
new file mode 100644 (file)
index 0000000..ebca931
--- /dev/null
@@ -0,0 +1,14 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+    println!("{}", { macro_rules! x { ($(t:tt)*) => {} } 33 });
+}
+
diff --git a/src/test/run-pass/transmute-from-fn-item-types.rs b/src/test/run-pass/transmute-from-fn-item-types.rs
deleted file mode 100644 (file)
index 574a90e..0000000
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![allow(transmute_from_fn_item_types)]
-
-use std::mem;
-
-fn main() {
-    unsafe {
-        let u = mem::transmute(main);
-        let p = mem::transmute(main);
-        let f = mem::transmute(main);
-        let tuple: (usize, *mut (), fn()) = (u, p, f);
-        assert_eq!(mem::transmute::<_, [usize; 3]>(tuple), [main as usize; 3]);
-
-        mem::transmute::<_, usize>(main);
-        mem::transmute::<_, *mut ()>(main);
-        mem::transmute::<_, fn()>(main);
-    }
-}
index 8d3f9b59bb2eebe0e25cf48f66bb659b267675e5..d4119f5d351c16fecae2c652bae61a8282b16bc1 100644 (file)
 pub trait Foo {
     // @has assoc_consts/trait.Foo.html '//*[@class="rust trait"]' \
     //      'const FOO: usize;'
-    // @has - '//*[@id="associatedconstant.FOO"]' 'const FOO'
-    const FOO: usize;
+    // @has - '//*[@id="associatedconstant.FOO"]' 'const FOO: usize'
+    // @has - '//*[@class="docblock"]' 'FOO: usize = 12'
+    const FOO: usize = 12;
 }
 
 pub struct Bar;
 
 impl Bar {
     // @has assoc_consts/struct.Bar.html '//*[@id="associatedconstant.BAR"]' \
-    //      'const BAR: usize = 3'
+    //      'const BAR: usize'
+    // @has - '//*[@class="docblock"]' 'BAR: usize = 3'
     pub const BAR: usize = 3;
 }
index fe40ce2bd7ddbfd0b494b0f45ea02ca274f091a8..e82e93230aa07bf587a993254928cffffbd15491 100644 (file)
 #![unstable(feature="test", issue="27759")]
 
 // @has issue_27759/unstable/index.html
-// @has - '<code>test</code>'
+// @has - '<code>test </code>'
 // @has - '<a href="http://issue_url/27759">#27759</a>'
 #[unstable(feature="test", issue="27759")]
 pub mod unstable {
     // @has issue_27759/unstable/fn.issue.html
-    // @has - '<code>test_function</code>'
+    // @has - '<code>test_function </code>'
     // @has - '<a href="http://issue_url/1234567890">#1234567890</a>'
     #[unstable(feature="test_function", issue="1234567890")]
     pub fn issue() {}
index 0db92a491ed180d18b654c76994ae97f0b309a8a..493c08693e94a6d1e317f5d69000dff1d4792780 100644 (file)
@@ -16,7 +16,8 @@ pub trait Bar {
     // @has - '//*[@id="associatedtype.Bar"]' 'type Bar = ()'
     // @has - '//*[@href="#associatedtype.Bar"]' 'Bar'
     type Bar = ();
-    // @has - '//*[@id="associatedconstant.Baz"]' 'const Baz: usize = 7'
+    // @has - '//*[@id="associatedconstant.Baz"]' 'const Baz: usize'
+    // @has - '//*[@class="docblock"]' 'Baz: usize = 7'
     // @has - '//*[@href="#associatedconstant.Baz"]' 'Baz'
     const Baz: usize = 7;
     // @has - '//*[@id="tymethod.bar"]' 'fn bar'
index 5cca3708292012159fd47263cd54d9f72cc19346..6d1f8bc1cf9bac27ccfcd6421b6dceea06d9e32c 100644 (file)
 
 // @has issue_32374/struct.T.html '//*[@class="stab deprecated"]' \
 //      'Deprecated since 1.0.0: text'
-// @has - '<code>test</code>'
+// @has - '<code>test </code>'
 // @has - '<a href="http://issue_url/32374">#32374</a>'
 // @matches issue_32374/struct.T.html '//*[@class="stab unstable"]' \
-//      '🔬 This is a nightly-only experimental API.   \(test #32374\)$'
+//      '🔬 This is a nightly-only experimental API.  \(test #32374\)$'
 #[rustc_deprecated(since = "1.0.0", reason = "text")]
 #[unstable(feature = "test", issue = "32374")]
 pub struct T;
 // @has issue_32374/struct.U.html '//*[@class="stab deprecated"]' \
 //      'Deprecated since 1.0.0: deprecated'
 // @has issue_32374/struct.U.html '//*[@class="stab unstable"]' \
-//      '🔬 This is a nightly-only experimental API.  (test #32374)'
+//      '🔬 This is a nightly-only experimental API. (test #32374)'
 // @has issue_32374/struct.U.html '//details' \
-//      '🔬 This is a nightly-only experimental API.  (test #32374)'
+//      '🔬 This is a nightly-only experimental API. (test #32374)'
 // @has issue_32374/struct.U.html '//summary' \
-//      '🔬 This is a nightly-only experimental API.  (test #32374)'
+//      '🔬 This is a nightly-only experimental API. (test #32374)'
 // @has issue_32374/struct.U.html '//details/p' \
 //      'unstable'
 #[rustc_deprecated(since = "1.0.0", reason = "deprecated")]
index c6da6b0575b87c3bef842f051525a3ae3df97920..a34ee908ef295adff271c5ef0aa09044fc29ac78 100644 (file)
@@ -28,18 +28,40 @@ pub trait T<X> {
             fn ignore(_: &X) {}
             const C: X;
             // @has issue_33302/trait.T.html \
-            //        '//*[@class="rust trait"]' 'const D: i32 = 4 * 4;'
-            // @has - '//*[@id="associatedconstant.D"]' 'const D: i32 = 4 * 4'
+            //        '//*[@class="rust trait"]' 'const D: i32'
+            // @has - '//*[@class="docblock"]' 'D: i32 = 4 * 4'
+            // @has - '//*[@id="associatedconstant.D"]' 'const D: i32'
             const D: i32 = ($n * $n);
         }
 
         // @has issue_33302/struct.S.html \
         //        '//h3[@class="impl"]' 'impl T<[i32; 16]> for S'
-        // @has - '//*[@id="associatedconstant.C"]' 'const C: [i32; 16] = [0; 4 * 4]'
-        // @has - '//*[@id="associatedconstant.D"]' 'const D: i32 = 4 * 4'
+        // @has - '//*[@id="associatedconstant.C"]' 'const C: [i32; 16]'
+        // @has - '//*[@id="associatedconstant.D"]' 'const D: i32'
+        // @has - '//*[@class="docblock"]' 'C: [i32; 16] = [0; 4 * 4]'
         impl T<[i32; ($n * $n)]> for S {
             const C: [i32; ($n * $n)] = [0; ($n * $n)];
         }
+
+        // @has issue_33302/struct.S.html \
+        //        '//h3[@class="impl"]' 'impl T<[i32; 16]> for S'
+        // @has - '//*[@id="associatedconstant.C-1"]' 'const C: (i32,)'
+        // @has - '//*[@id="associatedconstant.D-1"]' 'const D: i32'
+        // @has - '//*[@class="docblock"]' 'C: (i32,) = (4,)'
+        impl T<(i32,)> for S {
+            const C: (i32,) = ($n,);
+        }
+
+        // @has issue_33302/struct.S.html \
+        //        '//h3[@class="impl"]' 'impl T<(i32, i32)> for S'
+        // @has - '//*[@id="associatedconstant.C-2"]' 'const C: (i32, i32)'
+        // @has - '//*[@id="associatedconstant.D-2"]' 'const D: i32'
+        // @has - '//*[@class="docblock"]' 'C: (i32, i32) = (4, 4)'
+        // @has - '//*[@class="docblock"]' 'D: i32 = 4 / 4'
+        impl T<(i32, i32)> for S {
+            const C: (i32, i32) = ($n, $n);
+            const D: i32 = ($n / $n);
+        }
     }
 }
 
index f7a452d9f7bd7fef1311a739827ddf7ab7f4a5f3..40318141e04fa57716c45c673ab798eb33357789 100644 (file)
@@ -8,5 +8,5 @@ license = "MIT/Apache-2.0"
 clap = "2.19.3"
 
 [dependencies.mdbook]
-version = "0.0.16"
+version = "0.0.17"
 default-features = false
index 2c81382bc9b08820b9f398b2ef56619610a0df54..8025477684931bf699acdd638a30812faa9ff1d7 100644 (file)
@@ -168,9 +168,8 @@ pub fn check(path: &Path, bad: &mut bool) {
     // FIXME get this whitelist empty.
     let whitelist = vec![
         "abi_ptx", "simd",
-        "cfg_target_has_atomic",
-        "unboxed_closures", "stmt_expr_attributes",
-        "cfg_target_thread_local", "unwind_attributes"
+        "stmt_expr_attributes",
+        "cfg_target_thread_local", "unwind_attributes",
     ];
 
     // Only check the number of lang features.