git.lizzy.rs Git - rust.git/commitdiff
Auto merge of #34031 - jseyfried:fix_cfg_bug, r=eddyb
author bors <bors@rust-lang.org>
Sat, 4 Jun 2016 23:48:29 +0000 (16:48 -0700)
committer bors <bors@rust-lang.org>
Sat, 4 Jun 2016 23:48:29 +0000 (16:48 -0700)
Fix a regression in the configuration (cfg) folder

This fixes #34028, a regression caused by #33706 in which unconfigured impl items generated by a macro in impl item position were not removed. A minimal sketch of the scenario is given below.
r? @nrc
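
A minimal sketch of the regression scenario described above, under stated assumptions: the `make_method!` macro and the `#[cfg(any())]` always-false predicate are illustrative inventions, not the PR's own test case. The point is that an impl item expanded from a macro in impl item position and then cfg'd off should be stripped by the configuration folder rather than left behind.

```rust
// Hypothetical reproduction of the described regression, not the PR's test.
// `cfg(any())` with no predicates is always false, so the expanded method
// must be removed during cfg-stripping.
macro_rules! make_method {
    () => {
        #[cfg(any())]
        fn stripped_out(&self) {}
    };
}

struct S;

impl S {
    // Macro invocation in impl-item position; its unconfigured expansion
    // should be removed, not cause a compile error.
    make_method!();
}

fn main() {
    let _s = S;
}
```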

200 files changed:
.travis.yml
src/bootstrap/bootstrap.py
src/doc/book/advanced-linking.md
src/doc/book/choosing-your-guarantees.md
src/doc/book/documentation.md
src/doc/book/error-handling.md
src/doc/book/functions.md
src/doc/book/guessing-game.md
src/doc/book/strings.md
src/doc/book/testing.md
src/doc/book/using-rust-without-the-standard-library.md
src/doc/book/vectors.md
src/doc/nomicon/README.md
src/etc/Dockerfile [new file with mode: 0644]
src/liballoc/raw_vec.rs
src/libcollections/btree/map.rs
src/libcollections/btree/node.rs
src/libcollections/btree/set.rs
src/libcollections/str.rs
src/libcollectionstest/btree/map.rs
src/libcollectionstest/btree/mod.rs
src/libcollectionstest/btree/set.rs
src/libcollectionstest/lib.rs
src/libcollectionstest/str.rs
src/libcore/cell.rs
src/libcore/fmt/num.rs
src/libcore/mem.rs
src/libcore/num/isize.rs
src/libcore/num/mod.rs
src/libcore/num/usize.rs
src/libcore/num/wrapping.rs
src/libcore/raw.rs
src/libcore/str/mod.rs
src/libcoretest/cell.rs
src/libcoretest/mem.rs
src/liblibc
src/librustc/dep_graph/dep_node.rs
src/librustc/dep_graph/query.rs
src/librustc/dep_graph/raii.rs
src/librustc/dep_graph/visit.rs
src/librustc/diagnostics.rs
src/librustc/hir/intravisit.rs
src/librustc/infer/error_reporting.rs
src/librustc/infer/higher_ranked/mod.rs
src/librustc/infer/mod.rs
src/librustc/infer/region_inference/graphviz.rs
src/librustc/infer/region_inference/mod.rs
src/librustc/infer/type_variable.rs
src/librustc/middle/resolve_lifetime.rs
src/librustc/session/config.rs
src/librustc/traits/fulfill.rs
src/librustc/traits/mod.rs
src/librustc/traits/project.rs
src/librustc/traits/select.rs
src/librustc/ty/flags.rs
src/librustc/ty/layout.rs
src/librustc/ty/mod.rs
src/librustc/ty/structural_impls.rs
src/librustc/ty/sty.rs
src/librustc/ty/util.rs
src/librustc/util/ppaux.rs
src/librustc_const_eval/diagnostics.rs
src/librustc_const_eval/eval.rs
src/librustc_const_math/int.rs
src/librustc_const_math/is.rs
src/librustc_const_math/us.rs
src/librustc_data_structures/lib.rs
src/librustc_data_structures/snapshot_map/mod.rs [new file with mode: 0644]
src/librustc_data_structures/snapshot_map/test.rs [new file with mode: 0644]
src/librustc_driver/driver.rs
src/librustc_driver/test.rs
src/librustc_incremental/assert_dep_graph.rs
src/librustc_incremental/persist/directory.rs
src/librustc_incremental/persist/hash.rs
src/librustc_incremental/persist/load.rs
src/librustc_incremental/persist/save.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/tydecode.rs
src/librustc_metadata/tyencode.rs
src/librustc_mir/build/matches/test.rs
src/librustc_resolve/lib.rs
src/librustc_trans/common.rs
src/librustc_trans/expr.rs
src/librustc_trans/intrinsic.rs
src/librustc_trans/type_.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/diagnostics.rs
src/librustc_typeck/variance/constraints.rs
src/librustdoc/clean/mod.rs
src/librustdoc/core.rs
src/librustdoc/test.rs
src/libstd/fs.rs
src/libstd/panic.rs
src/libstd/primitive_docs.rs
src/libstd/process.rs
src/libstd/rt.rs
src/libstd/sync/mutex.rs
src/libstd/sync/rwlock.rs
src/libstd/sys/common/mutex.rs
src/libstd/sys/common/util.rs
src/libstd/sys/unix/ext/process.rs
src/libstd/sys/unix/mutex.rs
src/libstd/sys/unix/process.rs
src/libstd/sys/unix/rwlock.rs
src/libstd/sys/windows/mutex.rs
src/libstd/thread/mod.rs
src/libstd/thread/scoped_tls.rs [deleted file]
src/libstd/time/mod.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax_ext/concat_idents.rs
src/rt/hoedown
src/test/compile-fail/E0162.rs [new file with mode: 0644]
src/test/compile-fail/E0163.rs [new file with mode: 0644]
src/test/compile-fail/E0164.rs [new file with mode: 0644]
src/test/compile-fail/E0165.rs [new file with mode: 0644]
src/test/compile-fail/E0166.rs [new file with mode: 0644]
src/test/compile-fail/E0172.rs [new file with mode: 0644]
src/test/compile-fail/E0178.rs [new file with mode: 0644]
src/test/compile-fail/E0184.rs [new file with mode: 0644]
src/test/compile-fail/E0185.rs [new file with mode: 0644]
src/test/compile-fail/E0186.rs [new file with mode: 0644]
src/test/compile-fail/E0191.rs [new file with mode: 0644]
src/test/compile-fail/E0192.rs [new file with mode: 0644]
src/test/compile-fail/E0194.rs [new file with mode: 0644]
src/test/compile-fail/E0195.rs [new file with mode: 0644]
src/test/compile-fail/E0197.rs [new file with mode: 0644]
src/test/compile-fail/E0199.rs [new file with mode: 0644]
src/test/compile-fail/E0200.rs [new file with mode: 0644]
src/test/compile-fail/associated-types/cache/chrono-scan.rs [new file with mode: 0644]
src/test/compile-fail/associated-types/cache/elision.rs [new file with mode: 0644]
src/test/compile-fail/associated-types/cache/project-fn-ret-contravariant.rs [new file with mode: 0644]
src/test/compile-fail/associated-types/cache/project-fn-ret-invariant.rs [new file with mode: 0644]
src/test/compile-fail/associated-types/cache/wasm-issue-32330.rs [new file with mode: 0644]
src/test/compile-fail/hr-subtype.rs [new file with mode: 0644]
src/test/compile-fail/issue-16048.rs
src/test/compile-fail/issue-16338.rs
src/test/compile-fail/issue-16401.rs
src/test/compile-fail/issue-20831-debruijn.rs
src/test/compile-fail/issue-2356.rs
src/test/compile-fail/issue-33876.rs [new file with mode: 0644]
src/test/compile-fail/lifetime-inference-give-expl-lifetime-param-3.rs
src/test/compile-fail/lifetime-inference-give-expl-lifetime-param.rs
src/test/compile-fail/not-panic-safe-2.rs
src/test/compile-fail/not-panic-safe-3.rs
src/test/compile-fail/not-panic-safe-4.rs
src/test/compile-fail/not-panic-safe-5.rs
src/test/compile-fail/not-panic-safe-6.rs
src/test/compile-fail/regions-close-over-type-parameter-1.rs
src/test/compile-fail/regions-early-bound-error-method.rs
src/test/compile-fail/regions-early-bound-error.rs
src/test/compile-fail/regions-infer-invariance-due-to-mutability-4.rs
src/test/compile-fail/suggest-path-instead-of-mod-dot-item.rs
src/test/incremental/struct_add_field.rs [new file with mode: 0644]
src/test/incremental/struct_change_field_name.rs [new file with mode: 0644]
src/test/incremental/struct_change_field_type.rs [new file with mode: 0644]
src/test/incremental/struct_change_field_type_cross_crate/auxiliary/a.rs [new file with mode: 0644]
src/test/incremental/struct_change_field_type_cross_crate/b.rs [new file with mode: 0644]
src/test/incremental/struct_change_nothing.rs [new file with mode: 0644]
src/test/incremental/struct_remove_field.rs [new file with mode: 0644]
src/test/incremental/type_alias_cross_crate/auxiliary/a.rs
src/test/incremental/type_alias_cross_crate/b.rs
src/test/run-fail/main-panic.rs
src/test/run-fail/overflowing-add.rs
src/test/run-fail/overflowing-lsh-1.rs
src/test/run-fail/overflowing-lsh-2.rs
src/test/run-fail/overflowing-lsh-3.rs
src/test/run-fail/overflowing-lsh-4.rs
src/test/run-fail/overflowing-mul.rs
src/test/run-fail/overflowing-neg.rs
src/test/run-fail/overflowing-pow.rs
src/test/run-fail/overflowing-rsh-1.rs
src/test/run-fail/overflowing-rsh-2.rs
src/test/run-fail/overflowing-rsh-3.rs
src/test/run-fail/overflowing-rsh-4.rs
src/test/run-fail/overflowing-rsh-5.rs
src/test/run-fail/overflowing-rsh-6.rs
src/test/run-fail/overflowing-sub.rs
src/test/run-fail/panic-set-handler.rs
src/test/run-fail/panic-set-unset-handler.rs
src/test/run-fail/panic-take-handler-nop.rs
src/test/run-make/dep-info-no-analysis/Makefile [deleted file]
src/test/run-make/dep-info-no-analysis/input.dd [deleted file]
src/test/run-make/dep-info-no-analysis/input.rs [deleted file]
src/test/run-make/execution-engine/test.rs
src/test/run-pass/binary-heap-panic-safe.rs
src/test/run-pass/coherence-subtyping.rs
src/test/run-pass/dropck_legal_cycles.rs
src/test/run-pass/exhaustive-bool-match-sanity.rs [new file with mode: 0644]
src/test/run-pass/issue-33770.rs [new file with mode: 0644]
src/test/run-pass/multi-panic.rs
src/test/run-pass/panic-recover-propagate.rs
src/test/run-pass/project-cache-issue-31849.rs [new file with mode: 0644]
src/test/run-pass/reachable-unnameable-items.rs
src/test/run-pass/running-with-no-runtime.rs
src/test/run-pass/utf8_chars.rs
src/test/run-pass/variadic-ffi.rs
src/tools/cargotest/main.rs

index cc93b1127c3a73069dd5d6a7053ee741f9781984..87197a37f1d4919a4d239cc3423fa57d61f35799 100644 (file)
@@ -1,6 +1,7 @@
 language: generic
 sudo: required
-dist: trusty
+services:
+  - docker
 
 # LLVM takes awhile to check out and otherwise we'll manage the submodules in
 # our configure script, so disable auto submodule management.
@@ -8,15 +9,15 @@ git:
   submodules: false
 
 before_install:
-  - echo 0 | sudo tee /proc/sys/net/ipv6/conf/lo/disable_ipv6
-  - echo 'deb http://llvm.org/apt/trusty/ llvm-toolchain-trusty-3.7 main' | sudo tee -a /etc/apt/sources.list
-  - echo 'deb-src http://llvm.org/apt/trusty/ llvm-toolchain-trusty-3.7 main' | sudo tee -a /etc/apt/sources.list
-  - sudo apt-get update
-  - sudo apt-get --force-yes install curl make g++ python2.7 git zlib1g-dev libedit-dev llvm-3.7-tools
+  - docker build -t rust -f src/etc/Dockerfile src/etc
 
 script:
-  - ./configure --llvm-root=/usr/lib/llvm-3.7
-  - make tidy && make check-notidy -j4
+  - docker run -v `pwd`:/build rust
+    sh -c "
+      ./configure --llvm-root=/usr/lib/llvm-3.7 &&
+      make tidy &&
+      make check-notidy -j4
+    "
 
 # Real testing happens on http://buildbot.rust-lang.org/
 #
index d69d4b962496e58764ecb6c9552f869e5b633139..7b0a5d6b6dfc4fcbc38f2f5b46281ad914a3fce6 100644 (file)
@@ -30,7 +30,7 @@ def get(url, path, verbose=False):
         download(sha_path, sha_url, verbose)
         download(temp_path, url, verbose)
         verify(temp_path, sha_path, verbose)
-        print("moving " + temp_path + " to " + path)
+        print("moving {} to {}".format(temp_path, path))
         shutil.move(temp_path, path)
     finally:
         delete_if_present(sha_path)
@@ -44,7 +44,7 @@ def delete_if_present(path):
 
 
 def download(path, url, verbose):
-    print("downloading " + url + " to " + path)
+    print("downloading {} to {}".format(url, path))
     # see http://serverfault.com/questions/301128/how-to-download
     if sys.platform == 'win32':
         run(["PowerShell.exe", "/nologo", "-Command",
@@ -133,20 +133,20 @@ class RustBuild:
             if os.path.exists(self.bin_root()):
                 shutil.rmtree(self.bin_root())
             channel = self.stage0_rustc_channel()
-            filename = "rust-std-" + channel + "-" + self.build + ".tar.gz"
+            filename = "rust-std-{}-{}.tar.gz".format(channel, self.build)
             url = "https://static.rust-lang.org/dist/" + self.stage0_rustc_date()
             tarball = os.path.join(rustc_cache, filename)
             if not os.path.exists(tarball):
-                get(url + "/" + filename, tarball, verbose=self.verbose)
+                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
             unpack(tarball, self.bin_root(),
                    match="rust-std-" + self.build,
                    verbose=self.verbose)
 
-            filename = "rustc-" + channel + "-" + self.build + ".tar.gz"
+            filename = "rustc-{}-{}.tar.gz".format(channel, self.build)
             url = "https://static.rust-lang.org/dist/" + self.stage0_rustc_date()
             tarball = os.path.join(rustc_cache, filename)
             if not os.path.exists(tarball):
-                get(url + "/" + filename, tarball, verbose=self.verbose)
+                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
             unpack(tarball, self.bin_root(), match="rustc", verbose=self.verbose)
             with open(self.rustc_stamp(), 'w') as f:
                 f.write(self.stage0_rustc_date())
@@ -154,11 +154,11 @@ class RustBuild:
         if self.cargo().startswith(self.bin_root()) and \
            (not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
             channel = self.stage0_cargo_channel()
-            filename = "cargo-" + channel + "-" + self.build + ".tar.gz"
+            filename = "cargo-{}-{}.tar.gz".format(channel, self.build)
             url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date()
             tarball = os.path.join(cargo_cache, filename)
             if not os.path.exists(tarball):
-                get(url + "/" + filename, tarball, verbose=self.verbose)
+                get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
             unpack(tarball, self.bin_root(), match="cargo", verbose=self.verbose)
             with open(self.cargo_stamp(), 'w') as f:
                 f.write(self.stage0_cargo_date())
@@ -182,13 +182,13 @@ class RustBuild:
         return os.path.join(self.bin_root(), '.cargo-stamp')
 
     def rustc_out_of_date(self):
-        if not os.path.exists(self.rustc_stamp()):
+        if not os.path.exists(self.rustc_stamp()) or self.clean:
             return True
         with open(self.rustc_stamp(), 'r') as f:
             return self.stage0_rustc_date() != f.read()
 
     def cargo_out_of_date(self):
-        if not os.path.exists(self.cargo_stamp()):
+        if not os.path.exists(self.cargo_stamp()) or self.clean:
             return True
         with open(self.cargo_stamp(), 'r') as f:
             return self.stage0_cargo_date() != f.read()
@@ -235,8 +235,11 @@ class RustBuild:
             return ''
 
     def build_bootstrap(self):
+        build_dir = os.path.join(self.build_dir, "bootstrap")
+        if self.clean and os.path.exists(build_dir):
+            shutil.rmtree(build_dir)
         env = os.environ.copy()
-        env["CARGO_TARGET_DIR"] = os.path.join(self.build_dir, "bootstrap")
+        env["CARGO_TARGET_DIR"] = build_dir
         env["RUSTC"] = self.rustc()
         env["LD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib")
         env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib")
@@ -335,11 +338,12 @@ class RustBuild:
                 raise ValueError(err)
             sys.exit(err)
 
-        return cputype + '-' + ostype
+        return "{}-{}".format(cputype, ostype)
 
 def main():
     parser = argparse.ArgumentParser(description='Build rust')
     parser.add_argument('--config')
+    parser.add_argument('--clean', action='store_true')
     parser.add_argument('-v', '--verbose', action='store_true')
 
     args = [a for a in sys.argv if a != '-h']
@@ -352,6 +356,7 @@ def main():
     rb.rust_root = os.path.abspath(os.path.join(__file__, '../../..'))
     rb.build_dir = os.path.join(os.getcwd(), "build")
     rb.verbose = args.verbose
+    rb.clean = args.clean
 
     try:
         with open(args.config or 'config.toml') as config:
index c8a9082947eea8ca4a9ae92973a25835c8214d19..ddaebaf98d99d421f5a5b2790eca8b45923fdeee 100644 (file)
@@ -134,7 +134,7 @@ $ ldd example
         not a dynamic executable
 $ ./example
 hi!
-thread '<main>' panicked at 'failed', example.rs:1
+thread 'main' panicked at 'failed', example.rs:1
 ```
 
 Success! This binary can be copied to almost any Linux machine with the same
index 50350213074bf3a2cd2368db61d0d7acce5c7a23..d88f619260ac0ca4d95a3edc5a6a12b63480991a 100644 (file)
@@ -232,7 +232,7 @@ indicator (one word in size) along with the data.
 
 At runtime each borrow causes a modification/check of the refcount.
 
-[cell-mod]: ../std/cell/
+[cell-mod]: ../std/cell/index.html
 [cell]: ../std/cell/struct.Cell.html
 [refcell]: ../std/cell/struct.RefCell.html
 
index 4a41bb7b7f37eee595870bb26f5680de4122587b..3c6643fbfe1554e0ae02c5bc551f0a04353715c2 100644 (file)
@@ -76,7 +76,7 @@ This [unfortunate error](https://github.com/rust-lang/rust/issues/22547) is
 correct; documentation comments apply to the thing after them, and there's
 nothing after that last comment.
 
-[rc-new]: https://doc.rust-lang.org/nightly/std/rc/struct.Rc.html#method.new
+[rc-new]: ../std/rc/struct.Rc.html#method.new
 
 ### Writing documentation comments
 
index c914c33a5a4b989e369e01489ff8aca199130c00..d94eeaebf4021447c8428906371c500bf3d33f05 100644 (file)
@@ -81,7 +81,7 @@ fn main() {
 If you try running this code, the program will crash with a message like this:
 
 ```text
-thread '<main>' panicked at 'Invalid number: 11', src/bin/panic-simple.rs:5
+thread 'main' panicked at 'Invalid number: 11', src/bin/panic-simple.rs:5
 ```
 
 Here's another example that is slightly less contrived. A program that accepts
@@ -498,7 +498,7 @@ At this point, you should be skeptical of calling `unwrap`. For example, if
 the string doesn't parse as a number, you'll get a panic:
 
 ```text
-thread '<main>' panicked at 'called `Result::unwrap()` on an `Err` value: ParseIntError { kind: InvalidDigit }', /home/rustbuild/src/rust-buildbot/slave/beta-dist-rustc-linux/build/src/libcore/result.rs:729
+thread 'main' panicked at 'called `Result::unwrap()` on an `Err` value: ParseIntError { kind: InvalidDigit }', /home/rustbuild/src/rust-buildbot/slave/beta-dist-rustc-linux/build/src/libcore/result.rs:729
 ```
 
 This is rather unsightly, and if this happened inside a library you're
@@ -2205,7 +2205,7 @@ heuristics!
 [3]: ../std/option/enum.Option.html#method.unwrap_or
 [4]: ../std/option/enum.Option.html#method.unwrap_or_else
 [5]: ../std/option/enum.Option.html
-[6]: ../std/result/
+[6]: ../std/result/index.html
 [7]: ../std/result/enum.Result.html#method.unwrap
 [8]: ../std/fmt/trait.Debug.html
 [9]: ../std/primitive.str.html#method.parse
index 3a10d2aecc25e3e3fbe364f50759629c50887ad0..574929c14b35d3cf07a022e181825f21c6573b45 100644 (file)
@@ -221,7 +221,7 @@ If you add a main function that calls `diverges()` and run it, you’ll get
 some output that looks like this:
 
 ```text
-thread ‘<main>’ panicked at ‘This function never returns!’, hello.rs:2
+thread ‘main’ panicked at ‘This function never returns!’, hello.rs:2
 ```
 
 If you want more information, you can get a backtrace by setting the
@@ -229,7 +229,7 @@ If you want more information, you can get a backtrace by setting the
 
 ```text
 $ RUST_BACKTRACE=1 ./diverges
-thread '<main>' panicked at 'This function never returns!', hello.rs:2
+thread 'main' panicked at 'This function never returns!', hello.rs:2
 stack backtrace:
    1:     0x7f402773a829 - sys::backtrace::write::h0942de78b6c02817K8r
    2:     0x7f402773d7fc - panicking::on_panic::h3f23f9d0b5f4c91bu9w
@@ -255,7 +255,7 @@ Any other value(even no value at all) turns on backtrace.
 $ export RUST_BACKTRACE=1
 ...
 $ RUST_BACKTRACE=0 ./diverges 
-thread '<main>' panicked at 'This function never returns!', hello.rs:2
+thread 'main' panicked at 'This function never returns!', hello.rs:2
 note: Run with `RUST_BACKTRACE=1` for a backtrace.
 ```
 
@@ -264,7 +264,7 @@ note: Run with `RUST_BACKTRACE=1` for a backtrace.
 ```text
 $ RUST_BACKTRACE=1 cargo run
      Running `target/debug/diverges`
-thread '<main>' panicked at 'This function never returns!', hello.rs:2
+thread 'main' panicked at 'This function never returns!', hello.rs:2
 stack backtrace:
    1:     0x7f402773a829 - sys::backtrace::write::h0942de78b6c02817K8r
    2:     0x7f402773d7fc - panicking::on_panic::h3f23f9d0b5f4c91bu9w
index a2067e33a60aaad69e60294004e2c79713c194b7..c759ff9bdbde48e845a64dfbc158f198bd8178ff 100644 (file)
@@ -806,7 +806,7 @@ You guessed: 59
 You win!
 Please input your guess.
 quit
-thread '<main>' panicked at 'Please type a number!'
+thread 'main' panicked at 'Please type a number!'
 ```
 
 Ha! `quit` actually quits. As does any other non-number input. Well, this is
index 008644ec9a3e2821d68cfd9dad94319fbbb001cb..7be90e785b02b1772570a3cea24df89e83e09987 100644 (file)
@@ -163,7 +163,7 @@ let hachi = &dog[0..2];
 with this error:
 
 ```text
-thread '<main>' panicked at 'index 0 and/or 2 in `忠犬ハチ公` do not lie on
+thread 'main' panicked at 'index 0 and/or 2 in `忠犬ハチ公` do not lie on
 character boundary'
 ```
 
index 4ea114c4bee68996c8c2658ae50778e80de27ed3..d8afd7c4cf3bf30e88c5ad41a4c9f0ba1dd445a0 100644 (file)
@@ -107,7 +107,7 @@ failures:
 
 test result: FAILED. 0 passed; 1 failed; 0 ignored; 0 measured
 
-thread '<main>' panicked at 'Some tests failed', /home/steve/src/rust/src/libtest/lib.rs:247
+thread 'main' panicked at 'Some tests failed', /home/steve/src/rust/src/libtest/lib.rs:247
 ```
 
 Rust indicates that our test failed:
index 1179aebe54c552462099174392100468f29a1608..69958dd3e68a41c717527cc05347eb9d064705d1 100644 (file)
@@ -22,11 +22,12 @@ fn plus_one(x: i32) -> i32 {
 ```
 
 Much of the functionality that’s exposed in the standard library is also
-available via the [`core` crate](../core/). When we’re using the standard
-library, Rust automatically brings `std` into scope, allowing you to use
-its features without an explicit import. By the same token, when using
+available via the [`core` crate](../core/index.html). When we’re using the
+standard library, Rust automatically brings `std` into scope, allowing you to
+use its features without an explicit import. By the same token, when using
 `#![no_std]`, Rust will bring `core` into scope for you, as well as [its
-prelude](../core/prelude/v1/). This means that a lot of code will Just Work:
+prelude](../core/prelude/v1/index.html). This means that a lot of code will Just
+Work:
 
 ```rust
 #![no_std]
index 1c44af2f21a7198db6ff597fca3bec4188a27b85..cb6781cdf28fa42dc18cbf86f009faac14a1c2e6 100644 (file)
@@ -79,7 +79,7 @@ println!("Item 7 is {}", v[7]);
 then the current thread will [panic] with a message like this:
 
 ```text
-thread '<main>' panicked at 'index out of bounds: the len is 3 but the index is 7'
+thread 'main' panicked at 'index out of bounds: the len is 3 but the index is 7'
 ```
 
 If you want to handle out-of-bounds errors without panicking, you can use
@@ -152,5 +152,5 @@ API documentation][vec].
 [box]: ../std/boxed/index.html
 [generic]: generics.html
 [panic]: concurrency.html#panics
-[get]: http://doc.rust-lang.org/std/vec/struct.Vec.html#method.get
-[get_mut]: http://doc.rust-lang.org/std/vec/struct.Vec.html#method.get_mut
+[get]: ../std/vec/struct.Vec.html#method.get
+[get_mut]: ../std/vec/struct.Vec.html#method.get_mut
index 4554652a17a2a40732f03e879a27bdc56e3a139e..b2e1eac5e0dccea7692c23a131faae5b288280d6 100644 (file)
@@ -35,4 +35,4 @@ exception-safety, pointer aliasing, memory models, and even some type-theory.
 We will also be spending a lot of time talking about the different kinds
 of safety and guarantees.
 
-[trpl]: ../book/
+[trpl]: ../book/index.html
diff --git a/src/etc/Dockerfile b/src/etc/Dockerfile
new file mode 100644 (file)
index 0000000..58fa147
--- /dev/null
@@ -0,0 +1,25 @@
+FROM ubuntu:xenial
+
+# curl
+#   Download stage0, see src/bootstrap/bootstrap.py
+# g++
+#   Compile LLVM binding in src/rustllvm
+# git
+#   Get commit hash and commit date in version string
+# make
+#   Run build scripts in mk
+# libedit-dev zlib1g-dev
+#   LLVM dependencies as packaged in Ubuntu
+#   (They are optional, but Ubuntu package enables them)
+# llvm-3.7-dev (installed by llvm-3.7-tools)
+#   LLVM
+# llvm-3.7-tools
+#   FileCheck is used to run tests in src/test/codegen
+
+RUN apt-get update && apt-get -y install \
+    curl g++ git make \
+    libedit-dev zlib1g-dev \
+    llvm-3.7-tools
+
+RUN mkdir /build
+WORKDIR /build
index 8b3168b29aa4f5dafa1b1923d6b6cc5a2b24cbb5..58c841151c0f0e1b4bf0b46d0db8196c215ffdd1 100644 (file)
@@ -578,9 +578,9 @@ fn drop(&mut self) {
 // * We don't overflow `usize::MAX` and actually allocate too little
 //
 // On 64-bit we just need to check for overflow since trying to allocate
-// `> isize::MAX` bytes will surely fail. On 32-bit we need to add an extra
-// guard for this in case we're running on a platform which can use all 4GB in
-// user-space. e.g. PAE or x32
+// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
+// an extra guard for this in case we're running on a platform which can use
+// all 4GB in user-space. e.g. PAE or x32
 
 #[inline]
 fn alloc_guard(alloc_size: usize) {
index ec2f4a9f7f0b82208b230a687c95cd28e8c51a7e..29f3e4b1b6159cd4a02d23baf4468d653180af39 100644 (file)
@@ -842,13 +842,13 @@ fn fix_right_edge(&mut self) {
             // Check if right-most child is underfull.
             let mut last_edge = internal.last_edge();
             let right_child_len = last_edge.reborrow().descend().len();
-            if right_child_len < node::CAPACITY / 2 {
+            if right_child_len < node::MIN_LEN {
                 // We need to steal.
                 let mut last_kv = match last_edge.left_kv() {
                     Ok(left) => left,
                     Err(_) => unreachable!(),
                 };
-                last_kv.bulk_steal_left(node::CAPACITY/2 - right_child_len);
+                last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
                 last_edge = last_kv.right_edge();
             }
 
@@ -856,6 +856,174 @@ fn fix_right_edge(&mut self) {
             cur_node = last_edge.descend();
         }
     }
+
+    /// Splits the collection into two at the given key. Returns everything after the given key,
+    /// including the key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(btree_split_off)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    /// a.insert(3, "c");
+    /// a.insert(17, "d");
+    /// a.insert(41, "e");
+    ///
+    /// let b = a.split_off(&3);
+    ///
+    /// assert_eq!(a.len(), 2);
+    /// assert_eq!(b.len(), 3);
+    ///
+    /// assert_eq!(a[&1], "a");
+    /// assert_eq!(a[&2], "b");
+    ///
+    /// assert_eq!(b[&3], "c");
+    /// assert_eq!(b[&17], "d");
+    /// assert_eq!(b[&41], "e");
+    /// ```
+    #[unstable(feature = "btree_split_off",
+               reason = "recently added as part of collections reform 2",
+               issue = "19986")]
+    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self where K: Borrow<Q> {
+        if self.is_empty() {
+            return Self::new();
+        }
+
+        let total_num = self.len();
+
+        let mut right = Self::new();
+        for _ in 0..(self.root.as_ref().height()) {
+            right.root.push_level();
+        }
+
+        {
+            let mut left_node = self.root.as_mut();
+            let mut right_node = right.root.as_mut();
+
+            loop {
+                let mut split_edge = match search::search_node(left_node, key) {
+                    // key is going to the right tree
+                    Found(handle) => handle.left_edge(),
+                    GoDown(handle) => handle
+                };
+
+                split_edge.move_suffix(&mut right_node);
+
+                match (split_edge.force(), right_node.force()) {
+                    (Internal(edge), Internal(node)) => {
+                        left_node = edge.descend();
+                        right_node = node.first_edge().descend();
+                    }
+                    (Leaf(_), Leaf(_)) => { break; },
+                    _ => { unreachable!(); }
+                }
+            }
+        }
+
+        self.fix_right_border();
+        right.fix_left_border();
+
+        if self.root.as_ref().height() < right.root.as_ref().height() {
+            self.recalc_length();
+            right.length = total_num - self.len();
+        } else {
+            right.recalc_length();
+            self.length = total_num - right.len();
+        }
+
+        right
+    }
+
+    /// Calculates the number of elements if it is incorrect.
+    fn recalc_length(&mut self) {
+        fn dfs<K, V>(node: NodeRef<marker::Immut, K, V, marker::LeafOrInternal>) -> usize {
+            let mut res = node.len();
+
+            if let Internal(node) = node.force() {
+                let mut edge = node.first_edge();
+                loop {
+                    res += dfs(edge.reborrow().descend());
+                    match edge.right_kv() {
+                        Ok(right_kv) => { edge = right_kv.right_edge(); },
+                        Err(_) => { break; }
+                    }
+                }
+            }
+
+            res
+        }
+
+        self.length = dfs(self.root.as_ref());
+    }
+
+    /// Removes empty levels on the top.
+    fn fix_top(&mut self) {
+        loop {
+            {
+                let node = self.root.as_ref();
+                if node.height() == 0 || node.len() > 0 {
+                    break;
+                }
+            }
+            self.root.pop_level();
+        }
+    }
+
+    fn fix_right_border(&mut self) {
+        self.fix_top();
+
+        {
+            let mut cur_node = self.root.as_mut();
+
+            while let Internal(node) = cur_node.force() {
+                let mut last_kv = node.last_kv();
+
+                if last_kv.can_merge() {
+                    cur_node = last_kv.merge().descend();
+                } else {
+                    let right_len = last_kv.reborrow().right_edge().descend().len();
+                    // `MINLEN + 1` to avoid readjust if merge happens on the next level.
+                    if right_len < node::MIN_LEN + 1 {
+                        last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
+                    }
+                    cur_node = last_kv.right_edge().descend();
+                }
+            }
+        }
+
+        self.fix_top();
+    }
+
+    /// The symmetric clone of `fix_right_border`.
+    fn fix_left_border(&mut self) {
+        self.fix_top();
+
+        {
+            let mut cur_node = self.root.as_mut();
+
+            while let Internal(node) = cur_node.force() {
+                let mut first_kv = node.first_kv();
+
+                if first_kv.can_merge() {
+                    cur_node = first_kv.merge().descend();
+                } else {
+                    let left_len = first_kv.reborrow().left_edge().descend().len();
+                    if left_len < node::MIN_LEN + 1 {
+                        first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
+                    }
+                    cur_node = first_kv.left_edge().descend();
+                }
+            }
+        }
+
+        self.fix_top();
+    }
 }
 
 impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap<K, V> {
index ca1cf6bcc50280c81fe56a4445a6df2f13f72968..e9bc29118d508aa6b4bf36e926c16bfd4cae40f3 100644 (file)
@@ -51,6 +51,7 @@
 use boxed::Box;
 
 const B: usize = 6;
+pub const MIN_LEN: usize = B - 1;
 pub const CAPACITY: usize = 2 * B - 1;
 
 /// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
@@ -413,6 +414,19 @@ pub fn last_edge(self) -> Handle<Self, marker::Edge> {
         let len = self.len();
         Handle::new_edge(self, len)
     }
+
+    /// Note that `self` must be nonempty.
+    pub fn first_kv(self) -> Handle<Self, marker::KV> {
+        debug_assert!(self.len() > 0);
+        Handle::new_kv(self, 0)
+    }
+
+    /// Note that `self` must be nonempty.
+    pub fn last_kv(self) -> Handle<Self, marker::KV> {
+        let len = self.len();
+        debug_assert!(len > 0);
+        Handle::new_kv(self, len - 1)
+    }
 }
 
 impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
@@ -602,6 +616,17 @@ pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
         }
     }
 
+    fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
+        for i in first..after_last {
+            Handle::new_edge(unsafe { self.reborrow_mut() }, i).correct_parent_link();
+        }
+    }
+
+    fn correct_all_childrens_parent_links(&mut self) {
+        let len = self.len();
+        self.correct_childrens_parent_links(0, len + 1);
+    }
+
     /// Adds a key/value pair and an edge to go to the left of that pair to
     /// the beginning of the node.
     pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
@@ -623,11 +648,8 @@ pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
 
             self.as_leaf_mut().len += 1;
 
-            for i in 0..self.len()+1 {
-                Handle::new_edge(self.reborrow_mut(), i).correct_parent_link();
-            }
+            self.correct_all_childrens_parent_links();
         }
-
     }
 }
 
@@ -696,6 +718,13 @@ pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
             (key, val, edge)
         }
     }
+
+    fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
+        (
+            self.keys_mut().as_mut_ptr(),
+            self.vals_mut().as_mut_ptr()
+        )
+    }
 }
 
 impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
@@ -1275,105 +1304,155 @@ pub fn steal_right(&mut self) {
     }
 
     /// This does stealing similar to `steal_left` but steals multiple elements at once.
-    pub fn bulk_steal_left(&mut self, n: usize) {
+    pub fn bulk_steal_left(&mut self, count: usize) {
         unsafe {
-            // Get raw pointers to left child's keys, values and edges.
-            let (left_len, left_k, left_v, left_e) = {
-                let mut left = self.reborrow_mut().left_edge().descend();
-
-                (left.len(),
-                 left.keys_mut().as_mut_ptr(),
-                 left.vals_mut().as_mut_ptr(),
-                 match left.force() {
-                     ForceResult::Leaf(_) => None,
-                     ForceResult::Internal(mut i) => Some(i.as_internal_mut().edges.as_mut_ptr()),
-                 })
-            };
-
-            // Get raw pointers to right child's keys, values and edges.
-            let (right_len, right_k, right_v, right_e) = {
-                let mut right = self.reborrow_mut().right_edge().descend();
-
-                (right.len(),
-                 right.keys_mut().as_mut_ptr(),
-                 right.vals_mut().as_mut_ptr(),
-                 match right.force() {
-                     ForceResult::Leaf(_) => None,
-                     ForceResult::Internal(mut i) => Some(i.as_internal_mut().edges.as_mut_ptr()),
-                 })
-            };
-
-            // Get raw pointers to parent's key and value.
-            let (parent_k, parent_v) = {
-                let kv = self.reborrow_mut().into_kv_mut();
-                (kv.0 as *mut K, kv.1 as *mut V)
-            };
+            let mut left_node = ptr::read(self).left_edge().descend();
+            let left_len = left_node.len();
+            let mut right_node = ptr::read(self).right_edge().descend();
+            let right_len = right_node.len();
 
             // Make sure that we may steal safely.
-            debug_assert!(right_len + n <= CAPACITY);
-            debug_assert!(left_len >= n);
-
-            // Make room for stolen elements in right child.
-            ptr::copy(right_k,
-                      right_k.offset(n as isize),
-                      right_len);
-            ptr::copy(right_v,
-                      right_v.offset(n as isize),
-                      right_len);
-            if let Some(edges) = right_e {
-                ptr::copy(edges,
-                          edges.offset(n as isize),
-                          right_len+1);
+            debug_assert!(right_len + count <= CAPACITY);
+            debug_assert!(left_len >= count);
+
+            let new_left_len = left_len - count;
+
+            // Move data.
+            {
+                let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+                let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+                let parent_kv = {
+                    let kv = self.reborrow_mut().into_kv_mut();
+                    (kv.0 as *mut K, kv.1 as *mut V)
+                };
+
+                // Make room for stolen elements in the right child.
+                ptr::copy(right_kv.0,
+                          right_kv.0.offset(count as isize),
+                          right_len);
+                ptr::copy(right_kv.1,
+                          right_kv.1.offset(count as isize),
+                          right_len);
+
+                // Move elements from the left child to the right one.
+                move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
+
+                // Move parent's key/value pair to the right child.
+                move_kv(parent_kv, 0, right_kv, count - 1, 1);
+
+                // Move the left-most stolen pair to the parent.
+                move_kv(left_kv, new_left_len, parent_kv, 0, 1);
             }
 
-            // Move elements from the left child to the right one.
-            let left_ind = (left_len - n) as isize;
-            ptr::copy_nonoverlapping(left_k.offset(left_ind + 1),
-                                     right_k,
-                                     n - 1);
-            ptr::copy_nonoverlapping(left_v.offset(left_ind + 1),
-                                     right_v,
-                                     n - 1);
-            match (left_e, right_e) {
-                (Some(left), Some(right)) => {
-                    ptr::copy_nonoverlapping(left.offset(left_ind + 1),
-                                             right,
-                                             n);
+            left_node.reborrow_mut().as_leaf_mut().len -= count as u16;
+            right_node.reborrow_mut().as_leaf_mut().len += count as u16;
+
+            match (left_node.force(), right_node.force()) {
+                (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+                    // Make room for stolen edges.
+                    let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+                    ptr::copy(right_edges,
+                              right_edges.offset(count as isize),
+                              right_len + 1);
+                    right.correct_childrens_parent_links(count, count + right_len + 1);
+
+                    move_edges(left, new_left_len + 1, right, 0, count);
                 },
-                (Some(_), None) => unreachable!(),
-                (None, Some(_)) => unreachable!(),
-                (None, None) => {},
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
+                _ => { unreachable!(); }
             }
+        }
+    }
 
-            // Copy parent key/value pair to right child.
-            ptr::copy_nonoverlapping(parent_k,
-                                     right_k.offset(n as isize - 1),
-                                     1);
-            ptr::copy_nonoverlapping(parent_v,
-                                     right_v.offset(n as isize - 1),
-                                     1);
-            // Copy left-most stolen pair to parent.
-            ptr::copy_nonoverlapping(left_k.offset(left_ind),
-                                     parent_k,
-                                     1);
-            ptr::copy_nonoverlapping(left_v.offset(left_ind),
-                                     parent_v,
-                                     1);
-
-            // Fix lengths of left and right child and parent pointers in children of the right
-            // child.
-            self.reborrow_mut().left_edge().descend().as_leaf_mut().len -= n as u16;
-            let mut right = self.reborrow_mut().right_edge().descend();
-            right.as_leaf_mut().len += n as u16;
-            if let ForceResult::Internal(mut node) = right.force() {
-                for i in 0..(right_len+n+1) {
-                    Handle::new_edge(node.reborrow_mut(), i as usize).correct_parent_link();
-                }
+    /// The symmetric clone of `bulk_steal_left`.
+    pub fn bulk_steal_right(&mut self, count: usize) {
+        unsafe {
+            let mut left_node = ptr::read(self).left_edge().descend();
+            let left_len = left_node.len();
+            let mut right_node = ptr::read(self).right_edge().descend();
+            let right_len = right_node.len();
+
+            // Make sure that we may steal safely.
+            debug_assert!(left_len + count <= CAPACITY);
+            debug_assert!(right_len >= count);
+
+            let new_right_len = right_len - count;
+
+            // Move data.
+            {
+                let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+                let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+                let parent_kv = {
+                    let kv = self.reborrow_mut().into_kv_mut();
+                    (kv.0 as *mut K, kv.1 as *mut V)
+                };
+
+                // Move parent's key/value pair to the left child.
+                move_kv(parent_kv, 0, left_kv, left_len, 1);
+
+                // Move elements from the right child to the left one.
+                move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
+
+                // Move the right-most stolen pair to the parent.
+                move_kv(right_kv, count - 1, parent_kv, 0, 1);
+
+                // Fix right indexing
+                ptr::copy(right_kv.0.offset(count as isize),
+                          right_kv.0,
+                          new_right_len);
+                ptr::copy(right_kv.1.offset(count as isize),
+                          right_kv.1,
+                          new_right_len);
+            }
+
+            left_node.reborrow_mut().as_leaf_mut().len += count as u16;
+            right_node.reborrow_mut().as_leaf_mut().len -= count as u16;
+
+            match (left_node.force(), right_node.force()) {
+                (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+                    move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
+
+                    // Fix right indexing.
+                    let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+                    ptr::copy(right_edges.offset(count as isize),
+                              right_edges,
+                              new_right_len + 1);
+                    right.correct_childrens_parent_links(0, new_right_len + 1);
+                },
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
+                _ => { unreachable!(); }
             }
         }
     }
 }
 
+unsafe fn move_kv<K, V>(
+    source: (*mut K, *mut V), source_offset: usize,
+    dest: (*mut K, *mut V), dest_offset: usize,
+    count: usize)
+{
+    ptr::copy_nonoverlapping(source.0.offset(source_offset as isize),
+                             dest.0.offset(dest_offset as isize),
+                             count);
+    ptr::copy_nonoverlapping(source.1.offset(source_offset as isize),
+                             dest.1.offset(dest_offset as isize),
+                             count);
+}
+
+// Source and destination must have the same height.
+unsafe fn move_edges<K, V>(
+    mut source: NodeRef<marker::Mut, K, V, marker::Internal>, source_offset: usize,
+    mut dest: NodeRef<marker::Mut, K, V, marker::Internal>, dest_offset: usize,
+    count: usize)
+{
+    let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
+    let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
+    ptr::copy_nonoverlapping(source_ptr.offset(source_offset as isize),
+                             dest_ptr.offset(dest_offset as isize),
+                             count);
+    dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
+}
+
 impl<BorrowType, K, V, HandleType>
         Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
 
@@ -1397,6 +1476,41 @@ pub fn force(self) -> ForceResult<
     }
 }
 
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+    /// Move the suffix after `self` from one node to another one. `right` must be empty.
+    /// The first edge of `right` remains unchanged.
+    pub fn move_suffix(&mut self,
+            right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>) {
+        unsafe {
+            let left_new_len = self.idx;
+            let mut left_node = self.reborrow_mut().into_node();
+
+            let right_new_len = left_node.len() - left_new_len;
+            let mut right_node = right.reborrow_mut();
+
+            debug_assert!(right_node.len() == 0);
+            debug_assert!(left_node.height == right_node.height);
+
+            let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+            let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+
+
+            move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
+
+            left_node.reborrow_mut().as_leaf_mut().len = left_new_len as u16;
+            right_node.reborrow_mut().as_leaf_mut().len = right_new_len as u16;
+
+            match (left_node.force(), right_node.force()) {
+                (ForceResult::Internal(left), ForceResult::Internal(right)) => {
+                    move_edges(left, left_new_len + 1, right, 1, right_new_len);
+                },
+                (ForceResult::Leaf(_), ForceResult::Leaf(_)) => { }
+                _ => { unreachable!(); }
+            }
+        }
+    }
+}
+
 pub enum ForceResult<Leaf, Internal> {
     Leaf(Leaf),
     Internal(Internal)
index 3ee42499a38f868c5d4b4ebf00896fbd68a83f37..765595be317c117b705aaa8c90933d383cd1019d 100644 (file)
@@ -580,6 +580,43 @@ pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
     pub fn append(&mut self, other: &mut Self) {
         self.map.append(&mut other.map);
     }
+
+    /// Splits the collection into two at the given key. Returns everything after the given key,
+    /// including the key.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(btree_split_off)]
+    /// use std::collections::BTreeMap;
+    ///
+    /// let mut a = BTreeMap::new();
+    /// a.insert(1, "a");
+    /// a.insert(2, "b");
+    /// a.insert(3, "c");
+    /// a.insert(17, "d");
+    /// a.insert(41, "e");
+    ///
+    /// let b = a.split_off(&3);
+    ///
+    /// assert_eq!(a.len(), 2);
+    /// assert_eq!(b.len(), 3);
+    ///
+    /// assert_eq!(a[&1], "a");
+    /// assert_eq!(a[&2], "b");
+    ///
+    /// assert_eq!(b[&3], "c");
+    /// assert_eq!(b[&17], "d");
+    /// assert_eq!(b[&41], "e");
+    /// ```
+    #[unstable(feature = "btree_split_off",
+               reason = "recently added as part of collections reform 2",
+               issue = "19986")]
+    pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self where T: Borrow<Q> {
+        BTreeSet { map: self.map.split_off(key) }
+    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
index f3770816cb6b3aa184f1b398df3a37ebaf7adc9a..d7c11f32404395db1b82cdac20348658d62ab828 100644 (file)
@@ -112,11 +112,6 @@ fn connect(&self, sep: &str) -> String {
     }
 }
 
-/// Deprecated, renamed to EncodeUtf16
-#[unstable(feature = "str_utf16", issue = "27714")]
-#[rustc_deprecated(since = "1.8.0", reason = "renamed to EncodeUtf16")]
-pub type Utf16Units<'a> = EncodeUtf16<'a>;
-
 /// External iterator for a string's UTF-16 code units.
 ///
 /// For use with the `std::iter` module.
@@ -352,230 +347,6 @@ pub unsafe fn slice_mut_unchecked(&mut self, begin: usize, end: usize) -> &mut s
         core_str::StrExt::slice_mut_unchecked(self, begin, end)
     }
 
-    /// Given a byte position, returns the next `char` and its index.
-    ///
-    /// # Panics
-    ///
-    /// If `i` is greater than or equal to the length of the string.
-    /// If `i` is not the index of the beginning of a valid UTF-8 sequence.
-    ///
-    /// # Examples
-    ///
-    /// This example manually iterates through the code points of a string;
-    /// this should normally be
-    /// done by `.chars()` or `.char_indices()`.
-    ///
-    /// ```
-    /// #![feature(str_char)]
-    /// #![allow(deprecated)]
-    ///
-    /// use std::str::CharRange;
-    ///
-    /// let s = "中华Việt Nam";
-    /// let mut i = 0;
-    /// while i < s.len() {
-    ///     let CharRange {ch, next} = s.char_range_at(i);
-    ///     println!("{}: {}", i, ch);
-    ///     i = next;
-    /// }
-    /// ```
-    ///
-    /// This outputs:
-    ///
-    /// ```text
-    /// 0: 中
-    /// 3: 华
-    /// 6: V
-    /// 7: i
-    /// 8: e
-    /// 9:
-    /// 11:
-    /// 13: t
-    /// 14:
-    /// 15: N
-    /// 16: a
-    /// 17: m
-    /// ```
-    #[unstable(feature = "str_char",
-               reason = "often replaced by char_indices, this method may \
-                         be removed in favor of just char_at() or eventually \
-                         removed altogether",
-               issue = "27754")]
-    #[inline]
-    #[rustc_deprecated(reason = "use slicing plus chars() plus len_utf8",
-                       since = "1.9.0")]
-    #[allow(deprecated)]
-    pub fn char_range_at(&self, start: usize) -> CharRange {
-        core_str::StrExt::char_range_at(self, start)
-    }
-
-    /// Given a byte position, returns the previous `char` and its position.
-    ///
-    /// Note that Unicode has many features, such as combining marks, ligatures,
-    /// and direction marks, that need to be taken into account to correctly reverse a string.
-    ///
-    /// Returns 0 for next index if called on start index 0.
-    ///
-    /// # Panics
-    ///
-    /// If `i` is greater than the length of the string.
-    /// If `i` is not an index following a valid UTF-8 sequence.
-    ///
-    /// # Examples
-    ///
-    /// This example manually iterates through the code points of a string;
-    /// this should normally be
-    /// done by `.chars().rev()` or `.char_indices()`.
-    ///
-    /// ```
-    /// #![feature(str_char)]
-    /// #![allow(deprecated)]
-    ///
-    /// use std::str::CharRange;
-    ///
-    /// let s = "中华Việt Nam";
-    /// let mut i = s.len();
-    /// while i > 0 {
-    ///     let CharRange {ch, next} = s.char_range_at_reverse(i);
-    ///     println!("{}: {}", i, ch);
-    ///     i = next;
-    /// }
-    /// ```
-    ///
-    /// This outputs:
-    ///
-    /// ```text
-    /// 18: m
-    /// 17: a
-    /// 16: N
-    /// 15:
-    /// 14: t
-    /// 13:
-    /// 11:
-    /// 9: e
-    /// 8: i
-    /// 7: V
-    /// 6: 华
-    /// 3: 中
-    /// ```
-    #[unstable(feature = "str_char",
-               reason = "often replaced by char_indices, this method may \
-                         be removed in favor of just char_at_reverse() or \
-                         eventually removed altogether",
-               issue = "27754")]
-    #[inline]
-    #[rustc_deprecated(reason = "use slicing plus chars().rev() plus len_utf8",
-                       since = "1.9.0")]
-    #[allow(deprecated)]
-    pub fn char_range_at_reverse(&self, start: usize) -> CharRange {
-        core_str::StrExt::char_range_at_reverse(self, start)
-    }
-
-    /// Given a byte position, returns the `char` at that position.
-    ///
-    /// # Panics
-    ///
-    /// If `i` is greater than or equal to the length of the string.
-    /// If `i` is not the index of the beginning of a valid UTF-8 sequence.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// #![feature(str_char)]
-    /// #![allow(deprecated)]
-    ///
-    /// let s = "abπc";
-    /// assert_eq!(s.char_at(1), 'b');
-    /// assert_eq!(s.char_at(2), 'π');
-    /// assert_eq!(s.char_at(4), 'c');
-    /// ```
-    #[unstable(feature = "str_char",
-               reason = "frequently replaced by the chars() iterator, this \
-                         method may be removed or possibly renamed in the \
-                         future; it is normally replaced by chars/char_indices \
-                         iterators or by getting the first char from a \
-                         subslice",
-               issue = "27754")]
-    #[inline]
-    #[allow(deprecated)]
-    #[rustc_deprecated(reason = "use slicing plus chars()",
-                       since = "1.9.0")]
-    pub fn char_at(&self, i: usize) -> char {
-        core_str::StrExt::char_at(self, i)
-    }
-
-    /// Given a byte position, returns the `char` at that position, counting
-    /// from the end.
-    ///
-    /// # Panics
-    ///
-    /// If `i` is greater than the length of the string.
-    /// If `i` is not an index following a valid UTF-8 sequence.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// #![feature(str_char)]
-    /// #![allow(deprecated)]
-    ///
-    /// let s = "abπc";
-    /// assert_eq!(s.char_at_reverse(1), 'a');
-    /// assert_eq!(s.char_at_reverse(2), 'b');
-    /// assert_eq!(s.char_at_reverse(3), 'π');
-    /// ```
-    #[unstable(feature = "str_char",
-               reason = "see char_at for more details, but reverse semantics \
-                         are also somewhat unclear, especially with which \
-                         cases generate panics",
-               issue = "27754")]
-    #[inline]
-    #[rustc_deprecated(reason = "use slicing plus chars().rev()",
-                       since = "1.9.0")]
-    #[allow(deprecated)]
-    pub fn char_at_reverse(&self, i: usize) -> char {
-        core_str::StrExt::char_at_reverse(self, i)
-    }
-
-    /// Retrieves the first `char` from a `&str` and returns it.
-    ///
-    /// Note that a single Unicode character (grapheme cluster)
-    /// can be composed of multiple `char`s.
-    ///
-    /// This does not allocate a new string; instead, it returns a slice that
-    /// points one code point beyond the code point that was shifted.
-    ///
-    /// `None` is returned if the slice is empty.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// #![feature(str_char)]
-    /// #![allow(deprecated)]
-    ///
-    /// let s = "Łódź"; // \u{141}o\u{301}dz\u{301}
-    /// let (c, s1) = s.slice_shift_char().unwrap();
-    ///
-    /// assert_eq!(c, 'Ł');
-    /// assert_eq!(s1, "ódź");
-    ///
-    /// let (c, s2) = s1.slice_shift_char().unwrap();
-    ///
-    /// assert_eq!(c, 'o');
-    /// assert_eq!(s2, "\u{301}dz\u{301}");
-    /// ```
-    #[unstable(feature = "str_char",
-               reason = "awaiting conventions about shifting and slices and \
-                         may not be warranted with the existence of the chars \
-                         and/or char_indices iterators",
-               issue = "27754")]
-    #[inline]
-    #[rustc_deprecated(reason = "use chars() plus Chars::as_str",
-                       since = "1.9.0")]
-    #[allow(deprecated)]
-    pub fn slice_shift_char(&self) -> Option<(char, &str)> {
-        core_str::StrExt::slice_shift_char(self)
-    }
-
     /// Divide one string slice into two at an index.
     ///
     /// The argument, `mid`, should be a byte offset from the start of the
@@ -867,16 +638,6 @@ pub fn lines_any(&self) -> LinesAny {
         core_str::StrExt::lines_any(self)
     }
 
-    /// Returns an iterator of `u16` over the string encoded as UTF-16.
-    #[unstable(feature = "str_utf16",
-               reason = "this functionality may only be provided by libunicode",
-               issue = "27714")]
-    #[rustc_deprecated(since = "1.8.0", reason = "renamed to encode_utf16")]
-    #[allow(deprecated)]
-    pub fn utf16_units(&self) -> Utf16Units {
-        Utf16Units { encoder: Utf16Encoder::new(self[..].chars()) }
-    }
-
     /// Returns an iterator of `u16` over the string encoded as UTF-16.
     #[stable(feature = "encode_utf16", since = "1.8.0")]
     pub fn encode_utf16(&self) -> EncodeUtf16 {
index e19090c7599ecd3d6e6b4fc35fd26c365469403e..49fce68d15e542138a720a59b05d19c880951dd7 100644 (file)
@@ -13,6 +13,9 @@
 use std::collections::btree_map::Entry::{Occupied, Vacant};
 use std::rc::Rc;
 
+use std::iter::FromIterator;
+use super::DeterministicRng;
+
 #[test]
 fn test_basic_large() {
     let mut map = BTreeMap::new();
@@ -528,6 +531,51 @@ fn $name() {
 create_append_test!(test_append_239, 239);
 create_append_test!(test_append_1700, 1700);
 
+fn rand_data(len: usize) -> Vec<(u32, u32)> {
+    let mut rng = DeterministicRng::new();
+    Vec::from_iter(
+        (0..len).map(|_| (rng.next(), rng.next()))
+    )
+}
+
+#[test]
+fn test_split_off_empty_right() {
+    let mut data = rand_data(173);
+
+    let mut map = BTreeMap::from_iter(data.clone());
+    let right = map.split_off(&(data.iter().max().unwrap().0 + 1));
+
+    data.sort();
+    assert!(map.into_iter().eq(data));
+    assert!(right.into_iter().eq(None));
+}
+
+#[test]
+fn test_split_off_empty_left() {
+    let mut data = rand_data(314);
+
+    let mut map = BTreeMap::from_iter(data.clone());
+    let right = map.split_off(&data.iter().min().unwrap().0);
+
+    data.sort();
+    assert!(map.into_iter().eq(None));
+    assert!(right.into_iter().eq(data));
+}
+
+#[test]
+fn test_split_off_large_random_sorted() {
+    let mut data = rand_data(1529);
+    // special case with maximum height.
+    data.sort();
+
+    let mut map = BTreeMap::from_iter(data.clone());
+    let key = data[data.len() / 2].0;
+    let right = map.split_off(&key);
+
+    assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key)));
+    assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key)));
+}
+
 mod bench {
     use std::collections::BTreeMap;
     use std::__rand::{Rng, thread_rng};
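
For orientation, the contract these new `split_off` tests pin down: every key strictly below the split key stays in the original map, and every key at or above it moves into the returned map. A minimal standalone sketch of that contract (written against `BTreeMap::split_off` as later stabilized; inside this test crate it still sits behind `#![feature(btree_split_off)]`):

use std::collections::BTreeMap;

fn main() {
    // Keys < 5 stay behind, keys >= 5 move into `right`.
    let mut map: BTreeMap<u32, u32> = (0..10).map(|i| (i, i * i)).collect();
    let right = map.split_off(&5);

    assert!(map.keys().all(|&k| k < 5));
    assert!(right.keys().all(|&k| k >= 5));
}
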
index 0db48f3ce9edb4bac58c1b4f2596a48f5eca20da..ea43f423b7c1f0bf7e60570c110cd317444202fe 100644 (file)
 
 mod map;
 mod set;
+
+/// XorShiftRng
+struct DeterministicRng {
+    x: u32,
+    y: u32,
+    z: u32,
+    w: u32,
+}
+
+impl DeterministicRng {
+    fn new() -> Self {
+        DeterministicRng {
+            x: 0x193a6754,
+            y: 0xa8a7d469,
+            z: 0x97830e05,
+            w: 0x113ba7bb
+        }
+    }
+
+    fn next(&mut self) -> u32 {
+        let x = self.x;
+        let t = x ^ (x << 11);
+        self.x = self.y;
+        self.y = self.z;
+        self.z = self.w;
+        let w_ = self.w;
+        self.w = w_ ^ (w_ >> 19) ^ (t ^ (t >> 8));
+        self.w
+    }
+}
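
The struct above is a plain xorshift128 generator with hard-coded seed values, so every test run draws the same "random" data. A standalone sketch (the `Rng` name and `main` harness are illustrative, not part of the patch) of why that keeps the new `split_off` tests reproducible:

struct Rng { x: u32, y: u32, z: u32, w: u32 }

impl Rng {
    fn next(&mut self) -> u32 {
        // Same xorshift128 step as DeterministicRng::next above.
        let t = self.x ^ (self.x << 11);
        self.x = self.y;
        self.y = self.z;
        self.z = self.w;
        self.w = self.w ^ (self.w >> 19) ^ (t ^ (t >> 8));
        self.w
    }
}

fn seeded() -> Rng {
    Rng { x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb }
}

fn main() {
    // Two independently seeded generators stay in lockstep: same seed, same sequence.
    let (mut a, mut b) = (seeded(), seeded());
    for _ in 0..5 {
        assert_eq!(a.next(), b.next());
    }
}
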
index 53ccfd5b4e26dc0c4dbd3325457b46828cf03616..f7b647d7772db4a57d91ec37d1cfc300703f23e6 100644 (file)
@@ -10,6 +10,9 @@
 
 use std::collections::BTreeSet;
 
+use std::iter::FromIterator;
+use super::DeterministicRng;
+
 #[test]
 fn test_clone_eq() {
   let mut m = BTreeSet::new();
@@ -289,3 +292,48 @@ fn test_append() {
     assert_eq!(a.contains(&4), true);
     assert_eq!(a.contains(&5), true);
 }
+
+fn rand_data(len: usize) -> Vec<u32> {
+    let mut rng = DeterministicRng::new();
+    Vec::from_iter(
+        (0..len).map(|_| rng.next())
+    )
+}
+
+#[test]
+fn test_split_off_empty_right() {
+    let mut data = rand_data(173);
+
+    let mut set = BTreeSet::from_iter(data.clone());
+    let right = set.split_off(&(data.iter().max().unwrap() + 1));
+
+    data.sort();
+    assert!(set.into_iter().eq(data));
+    assert!(right.into_iter().eq(None));
+}
+
+#[test]
+fn test_split_off_empty_left() {
+    let mut data = rand_data(314);
+
+    let mut set = BTreeSet::from_iter(data.clone());
+    let right = set.split_off(data.iter().min().unwrap());
+
+    data.sort();
+    assert!(set.into_iter().eq(None));
+    assert!(right.into_iter().eq(data));
+}
+
+#[test]
+fn test_split_off_large_random_sorted() {
+    let mut data = rand_data(1529);
+    // special case with maximum height.
+    data.sort();
+
+    let mut set = BTreeSet::from_iter(data.clone());
+    let key = data[data.len() / 2];
+    let right = set.split_off(&key);
+
+    assert!(set.into_iter().eq(data.clone().into_iter().filter(|x| *x < key)));
+    assert!(right.into_iter().eq(data.into_iter().filter(|x| *x >= key)));
+}
index bae21f1bd9b8fe41454d2bee5ce5758ede1a0ecc..400d614094862a83445d8d251a56047d069bc950 100644 (file)
@@ -14,6 +14,7 @@
 #![feature(binary_heap_append)]
 #![feature(box_syntax)]
 #![feature(btree_append)]
+#![feature(btree_split_off)]
 #![feature(btree_range)]
 #![feature(collections)]
 #![feature(collections_bound)]
@@ -25,7 +26,6 @@
 #![feature(pattern)]
 #![feature(rand)]
 #![feature(step_by)]
-#![feature(str_char)]
 #![feature(str_escape)]
 #![feature(test)]
 #![feature(unboxed_closures)]
index a1820a1cb96e3901b973454ce802f4a23494c909..124b85bfca872bbfb15f26ca9897dee61ba72806 100644 (file)
@@ -479,20 +479,6 @@ fn test_is_whitespace() {
     assert!(!"   _   ".chars().all(|c| c.is_whitespace()));
 }
 
-#[test]
-#[allow(deprecated)]
-fn test_slice_shift_char() {
-    let data = "ประเทศไทย中";
-    assert_eq!(data.slice_shift_char(), Some(('ป', "ระเทศไทย中")));
-}
-
-#[test]
-#[allow(deprecated)]
-fn test_slice_shift_char_2() {
-    let empty = "";
-    assert_eq!(empty.slice_shift_char(), None);
-}
-
 #[test]
 fn test_is_utf8() {
     // deny overlong encodings
@@ -674,30 +660,6 @@ fn test_contains_char() {
     assert!(!"".contains('a'));
 }
 
-#[test]
-#[allow(deprecated)]
-fn test_char_at() {
-    let s = "ศไทย中华Việt Nam";
-    let v = vec!['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
-    let mut pos = 0;
-    for ch in &v {
-        assert!(s.char_at(pos) == *ch);
-        pos += ch.to_string().len();
-    }
-}
-
-#[test]
-#[allow(deprecated)]
-fn test_char_at_reverse() {
-    let s = "ศไทย中华Việt Nam";
-    let v = vec!['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
-    let mut pos = s.len();
-    for ch in v.iter().rev() {
-        assert!(s.char_at_reverse(pos) == *ch);
-        pos -= ch.to_string().len();
-    }
-}
-
 #[test]
 fn test_split_at() {
     let s = "ศไทย中华Việt Nam";
@@ -764,26 +726,6 @@ fn test_total_ord() {
     assert_eq!("22".cmp("1234"), Greater);
 }
 
-#[test]
-#[allow(deprecated)]
-fn test_char_range_at() {
-    let data = "b¢€𤭢𤭢€¢b";
-    assert_eq!('b', data.char_range_at(0).ch);
-    assert_eq!('¢', data.char_range_at(1).ch);
-    assert_eq!('€', data.char_range_at(3).ch);
-    assert_eq!('𤭢', data.char_range_at(6).ch);
-    assert_eq!('𤭢', data.char_range_at(10).ch);
-    assert_eq!('€', data.char_range_at(14).ch);
-    assert_eq!('¢', data.char_range_at(17).ch);
-    assert_eq!('b', data.char_range_at(19).ch);
-}
-
-#[test]
-#[allow(deprecated)]
-fn test_char_range_at_reverse_underflow() {
-    assert_eq!("abc".char_range_at_reverse(0).next, 0);
-}
-
 #[test]
 fn test_iterator() {
     let s = "ศไทย中华Việt Nam";
index 97a85f6aa43d006cf77930df3169168e95b53bfa..2700f016751dcd83fe279530e0f3a10a5ae392fd 100644 (file)
@@ -694,40 +694,6 @@ pub fn map<U: ?Sized, F>(orig: Ref<'b, T>, f: F) -> Ref<'b, U>
             borrow: orig.borrow,
         }
     }
-
-    /// Make a new `Ref` for an optional component of the borrowed data, e.g. an
-    /// enum variant.
-    ///
-    /// The `RefCell` is already immutably borrowed, so this cannot fail.
-    ///
-    /// This is an associated function that needs to be used as
-    /// `Ref::filter_map(...)`.  A method would interfere with methods of the
-    /// same name on the contents of a `RefCell` used through `Deref`.
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// # #![feature(cell_extras)]
-    /// use std::cell::{RefCell, Ref};
-    ///
-    /// let c = RefCell::new(Ok(5));
-    /// let b1: Ref<Result<u32, ()>> = c.borrow();
-    /// let b2: Ref<u32> = Ref::filter_map(b1, |o| o.as_ref().ok()).unwrap();
-    /// assert_eq!(*b2, 5)
-    /// ```
-    #[unstable(feature = "cell_extras", reason = "recently added",
-               issue = "27746")]
-    #[rustc_deprecated(since = "1.8.0", reason = "can be built on `Ref::map`: \
-        https://crates.io/crates/ref_filter_map")]
-    #[inline]
-    pub fn filter_map<U: ?Sized, F>(orig: Ref<'b, T>, f: F) -> Option<Ref<'b, U>>
-        where F: FnOnce(&T) -> Option<&U>
-    {
-        f(orig.value).map(move |new| Ref {
-            value: new,
-            borrow: orig.borrow,
-        })
-    }
 }
 
 #[unstable(feature = "coerce_unsized", issue = "27732")]
@@ -767,47 +733,6 @@ pub fn map<U: ?Sized, F>(orig: RefMut<'b, T>, f: F) -> RefMut<'b, U>
             borrow: orig.borrow,
         }
     }
-
-    /// Make a new `RefMut` for an optional component of the borrowed data, e.g.
-    /// an enum variant.
-    ///
-    /// The `RefCell` is already mutably borrowed, so this cannot fail.
-    ///
-    /// This is an associated function that needs to be used as
-    /// `RefMut::filter_map(...)`.  A method would interfere with methods of the
-    /// same name on the contents of a `RefCell` used through `Deref`.
-    ///
-    /// # Example
-    ///
-    /// ```
-    /// # #![feature(cell_extras)]
-    /// use std::cell::{RefCell, RefMut};
-    ///
-    /// let c = RefCell::new(Ok(5));
-    /// {
-    ///     let b1: RefMut<Result<u32, ()>> = c.borrow_mut();
-    ///     let mut b2: RefMut<u32> = RefMut::filter_map(b1, |o| {
-    ///         o.as_mut().ok()
-    ///     }).unwrap();
-    ///     assert_eq!(*b2, 5);
-    ///     *b2 = 42;
-    /// }
-    /// assert_eq!(*c.borrow(), Ok(42));
-    /// ```
-    #[unstable(feature = "cell_extras", reason = "recently added",
-               issue = "27746")]
-    #[rustc_deprecated(since = "1.8.0", reason = "can be built on `RefMut::map`: \
-        https://crates.io/crates/ref_filter_map")]
-    #[inline]
-    pub fn filter_map<U: ?Sized, F>(orig: RefMut<'b, T>, f: F) -> Option<RefMut<'b, U>>
-        where F: FnOnce(&mut T) -> Option<&mut U>
-    {
-        let RefMut { value, borrow } = orig;
-        f(value).map(move |new| RefMut {
-            value: new,
-            borrow: borrow,
-        })
-    }
 }
 
 struct BorrowRefMut<'b> {
index a944c996c1a1e7532e0aeebde83bbbfc05c182e8..d55e0317a949218af2cca35aebd36da2997d1dc3 100644 (file)
@@ -29,6 +29,7 @@ trait Int: Zero + PartialEq + PartialOrd + Div<Output=Self> + Rem<Output=Self> +
            Sub<Output=Self> + Copy {
     fn from_u8(u: u8) -> Self;
     fn to_u8(&self) -> u8;
+    fn to_u16(&self) -> u16;
     fn to_u32(&self) -> u32;
     fn to_u64(&self) -> u64;
 }
@@ -37,6 +38,7 @@ macro_rules! doit {
     ($($t:ident)*) => ($(impl Int for $t {
         fn from_u8(u: u8) -> $t { u as $t }
         fn to_u8(&self) -> u8 { *self as u8 }
+        fn to_u16(&self) -> u16 { *self as u16 }
         fn to_u32(&self) -> u32 { *self as u32 }
         fn to_u64(&self) -> u64 { *self as u64 }
     })*)
@@ -256,6 +258,8 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 
 impl_Display!(i8, u8, i16, u16, i32, u32: to_u32);
 impl_Display!(i64, u64: to_u64);
+#[cfg(target_pointer_width = "16")]
+impl_Display!(isize, usize: to_u16);
 #[cfg(target_pointer_width = "32")]
 impl_Display!(isize, usize: to_u32);
 #[cfg(target_pointer_width = "64")]
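
The `target_pointer_width = "16"` arm added here follows the cfg-gating pattern this commit applies across libcore to bring up 16-bit targets: exactly one of the mutually exclusive items compiles for any given target. A minimal sketch of the pattern on its own (the `PTR_BITS` constant is illustrative):

#[cfg(target_pointer_width = "16")]
const PTR_BITS: usize = 16;
#[cfg(target_pointer_width = "32")]
const PTR_BITS: usize = 32;
#[cfg(target_pointer_width = "64")]
const PTR_BITS: usize = 64;

fn main() {
    // Whichever arm was compiled in matches the width of `usize` on this target.
    assert_eq!(PTR_BITS, std::mem::size_of::<usize>() * 8);
}
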
index a0f2a2adcb62cb8cf2587af3170fa96eea412f51..5c2179ccf33a1a6404ef4db66f047c0e7b77e6e7 100644 (file)
@@ -518,6 +518,10 @@ pub fn replace<T>(dest: &mut T, mut src: T) -> T {
 #[stable(feature = "rust1", since = "1.0.0")]
 pub fn drop<T>(_x: T) { }
 
+macro_rules! repeat_u8_as_u16 {
+    ($name:expr) => { (($name as u16) <<  8 |
+                       ($name as u16)) }
+}
 macro_rules! repeat_u8_as_u32 {
     ($name:expr) => { (($name as u32) << 24 |
                        ($name as u32) << 16 |
@@ -543,11 +547,18 @@ macro_rules! repeat_u8_as_u64 {
 pub const POST_DROP_U8: u8 = 0x1d;
 #[unstable(feature = "filling_drop", issue = "5016")]
 #[allow(missing_docs)]
+pub const POST_DROP_U16: u16 = repeat_u8_as_u16!(POST_DROP_U8);
+#[unstable(feature = "filling_drop", issue = "5016")]
+#[allow(missing_docs)]
 pub const POST_DROP_U32: u32 = repeat_u8_as_u32!(POST_DROP_U8);
 #[unstable(feature = "filling_drop", issue = "5016")]
 #[allow(missing_docs)]
 pub const POST_DROP_U64: u64 = repeat_u8_as_u64!(POST_DROP_U8);
 
+#[cfg(target_pointer_width = "16")]
+#[unstable(feature = "filling_drop", issue = "5016")]
+#[allow(missing_docs)]
+pub const POST_DROP_USIZE: usize = POST_DROP_U16 as usize;
 #[cfg(target_pointer_width = "32")]
 #[unstable(feature = "filling_drop", issue = "5016")]
 #[allow(missing_docs)]
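
The new `repeat_u8_as_u16!` macro just copies the drop-fill byte into both bytes of a `u16`, so `POST_DROP_U16` comes out as `0x1d1d`, and on a 16-bit target `POST_DROP_USIZE` is the same bit pattern. A tiny sketch of the expansion (local names, not the libcore constants):

fn main() {
    let fill: u16 = 0x1d;
    // repeat_u8_as_u16!(0x1d) expands to (0x1d << 8) | 0x1d.
    let post_drop_u16 = (fill << 8) | fill;
    assert_eq!(post_drop_u16, 0x1d1d);
}
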
index de5b1777f936f6b86fd7beec86472f026b5f6d8c..86bcef4011d02177f10d8109672575c7f0b6b35e 100644 (file)
@@ -14,6 +14,8 @@
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
+#[cfg(target_pointer_width = "16")]
+int_module! { isize, 16 }
 #[cfg(target_pointer_width = "32")]
 int_module! { isize, 32 }
 #[cfg(target_pointer_width = "64")]
index 9b6f6698defe4513e636ac4a47c02b750bdd2d09..5988a6375d44efbf08156f87f2f167e253a5c83f 100644 (file)
@@ -1176,6 +1176,15 @@ impl i64 {
         intrinsics::mul_with_overflow }
 }
 
+#[cfg(target_pointer_width = "16")]
+#[lang = "isize"]
+impl isize {
+    int_impl! { i16, u16, 16,
+        intrinsics::add_with_overflow,
+        intrinsics::sub_with_overflow,
+        intrinsics::mul_with_overflow }
+}
+
 #[cfg(target_pointer_width = "32")]
 #[lang = "isize"]
 impl isize {
@@ -2188,6 +2197,18 @@ impl u64 {
         intrinsics::mul_with_overflow }
 }
 
+#[cfg(target_pointer_width = "16")]
+#[lang = "usize"]
+impl usize {
+    uint_impl! { u16, 16,
+        intrinsics::ctpop,
+        intrinsics::ctlz,
+        intrinsics::cttz,
+        intrinsics::bswap,
+        intrinsics::add_with_overflow,
+        intrinsics::sub_with_overflow,
+        intrinsics::mul_with_overflow }
+}
 #[cfg(target_pointer_width = "32")]
 #[lang = "usize"]
 impl usize {
index 0c7d16a41bc7e8968b6501b6c57378097ee939e0..685c52e271ec0c924ab9ca495b45b231bd7754aa 100644 (file)
@@ -14,6 +14,8 @@
 
 #![stable(feature = "rust1", since = "1.0.0")]
 
+#[cfg(target_pointer_width = "16")]
+uint_module! { usize, 16 }
 #[cfg(target_pointer_width = "32")]
 uint_module! { usize, 32 }
 #[cfg(target_pointer_width = "64")]
index 04e8bc4913bdc96cc5ce66f7a371f475c2dddbf0..4857817e84e4fcc17bdc23401643f1d5ead6e534 100644 (file)
@@ -292,6 +292,12 @@ fn neg(self) -> Self {
 mod shift_max {
     #![allow(non_upper_case_globals)]
 
+    #[cfg(target_pointer_width = "16")]
+    mod platform {
+        pub const usize: u32 = super::u16;
+        pub const isize: u32 = super::i16;
+    }
+
     #[cfg(target_pointer_width = "32")]
     mod platform {
         pub const usize: u32 = super::u32;
index 19226d81f16882000966fcd514a046d1f16abe86..6b2122451db8fbf164190d84d485a09067229113 100644 (file)
 //!
 //! Their definition should always match the ABI defined in `rustc::back::abi`.
 
-use clone::Clone;
-use marker::Copy;
-use mem;
-
-/// The representation of a slice like `&[T]`.
-///
-/// This struct is guaranteed to have the layout of types like `&[T]`,
-/// `&str`, and `Box<[T]>`, but is not the type of such slices
-/// (e.g. the fields are not directly accessible on a `&[T]`) nor does
-/// it control that layout (changing the definition will not change
-/// the layout of a `&[T]`). It is only designed to be used by unsafe
-/// code that needs to manipulate the low-level details.
-///
-/// However, it is not recommended to use this type for such code,
-/// since there are alternatives which may be safer:
-///
-/// - Creating a slice from a data pointer and length can be done with
-///   `std::slice::from_raw_parts` or `std::slice::from_raw_parts_mut`
-///   instead of `std::mem::transmute`ing a value of type `Slice`.
-/// - Extracting the data pointer and length from a slice can be
-///   performed with the `as_ptr` (or `as_mut_ptr`) and `len`
-///   methods.
-///
-/// If one does decide to convert a slice value to a `Slice`, the
-/// `Repr` trait in this module provides a method for a safe
-/// conversion from `&[T]` (and `&str`) to a `Slice`, more type-safe
-/// than a call to `transmute`.
-///
-/// # Examples
-///
-/// ```
-/// #![feature(raw)]
-///
-/// use std::raw::{self, Repr};
-///
-/// let slice: &[u16] = &[1, 2, 3, 4];
-///
-/// let repr: raw::Slice<u16> = slice.repr();
-/// println!("data pointer = {:?}, length = {}", repr.data, repr.len);
-/// ```
-#[repr(C)]
-#[allow(missing_debug_implementations)]
-#[rustc_deprecated(reason = "use raw accessors/constructors in `slice` module",
-                   since = "1.9.0")]
-#[unstable(feature = "raw", issue = "27751")]
-pub struct Slice<T> {
-    pub data: *const T,
-    pub len: usize,
-}
-
-#[allow(deprecated)]
-impl<T> Copy for Slice<T> {}
-#[allow(deprecated)]
-impl<T> Clone for Slice<T> {
-    fn clone(&self) -> Slice<T> { *self }
-}
-
 /// The representation of a trait object like `&SomeTrait`.
 ///
 /// This struct has the same layout as types like `&SomeTrait` and
@@ -154,22 +97,3 @@ pub struct TraitObject {
     pub data: *mut (),
     pub vtable: *mut (),
 }
-
-/// This trait is meant to map equivalences between raw structs and their
-/// corresponding rust values.
-#[rustc_deprecated(reason = "use raw accessors/constructors in `slice` module",
-                   since = "1.9.0")]
-#[unstable(feature = "raw", issue = "27751")]
-pub unsafe trait Repr<T> {
-    /// This function "unwraps" a rust value (without consuming it) into its raw
-    /// struct representation. This can be used to read/write different values
-    /// for the struct. This is a safe method because by default it does not
-    /// enable write-access to the fields of the return value in safe code.
-    #[inline]
-    fn repr(&self) -> T { unsafe { mem::transmute_copy(&self) } }
-}
-
-#[allow(deprecated)]
-unsafe impl<T> Repr<Slice<T>> for [T] {}
-#[allow(deprecated)]
-unsafe impl Repr<Slice<u8>> for str {}
index 32b81ab7f53a8ab19b69487bf98167ad0dd964a7..5fc15fae5199bd344a1920248201adb721fd92ad 100644 (file)
@@ -17,7 +17,7 @@
 use self::pattern::Pattern;
 use self::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
 
-use char::{self, CharExt};
+use char;
 use clone::Clone;
 use convert::AsRef;
 use default::Default;
@@ -1664,40 +1664,6 @@ fn trim_right_matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> &'a str
         where P::Searcher: ReverseSearcher<'a>;
     #[stable(feature = "is_char_boundary", since = "1.9.0")]
     fn is_char_boundary(&self, index: usize) -> bool;
-    #[unstable(feature = "str_char",
-               reason = "often replaced by char_indices, this method may \
-                         be removed in favor of just char_at() or eventually \
-                         removed altogether",
-               issue = "27754")]
-    #[rustc_deprecated(reason = "use slicing plus chars() plus len_utf8",
-                       since = "1.9.0")]
-    fn char_range_at(&self, start: usize) -> CharRange;
-    #[unstable(feature = "str_char",
-               reason = "often replaced by char_indices, this method may \
-                         be removed in favor of just char_at_reverse() or \
-                         eventually removed altogether",
-               issue = "27754")]
-    #[rustc_deprecated(reason = "use slicing plus chars().rev() plus len_utf8",
-                       since = "1.9.0")]
-    fn char_range_at_reverse(&self, start: usize) -> CharRange;
-    #[unstable(feature = "str_char",
-               reason = "frequently replaced by the chars() iterator, this \
-                         method may be removed or possibly renamed in the \
-                         future; it is normally replaced by chars/char_indices \
-                         iterators or by getting the first char from a \
-                         subslice",
-               issue = "27754")]
-    #[rustc_deprecated(reason = "use slicing plus chars()",
-                       since = "1.9.0")]
-    fn char_at(&self, i: usize) -> char;
-    #[unstable(feature = "str_char",
-               reason = "see char_at for more details, but reverse semantics \
-                         are also somewhat unclear, especially with which \
-                         cases generate panics",
-               issue = "27754")]
-    #[rustc_deprecated(reason = "use slicing plus chars().rev()",
-                       since = "1.9.0")]
-    fn char_at_reverse(&self, i: usize) -> char;
     #[stable(feature = "core", since = "1.6.0")]
     fn as_bytes(&self) -> &[u8];
     #[stable(feature = "core", since = "1.6.0")]
@@ -1710,14 +1676,6 @@ fn rfind<'a, P: Pattern<'a>>(&'a self, pat: P) -> Option<usize>
     fn split_at(&self, mid: usize) -> (&str, &str);
     #[stable(feature = "core", since = "1.6.0")]
     fn split_at_mut(&mut self, mid: usize) -> (&mut str, &mut str);
-    #[unstable(feature = "str_char",
-               reason = "awaiting conventions about shifting and slices and \
-                         may not be warranted with the existence of the chars \
-                         and/or char_indices iterators",
-               issue = "27754")]
-    #[rustc_deprecated(reason = "use chars() plus Chars::as_str",
-                       since = "1.9.0")]
-    fn slice_shift_char(&self) -> Option<(char, &str)>;
     #[stable(feature = "core", since = "1.6.0")]
     fn as_ptr(&self) -> *const u8;
     #[stable(feature = "core", since = "1.6.0")]
@@ -1946,55 +1904,6 @@ fn is_char_boundary(&self, index: usize) -> bool {
         }
     }
 
-    #[inline]
-    fn char_range_at(&self, i: usize) -> CharRange {
-        let (c, n) = char_range_at_raw(self.as_bytes(), i);
-        CharRange { ch: unsafe { char::from_u32_unchecked(c) }, next: n }
-    }
-
-    #[inline]
-    fn char_range_at_reverse(&self, start: usize) -> CharRange {
-        let mut prev = start;
-
-        prev = prev.saturating_sub(1);
-        if self.as_bytes()[prev] < 128 {
-            return CharRange{ch: self.as_bytes()[prev] as char, next: prev}
-        }
-
-        // Multibyte case is a fn to allow char_range_at_reverse to inline cleanly
-        fn multibyte_char_range_at_reverse(s: &str, mut i: usize) -> CharRange {
-            // while there is a previous byte == 10......
-            while i > 0 && s.as_bytes()[i] & !CONT_MASK == TAG_CONT_U8 {
-                i -= 1;
-            }
-
-            let first= s.as_bytes()[i];
-            let w = UTF8_CHAR_WIDTH[first as usize];
-            assert!(w != 0);
-
-            let mut val = utf8_first_byte(first, w as u32);
-            val = utf8_acc_cont_byte(val, s.as_bytes()[i + 1]);
-            if w > 2 { val = utf8_acc_cont_byte(val, s.as_bytes()[i + 2]); }
-            if w > 3 { val = utf8_acc_cont_byte(val, s.as_bytes()[i + 3]); }
-
-            CharRange {ch: unsafe { char::from_u32_unchecked(val) }, next: i}
-        }
-
-        multibyte_char_range_at_reverse(self, prev)
-    }
-
-    #[inline]
-    #[allow(deprecated)]
-    fn char_at(&self, i: usize) -> char {
-        self.char_range_at(i).ch
-    }
-
-    #[inline]
-    #[allow(deprecated)]
-    fn char_at_reverse(&self, i: usize) -> char {
-        self.char_range_at_reverse(i).ch
-    }
-
     #[inline]
     fn as_bytes(&self) -> &[u8] {
         unsafe { mem::transmute(self) }
@@ -2041,18 +1950,6 @@ fn split_at_mut(&mut self, mid: usize) -> (&mut str, &mut str) {
         }
     }
 
-    #[inline]
-    #[allow(deprecated)]
-    fn slice_shift_char(&self) -> Option<(char, &str)> {
-        if self.is_empty() {
-            None
-        } else {
-            let ch = self.char_at(0);
-            let next_s = unsafe { self.slice_unchecked(ch.len_utf8(), self.len()) };
-            Some((ch, next_s))
-        }
-    }
-
     #[inline]
     fn as_ptr(&self) -> *const u8 {
         self as *const str as *const u8
@@ -2078,31 +1975,6 @@ fn as_ref(&self) -> &[u8] {
     }
 }
 
-/// Pluck a code point out of a UTF-8-like byte slice and return the
-/// index of the next code point.
-#[inline]
-fn char_range_at_raw(bytes: &[u8], i: usize) -> (u32, usize) {
-    if bytes[i] < 128 {
-        return (bytes[i] as u32, i + 1);
-    }
-
-    // Multibyte case is a fn to allow char_range_at to inline cleanly
-    fn multibyte_char_range_at(bytes: &[u8], i: usize) -> (u32, usize) {
-        let first = bytes[i];
-        let w = UTF8_CHAR_WIDTH[first as usize];
-        assert!(w != 0);
-
-        let mut val = utf8_first_byte(first, w as u32);
-        val = utf8_acc_cont_byte(val, bytes[i + 1]);
-        if w > 2 { val = utf8_acc_cont_byte(val, bytes[i + 2]); }
-        if w > 3 { val = utf8_acc_cont_byte(val, bytes[i + 3]); }
-
-        (val, i + w as usize)
-    }
-
-    multibyte_char_range_at(bytes, i)
-}
-
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<'a> Default for &'a str {
     fn default() -> &'a str { "" }
index c0b22274ee9d628f09c36b4b7081629844d102f5..a635620d12abd3618e3acda562b0c2ed49686153 100644 (file)
@@ -158,20 +158,6 @@ fn accessor(&self) -> Ref<u32> {
     assert_eq!(*d, 7);
 }
 
-#[test]
-#[allow(deprecated)]
-fn ref_filter_map_accessor() {
-    struct X(RefCell<Result<u32, ()>>);
-    impl X {
-        fn accessor(&self) -> Option<Ref<u32>> {
-            Ref::filter_map(self.0.borrow(), |r| r.as_ref().ok())
-        }
-    }
-    let x = X(RefCell::new(Ok(7)));
-    let d: Ref<u32> = x.accessor().unwrap();
-    assert_eq!(*d, 7);
-}
-
 #[test]
 fn ref_mut_map_accessor() {
     struct X(RefCell<(u32, char)>);
@@ -189,24 +175,6 @@ fn accessor(&self) -> RefMut<u32> {
     assert_eq!(*x.0.borrow(), (8, 'z'));
 }
 
-#[test]
-#[allow(deprecated)]
-fn ref_mut_filter_map_accessor() {
-    struct X(RefCell<Result<u32, ()>>);
-    impl X {
-        fn accessor(&self) -> Option<RefMut<u32>> {
-            RefMut::filter_map(self.0.borrow_mut(), |r| r.as_mut().ok())
-        }
-    }
-    let x = X(RefCell::new(Ok(7)));
-    {
-        let mut d: RefMut<u32> = x.accessor().unwrap();
-        assert_eq!(*d, 7);
-        *d += 1;
-    }
-    assert_eq!(*x.0.borrow(), Ok(8));
-}
-
 #[test]
 fn as_unsafe_cell() {
     let c1: Cell<usize> = Cell::new(0);
index 5bc08376d257c5ceff5a7e2b1a722bbcdbc15d45..01bafe49a7acd4820e4ca9bc657c9308d5d36670 100644 (file)
@@ -18,6 +18,13 @@ fn size_of_basic() {
     assert_eq!(size_of::<u64>(), 8);
 }
 
+#[test]
+#[cfg(target_pointer_width = "16")]
+fn size_of_16() {
+    assert_eq!(size_of::<usize>(), 2);
+    assert_eq!(size_of::<*const usize>(), 2);
+}
+
 #[test]
 #[cfg(target_pointer_width = "32")]
 fn size_of_32() {
@@ -47,6 +54,13 @@ fn align_of_basic() {
     assert_eq!(align_of::<u32>(), 4);
 }
 
+#[test]
+#[cfg(target_pointer_width = "16")]
+fn align_of_16() {
+    assert_eq!(align_of::<usize>(), 2);
+    assert_eq!(align_of::<*const usize>(), 2);
+}
+
 #[test]
 #[cfg(target_pointer_width = "32")]
 fn align_of_32() {
index b19b5465a1235be3323363cdc11838739b593029..45d85899e99d33e291b2bf3259881b46cc5365d7 160000 (submodule)
@@ -1 +1 @@
-Subproject commit b19b5465a1235be3323363cdc11838739b593029
+Subproject commit 45d85899e99d33e291b2bf3259881b46cc5365d7
index 84c84a7ed57a2656af76de3a0a0428d53c0d10f3..73b96651b05e27826c031d55267cff16415a7bf1 100644 (file)
 
 use std::fmt::Debug;
 
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
+macro_rules! try_opt {
+    ($e:expr) => (
+        match $e {
+            Some(r) => r,
+            None => return None,
+        }
+    )
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
 pub enum DepNode<D: Clone + Debug> {
     // The `D` type is "how definitions are identified".
     // During compilation, it is always `DefId`, but when serializing
@@ -116,7 +125,7 @@ pub enum DepNode<D: Clone + Debug> {
     // which would yield an overly conservative dep-graph.
     TraitItems(D),
     ReprHints(D),
-    TraitSelect(D),
+    TraitSelect(D, Vec<D>),
 }
 
 impl<D: Clone + Debug> DepNode<D> {
@@ -212,7 +221,11 @@ pub fn map_def<E, OP>(&self, mut op: OP) -> Option<DepNode<E>>
             TraitImpls(ref d) => op(d).map(TraitImpls),
             TraitItems(ref d) => op(d).map(TraitItems),
             ReprHints(ref d) => op(d).map(ReprHints),
-            TraitSelect(ref d) => op(d).map(TraitSelect),
+            TraitSelect(ref d, ref type_ds) => {
+                let d = try_opt!(op(d));
+                let type_ds = try_opt!(type_ds.iter().map(|d| op(d)).collect());
+                Some(TraitSelect(d, type_ds))
+            }
         }
     }
 }
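
The `try_opt!` macro introduced above gives `Option` the same early-return ergonomics that `try!` gives `Result`: unwrap a `Some`, or return `None` from the enclosing function. That is what lets `map_def` bail out as soon as any component of the new `TraitSelect(D, Vec<D>)` variant fails to map. A self-contained sketch of the behaviour (`double_both` is illustrative only):

macro_rules! try_opt {
    ($e:expr) => (
        match $e {
            Some(r) => r,
            None => return None,
        }
    )
}

fn double_both(a: Option<u32>, b: Option<u32>) -> Option<(u32, u32)> {
    // Either argument being None short-circuits the whole computation.
    let a = try_opt!(a);
    let b = try_opt!(b);
    Some((a * 2, b * 2))
}

fn main() {
    assert_eq!(double_both(Some(1), Some(2)), Some((2, 4)));
    assert_eq!(double_both(Some(1), None), None);
}
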
index 93248edb197c625e9f4fec25559171ff4245ece5..7a780c1d4ae2478bc6d80d2e9c932bb0063e4b1e 100644 (file)
@@ -47,26 +47,26 @@ pub fn contains_node(&self, node: &DepNode<D>) -> bool {
         self.indices.contains_key(&node)
     }
 
-    pub fn nodes(&self) -> Vec<DepNode<D>> {
+    pub fn nodes(&self) -> Vec<&DepNode<D>> {
         self.graph.all_nodes()
                   .iter()
-                  .map(|n| n.data.clone())
+                  .map(|n| &n.data)
                   .collect()
     }
 
-    pub fn edges(&self) -> Vec<(DepNode<D>,DepNode<D>)> {
+    pub fn edges(&self) -> Vec<(&DepNode<D>,&DepNode<D>)> {
         self.graph.all_edges()
                   .iter()
                   .map(|edge| (edge.source(), edge.target()))
-                  .map(|(s, t)| (self.graph.node_data(s).clone(),
-                                 self.graph.node_data(t).clone()))
+                  .map(|(s, t)| (self.graph.node_data(s),
+                                 self.graph.node_data(t)))
                   .collect()
     }
 
-    fn reachable_nodes(&self, node: DepNode<D>, direction: Direction) -> Vec<DepNode<D>> {
-        if let Some(&index) = self.indices.get(&node) {
+    fn reachable_nodes(&self, node: &DepNode<D>, direction: Direction) -> Vec<&DepNode<D>> {
+        if let Some(&index) = self.indices.get(node) {
             self.graph.depth_traverse(index, direction)
-                      .map(|s| self.graph.node_data(s).clone())
+                      .map(|s| self.graph.node_data(s))
                       .collect()
         } else {
             vec![]
@@ -75,20 +75,20 @@ fn reachable_nodes(&self, node: DepNode<D>, direction: Direction) -> Vec<DepNode
 
     /// All nodes reachable from `node`. In other words, things that
     /// will have to be recomputed if `node` changes.
-    pub fn transitive_successors(&self, node: DepNode<D>) -> Vec<DepNode<D>> {
+    pub fn transitive_successors(&self, node: &DepNode<D>) -> Vec<&DepNode<D>> {
         self.reachable_nodes(node, OUTGOING)
     }
 
     /// All nodes that can reach `node`.
-    pub fn transitive_predecessors(&self, node: DepNode<D>) -> Vec<DepNode<D>> {
+    pub fn transitive_predecessors(&self, node: &DepNode<D>) -> Vec<&DepNode<D>> {
         self.reachable_nodes(node, INCOMING)
     }
 
     /// Just the outgoing edges from `node`.
-    pub fn immediate_successors(&self, node: DepNode<D>) -> Vec<DepNode<D>> {
+    pub fn immediate_successors(&self, node: &DepNode<D>) -> Vec<&DepNode<D>> {
         if let Some(&index) = self.indices.get(&node) {
             self.graph.successor_nodes(index)
-                      .map(|s| self.graph.node_data(s).clone())
+                      .map(|s| self.graph.node_data(s))
                       .collect()
         } else {
             vec![]
index 13151d169fc3f78843405d5a1829ec9d2823d985..c43d493d176757b023c0b6a73435bf4b999a4d94 100644 (file)
 
 pub struct DepTask<'graph> {
     data: &'graph DepGraphThreadData,
-    key: DepNode<DefId>,
+    key: Option<DepNode<DefId>>,
 }
 
 impl<'graph> DepTask<'graph> {
     pub fn new(data: &'graph DepGraphThreadData, key: DepNode<DefId>)
                -> DepTask<'graph> {
-        data.enqueue(DepMessage::PushTask(key));
-        DepTask { data: data, key: key }
+        data.enqueue(DepMessage::PushTask(key.clone()));
+        DepTask { data: data, key: Some(key) }
     }
 }
 
 impl<'graph> Drop for DepTask<'graph> {
     fn drop(&mut self) {
-        self.data.enqueue(DepMessage::PopTask(self.key));
+        self.data.enqueue(DepMessage::PopTask(self.key.take().unwrap()));
     }
 }
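
Because `DepNode` now carries a `Vec<D>` in `TraitSelect`, it is no longer `Copy`, so `DepTask` keeps its key in an `Option` and moves it out with `take()` on drop instead of copying it. A generic sketch of that pattern (the `Task` type is illustrative, not the rustc type):

struct Task {
    key: Option<String>,
}

impl Drop for Task {
    fn drop(&mut self) {
        // `take()` moves the key out and leaves `None` behind, so the key
        // never needs to be Copy or Clone at drop time.
        let key = self.key.take().unwrap();
        println!("popping task {}", key);
    }
}

fn main() {
    let _task = Task { key: Some("TraitSelect".to_string()) };
    // `_task` is dropped at the end of main and prints "popping task TraitSelect".
}
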
 
index 9133b4d22eeb2ec0693757d90045b6f8e0140817..5dd71db2f1832870b8e87171499b772c32acf02e 100644 (file)
@@ -39,7 +39,7 @@ impl<'visit, 'tcx, F, V> Visitor<'tcx> for TrackingVisitor<'visit, 'tcx, F, V>
         fn visit_item(&mut self, i: &'tcx hir::Item) {
             let item_def_id = self.tcx.map.local_def_id(i.id);
             let task_id = (self.dep_node_fn)(item_def_id);
-            let _task = self.tcx.dep_graph.in_task(task_id);
+            let _task = self.tcx.dep_graph.in_task(task_id.clone());
             debug!("Started task {:?}", task_id);
             self.tcx.dep_graph.read(DepNode::Hir(item_def_id));
             self.visitor.visit_item(i);
index bd7c0f683d1c54cde7448ee8a3843b62edb2b2c1..1ba722b6baee76e79594b9d21d47b412a83f254a 100644 (file)
@@ -1647,5 +1647,5 @@ fn cookie() -> ! { // error: definition of an unknown language item: `cookie`
     E0490, // a value of type `..` is borrowed for too long
     E0491, // in type `..`, reference has a longer lifetime than the data it...
     E0495, // cannot infer an appropriate lifetime due to conflicting requirements
-    E0525, // expected a closure that implements `..` but this closure only implements `..`
+    E0525  // expected a closure that implements `..` but this closure only implements `..`
 }
index 84a666ebef1bfdc9c21fda93dcd01e1ea1906b1a..d47de676e796032f581973190125cfbc9fc57437 100644 (file)
@@ -132,6 +132,9 @@ fn visit_ty(&mut self, t: &'v Ty) {
     fn visit_generics(&mut self, g: &'v Generics) {
         walk_generics(self, g)
     }
+    fn visit_where_predicate(&mut self, predicate: &'v WherePredicate) {
+        walk_where_predicate(self, predicate)
+    }
     fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Block, s: Span, _: NodeId) {
         walk_fn(self, fk, fd, b, s)
     }
@@ -529,29 +532,34 @@ pub fn walk_generics<'v, V: Visitor<'v>>(visitor: &mut V, generics: &'v Generics
         walk_list!(visitor, visit_ty, &param.default);
     }
     walk_list!(visitor, visit_lifetime_def, &generics.lifetimes);
-    for predicate in &generics.where_clause.predicates {
-        match predicate {
-            &WherePredicate::BoundPredicate(WhereBoundPredicate{ref bounded_ty,
-                                                                          ref bounds,
-                                                                          ref bound_lifetimes,
-                                                                          ..}) => {
-                visitor.visit_ty(bounded_ty);
-                walk_list!(visitor, visit_ty_param_bound, bounds);
-                walk_list!(visitor, visit_lifetime_def, bound_lifetimes);
-            }
-            &WherePredicate::RegionPredicate(WhereRegionPredicate{ref lifetime,
-                                                                            ref bounds,
-                                                                            ..}) => {
-                visitor.visit_lifetime(lifetime);
-                walk_list!(visitor, visit_lifetime, bounds);
-            }
-            &WherePredicate::EqPredicate(WhereEqPredicate{id,
-                                                                    ref path,
-                                                                    ref ty,
-                                                                    ..}) => {
-                visitor.visit_path(path, id);
-                visitor.visit_ty(ty);
-            }
+    walk_list!(visitor, visit_where_predicate, &generics.where_clause.predicates);
+}
+
+pub fn walk_where_predicate<'v, V: Visitor<'v>>(
+    visitor: &mut V,
+    predicate: &'v WherePredicate)
+{
+    match predicate {
+        &WherePredicate::BoundPredicate(WhereBoundPredicate{ref bounded_ty,
+                                                            ref bounds,
+                                                            ref bound_lifetimes,
+                                                            ..}) => {
+            visitor.visit_ty(bounded_ty);
+            walk_list!(visitor, visit_ty_param_bound, bounds);
+            walk_list!(visitor, visit_lifetime_def, bound_lifetimes);
+        }
+        &WherePredicate::RegionPredicate(WhereRegionPredicate{ref lifetime,
+                                                              ref bounds,
+                                                              ..}) => {
+            visitor.visit_lifetime(lifetime);
+            walk_list!(visitor, visit_lifetime, bounds);
+        }
+        &WherePredicate::EqPredicate(WhereEqPredicate{id,
+                                                      ref path,
+                                                      ref ty,
+                                                      ..}) => {
+            visitor.visit_path(path, id);
+            visitor.visit_ty(ty);
         }
     }
 }
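
The refactor above follows the usual intravisit shape: the `match` that used to live inline in `walk_generics` becomes a free `walk_where_predicate` function, and the `Visitor` trait gains a `visit_where_predicate` hook whose default simply delegates to it, so an implementor can intercept where-clause predicates without re-implementing the traversal. A generic, self-contained sketch of that hook-plus-walk structure (toy types, not the rustc API):

trait Visitor {
    // The default implementation just delegates to the free walk function.
    fn visit_item(&mut self, item: &str) {
        walk_item(self, item)
    }
}

fn walk_item<V: Visitor + ?Sized>(_visitor: &mut V, item: &str) {
    println!("default traversal of {}", item);
}

struct Counting {
    seen: usize,
}

impl Visitor for Counting {
    fn visit_item(&mut self, item: &str) {
        self.seen += 1;          // custom behaviour first...
        walk_item(self, item);   // ...then fall back to the default traversal.
    }
}

fn main() {
    let mut v = Counting { seen: 0 };
    v.visit_item("where-predicate");
    assert_eq!(v.seen, 1);
}
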
index 8afee54c4bc683fb07ccb97942678ef70da17e8f..2f67042ca1c25f8f0512db17ed3752691c6d8608 100644 (file)
@@ -77,6 +77,7 @@
 use hir;
 use hir::print as pprust;
 
+use lint;
 use hir::def::Def;
 use hir::def_id::DefId;
 use infer::{self, TypeOrigin};
@@ -1017,6 +1018,27 @@ fn give_suggestion(&self, err: &mut DiagnosticBuilder, same_regions: &[SameRegio
         let (fn_decl, generics) = rebuilder.rebuild();
         self.give_expl_lifetime_param(err, &fn_decl, unsafety, constness, name, &generics, span);
     }
+
+    pub fn issue_32330_warnings(&self, span: Span, issue32330s: &[ty::Issue32330]) {
+        for issue32330 in issue32330s {
+            match *issue32330 {
+                ty::Issue32330::WontChange => { }
+                ty::Issue32330::WillChange { fn_def_id, region_name } => {
+                    self.tcx.sess.add_lint(
+                        lint::builtin::HR_LIFETIME_IN_ASSOC_TYPE,
+                        ast::CRATE_NODE_ID,
+                        span,
+                        format!("lifetime parameter `{0}` declared on fn `{1}` \
+                                 appears only in the return type, \
+                                 but here is required to be higher-ranked, \
+                                 which means that `{0}` must appear in both \
+                                 argument and return types",
+                                region_name,
+                                self.tcx.item_path_str(fn_def_id)));
+                }
+            }
+        }
+    }
 }
 
 struct RebuildPathInfo<'a> {
@@ -1129,7 +1151,7 @@ fn extract_anon_nums_and_names(&self, same_regions: &SameRegions)
                 ty::BrAnon(i) => {
                     anon_nums.insert(i);
                 }
-                ty::BrNamed(_, name) => {
+                ty::BrNamed(_, name, _) => {
                     region_names.insert(name);
                 }
                 _ => ()
@@ -1143,7 +1165,7 @@ fn extract_all_region_names(&self) -> HashSet<ast::Name> {
         for sr in self.same_regions {
             for br in &sr.regions {
                 match *br {
-                    ty::BrNamed(_, name) => {
+                    ty::BrNamed(_, name, _) => {
                         all_region_names.insert(name);
                     }
                     _ => ()
@@ -1923,3 +1945,4 @@ fn name_to_dummy_lifetime(name: ast::Name) -> hir::Lifetime {
                     span: codemap::DUMMY_SP,
                     name: name }
 }
+
index 6814d50107f6848fb95faab14abd786690698963..84b72d9be60a1d3f71d573e27615d695c5e88e5d 100644 (file)
 //! Helper routines for higher-ranked things. See the `doc` module at
 //! the end of the file for details.
 
-use super::{CombinedSnapshot, InferCtxt, HigherRankedType, SkolemizationMap};
+use super::{CombinedSnapshot,
+            InferCtxt,
+            LateBoundRegion,
+            HigherRankedType,
+            SubregionOrigin,
+            SkolemizationMap};
 use super::combine::CombineFields;
+use super::region_inference::{TaintDirections};
 
 use ty::{self, TyCtxt, Binder, TypeFoldable};
 use ty::error::TypeError;
 use syntax::codemap::Span;
 use util::nodemap::{FnvHashMap, FnvHashSet};
 
+pub struct HrMatchResult<U> {
+    pub value: U,
+
+    /// Normally, when we do a higher-ranked match operation, we
+    /// expect all higher-ranked regions to be constrained as part of
+    /// the match operation. However, in the transition period for
+    /// #32330, it can happen that we sometimes have unconstrained
+    /// regions that get instantiated with fresh variables. In that
+    /// case, we collect the set of unconstrained bound regions here
+    /// and replace them with fresh variables.
+    pub unconstrained_regions: Vec<ty::BoundRegion>,
+}
+
 impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
     pub fn higher_ranked_sub<T>(&self, a: &Binder<T>, b: &Binder<T>)
                                 -> RelateResult<'tcx, Binder<T>>
@@ -39,11 +58,13 @@ pub fn higher_ranked_sub<T>(&self, a: &Binder<T>, b: &Binder<T>)
         // Start a snapshot so we can examine "all bindings that were
         // created as part of this type comparison".
         return self.infcx.commit_if_ok(|snapshot| {
+            let span = self.trace.origin.span();
+
             // First, we instantiate each bound region in the subtype with a fresh
             // region variable.
             let (a_prime, _) =
                 self.infcx.replace_late_bound_regions_with_fresh_var(
-                    self.trace.origin.span(),
+                    span,
                     HigherRankedType,
                     a);
 
@@ -60,7 +81,11 @@ pub fn higher_ranked_sub<T>(&self, a: &Binder<T>, b: &Binder<T>)
 
             // Presuming type comparison succeeds, we need to check
             // that the skolemized regions do not "leak".
-            self.infcx.leak_check(!self.a_is_expected, &skol_map, snapshot)?;
+            self.infcx.leak_check(!self.a_is_expected, span, &skol_map, snapshot)?;
+
+            // We are finished with the skolemized regions now so pop
+            // them off.
+            self.infcx.pop_skolemized(skol_map, snapshot);
 
             debug!("higher_ranked_sub: OK result={:?}", result);
 
@@ -68,6 +93,134 @@ pub fn higher_ranked_sub<T>(&self, a: &Binder<T>, b: &Binder<T>)
         });
     }
 
+    /// The value consists of a pair `(t, u)` where `t` is the
+    /// *matcher* and `u` is a *value*. The idea is to find a
+    /// substitution `S` such that `S(t) == b`, and then return
+    /// `S(u)`. In other words, find values for the late-bound regions
+    /// in `a` that can make `t == b` and then replace the LBR in `u`
+    /// with those values.
+    ///
+    /// This routine is (as of this writing) used in trait matching,
+    /// particularly projection.
+    ///
+    /// NB. It should not happen that there are LBR appearing in `U`
+    /// that do not appear in `T`. If that happens, those regions are
+    /// unconstrained, and this routine replaces them with `'static`.
+    pub fn higher_ranked_match<T, U>(&self,
+                                     span: Span,
+                                     a_pair: &Binder<(T, U)>,
+                                     b_match: &T)
+                                     -> RelateResult<'tcx, HrMatchResult<U>>
+        where T: Relate<'tcx>,
+              U: TypeFoldable<'tcx>
+    {
+        debug!("higher_ranked_match(a={:?}, b={:?})",
+               a_pair, b_match);
+
+        // Start a snapshot so we can examine "all bindings that were
+        // created as part of this type comparison".
+        return self.infcx.commit_if_ok(|snapshot| {
+            // First, we instantiate each bound region in the matcher
+            // with a skolemized region.
+            let ((a_match, a_value), skol_map) =
+                self.infcx.skolemize_late_bound_regions(a_pair, snapshot);
+
+            debug!("higher_ranked_match: a_match={:?}", a_match);
+            debug!("higher_ranked_match: skol_map={:?}", skol_map);
+
+            // Equate types now that bound regions have been replaced.
+            try!(self.equate().relate(&a_match, &b_match));
+
+            // Map each skolemized region to a vector of other regions that it
+            // must be equated with. (Note that this vector may include other
+            // skolemized regions from `skol_map`.)
+            let skol_resolution_map: FnvHashMap<_, _> =
+                skol_map
+                .iter()
+                .map(|(&br, &skol)| {
+                    let tainted_regions =
+                        self.infcx.tainted_regions(snapshot,
+                                                   skol,
+                                                   TaintDirections::incoming()); // [1]
+
+                    // [1] this routine executes after the skolemized
+                    // regions have been *equated* with something
+                    // else, so examining the incoming edges ought to
+                    // be enough to collect all constraints
+
+                    (skol, (br, tainted_regions))
+                })
+                .collect();
+
+            // For each skolemized region, pick a representative -- which can
+            // be any region from the sets above, except for other members of
+            // `skol_map`. There should always be a representative if things
+            // are properly well-formed.
+            let mut unconstrained_regions = vec![];
+            let skol_representatives: FnvHashMap<_, _> =
+                skol_resolution_map
+                .iter()
+                .map(|(&skol, &(br, ref regions))| {
+                    let representative =
+                        regions.iter()
+                               .filter(|r| !skol_resolution_map.contains_key(r))
+                               .cloned()
+                               .next()
+                               .unwrap_or_else(|| { // [1]
+                                   unconstrained_regions.push(br);
+                                   self.infcx.next_region_var(
+                                       LateBoundRegion(span, br, HigherRankedType))
+                               });
+
+                    // [1] There should always be a representative,
+                    // unless the higher-ranked region did not appear
+                    // in the values being matched. We should reject
+                    // as ill-formed cases that can lead to this, but
+                    // right now we sometimes issue warnings (see
+                    // #32330).
+
+                    (skol, representative)
+                })
+                .collect();
+
+            // Equate all the members of each skolemization set with the
+            // representative.
+            for (skol, &(_br, ref regions)) in &skol_resolution_map {
+                let representative = &skol_representatives[skol];
+                debug!("higher_ranked_match: \
+                        skol={:?} representative={:?} regions={:?}",
+                       skol, representative, regions);
+                for region in regions.iter()
+                                     .filter(|&r| !skol_resolution_map.contains_key(r))
+                                     .filter(|&r| r != representative)
+                {
+                    let origin = SubregionOrigin::Subtype(self.trace.clone());
+                    self.infcx.region_vars.make_eqregion(origin,
+                                                         *representative,
+                                                         *region);
+                }
+            }
+
+            // Replace the skolemized regions appearing in value with
+            // their representatives
+            let a_value =
+                fold_regions_in(
+                    self.tcx(),
+                    &a_value,
+                    |r, _| skol_representatives.get(&r).cloned().unwrap_or(r));
+
+            debug!("higher_ranked_match: value={:?}", a_value);
+
+            // We are now done with these skolemized variables.
+            self.infcx.pop_skolemized(skol_map, snapshot);
+
+            Ok(HrMatchResult {
+                value: a_value,
+                unconstrained_regions: unconstrained_regions,
+            })
+        });
+    }
+
     pub fn higher_ranked_lub<T>(&self, a: &Binder<T>, b: &Binder<T>)
                                 -> RelateResult<'tcx, Binder<T>>
         where T: Relate<'tcx>
@@ -124,7 +277,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                 return r0;
             }
 
-            let tainted = infcx.tainted_regions(snapshot, r0);
+            let tainted = infcx.tainted_regions(snapshot, r0, TaintDirections::both());
 
             // Variables created during LUB computation which are
             // *related* to regions that pre-date the LUB computation
@@ -219,7 +372,7 @@ fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
                 return r0;
             }
 
-            let tainted = infcx.tainted_regions(snapshot, r0);
+            let tainted = infcx.tainted_regions(snapshot, r0, TaintDirections::both());
 
             let mut a_r = None;
             let mut b_r = None;
@@ -341,8 +494,12 @@ fn fold_regions_in<'a, 'gcx, 'tcx, T, F>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
 }
 
 impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
-    fn tainted_regions(&self, snapshot: &CombinedSnapshot, r: ty::Region) -> Vec<ty::Region> {
-        self.region_vars.tainted(&snapshot.region_vars_snapshot, r)
+    fn tainted_regions(&self,
+                       snapshot: &CombinedSnapshot,
+                       r: ty::Region,
+                       directions: TaintDirections)
+                       -> FnvHashSet<ty::Region> {
+        self.region_vars.tainted(&snapshot.region_vars_snapshot, r, directions)
     }
 
     fn region_vars_confined_to_snapshot(&self,
@@ -422,22 +579,27 @@ fn region_vars_confined_to_snapshot(&self,
         region_vars
     }
 
+    /// Replace all regions bound by `binder` with skolemized regions and
+    /// return a map indicating which bound-region was replaced with what
+    /// skolemized region. This is the first step of checking subtyping
+    /// when higher-ranked things are involved.
+    ///
+    /// **Important:** you must call this function from within a snapshot.
+    /// Moreover, before committing the snapshot, you must eventually call
+    /// either `plug_leaks` or `pop_skolemized` to remove the skolemized
+    /// regions. If you roll back the snapshot (or are using a probe), then
+    /// the pop occurs as part of the rollback, so an explicit call is not
+    /// needed (but is also permitted).
+    ///
+    /// See `README.md` for more details.
     pub fn skolemize_late_bound_regions<T>(&self,
                                            binder: &ty::Binder<T>,
                                            snapshot: &CombinedSnapshot)
                                            -> (T, SkolemizationMap)
         where T : TypeFoldable<'tcx>
     {
-        /*!
-         * Replace all regions bound by `binder` with skolemized regions and
-         * return a map indicating which bound-region was replaced with what
-         * skolemized region. This is the first step of checking subtyping
-         * when higher-ranked things are involved. See `README.md` for more
-         * details.
-         */
-
         let (result, map) = self.tcx.replace_late_bound_regions(binder, |br| {
-            self.region_vars.new_skolemized(br, &snapshot.region_vars_snapshot)
+            self.region_vars.push_skolemized(br, &snapshot.region_vars_snapshot)
         });
 
         debug!("skolemize_bound_regions(binder={:?}, result={:?}, map={:?})",
@@ -448,32 +610,80 @@ pub fn skolemize_late_bound_regions<T>(&self,
         (result, map)
     }
 
+    /// Searches the region constraints created since `snapshot` was started
+    /// and checks to determine whether any of the skolemized regions created
+    /// in `skol_map` would "escape" -- meaning that they are related to
+    /// other regions in some way. If so, the higher-ranked subtyping doesn't
+    /// hold. See `README.md` for more details.
     pub fn leak_check(&self,
                       overly_polymorphic: bool,
+                      span: Span,
                       skol_map: &SkolemizationMap,
                       snapshot: &CombinedSnapshot)
                       -> RelateResult<'tcx, ()>
     {
-        /*!
-         * Searches the region constriants created since `snapshot` was started
-         * and checks to determine whether any of the skolemized regions created
-         * in `skol_map` would "escape" -- meaning that they are related to
-         * other regions in some way. If so, the higher-ranked subtyping doesn't
-         * hold. See `README.md` for more details.
-         */
-
         debug!("leak_check: skol_map={:?}",
                skol_map);
 
+        // ## Issue #32330 warnings
+        //
+        // When Issue #32330 is fixed, a certain number of late-bound
+        // regions (LBR) will become early-bound. We wish to issue
+        // warnings when the result of `leak_check` relies on such LBR, as
+        // that means that compilation will likely start to fail.
+        //
+        // Recall that when we do a "HR subtype" check, we replace all
+        // late-bound regions (LBR) in the subtype with fresh variables,
+        // and skolemize the late-bound regions in the supertype. If those
+        // skolemized regions from the supertype wind up being
+        // super-regions (directly or indirectly) of either
+        //
+        // - another skolemized region; or,
+        // - some region that pre-exists the HR subtype check
+        //   - e.g., a region variable that is not one of those created
+        //     to represent bound regions in the subtype
+        //
+        // then leak-check (and hence the subtype check) fails.
+        //
+        // What will change when we fix #32330 is that some of the LBR in the
+        // subtype may become early-bound. In that case, they would no longer be in
+        // the "permitted set" of variables that can be related to a skolemized
+        // type.
+        //
+        // So the foundation for this warning is to collect variables that we found
+        // to be related to a skolemized type. For each of them, we have a
+        // `BoundRegion` which carries an `Issue32330` flag. We check whether any of
+        // those flags indicate that this variable was created from a lifetime
+        // that will change from late- to early-bound. If so, we issue a warning
+        // indicating that the results of compilation may change.
+        //
+        // This is imperfect, since there are other kinds of code that will not
+        // compile once #32330 is fixed. However, it fixes the errors observed in
+        // practice on crater runs.
+        let mut warnings = vec![];
+
         let new_vars = self.region_vars_confined_to_snapshot(snapshot);
         for (&skol_br, &skol) in skol_map {
-            let tainted = self.tainted_regions(snapshot, skol);
-            for &tainted_region in &tainted {
+            // The inputs to a skolemized variable can only
+            // be itself or other new variables.
+            let incoming_taints = self.tainted_regions(snapshot,
+                                                       skol,
+                                                       TaintDirections::both());
+            for &tainted_region in &incoming_taints {
                 // Each skolemized should only be relatable to itself
                 // or new variables:
                 match tainted_region {
                     ty::ReVar(vid) => {
-                        if new_vars.iter().any(|&x| x == vid) { continue; }
+                        if new_vars.contains(&vid) {
+                            warnings.extend(
+                                match self.region_vars.var_origin(vid) {
+                                    LateBoundRegion(_,
+                                                    ty::BrNamed(_, _, wc),
+                                                    _) => Some(wc),
+                                    _ => None,
+                                });
+                            continue;
+                        }
                     }
                     _ => {
                         if tainted_region == skol { continue; }
@@ -496,6 +706,9 @@ pub fn leak_check(&self,
                 }
             }
         }
+
+        self.issue_32330_warnings(span, &warnings);
+
         Ok(())
     }
 
@@ -533,8 +746,6 @@ pub fn plug_leaks<T>(&self,
                          value: &T) -> T
         where T : TypeFoldable<'tcx>
     {
-        debug_assert!(self.leak_check(false, &skol_map, snapshot).is_ok());
-
         debug!("plug_leaks(skol_map={:?}, value={:?})",
                skol_map,
                value);
@@ -545,9 +756,9 @@ pub fn plug_leaks<T>(&self,
         // these taint sets are mutually disjoint.
         let inv_skol_map: FnvHashMap<ty::Region, ty::BoundRegion> =
             skol_map
-            .into_iter()
-            .flat_map(|(skol_br, skol)| {
-                self.tainted_regions(snapshot, skol)
+            .iter()
+            .flat_map(|(&skol_br, &skol)| {
+                self.tainted_regions(snapshot, skol, TaintDirections::both())
                     .into_iter()
                     .map(move |tainted_region| (tainted_region, skol_br))
             })
@@ -577,6 +788,19 @@ pub fn plug_leaks<T>(&self,
                     // binders, so this assert is satisfied.
                     assert!(current_depth > 1);
 
+                    // since leak-check passed, this skolemized region
+                    // should only have incoming edges from variables
+                    // (which ought not to escape the snapshot, but we
+                    // don't check that) or itself
+                    assert!(
+                        match r {
+                            ty::ReVar(_) => true,
+                            ty::ReSkolemized(_, ref br1) => br == br1,
+                            _ => false,
+                        },
+                        "leak-check would have us replace {:?} with {:?}",
+                        r, br);
+
                     ty::ReLateBound(ty::DebruijnIndex::new(current_depth - 1), br.clone())
                 }
             }
@@ -585,6 +809,27 @@ pub fn plug_leaks<T>(&self,
         debug!("plug_leaks: result={:?}",
                result);
 
+        self.pop_skolemized(skol_map, snapshot);
+
+        debug!("plug_leaks: result={:?}", result);
+
         result
     }
+
+    /// Pops the skolemized regions found in `skol_map` from the region
+    /// inference context. Whenever you create skolemized regions via
+    /// `skolemize_late_bound_regions`, they must be popped before you
+    /// commit the enclosing snapshot (if you do not commit, e.g. within a
+    /// probe or as a result of an error, then this is not necessary, as
+    /// popping happens as part of the rollback).
+    ///
+    /// Note: popping also occurs implicitly as part of `leak_check`.
+    pub fn pop_skolemized(&self,
+                          skol_map: SkolemizationMap,
+                          snapshot: &CombinedSnapshot)
+    {
+        debug!("pop_skolemized({:?})", skol_map);
+        let skol_regions: FnvHashSet<_> = skol_map.values().cloned().collect();
+        self.region_vars.pop_skolemized(&skol_regions, &snapshot.region_vars_snapshot);
+    }
 }
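
The doc comment on `pop_skolemized` describes a calling discipline rather than an algorithm: pop before committing, or let a rollback pop for you. A minimal sketch of that discipline, with invented types (`Infcx`, `Snapshot`) standing in for the real inference context:

```rust
struct Snapshot { skolemization_count: u32 }

struct Infcx { skolemization_count: u32 }

impl Infcx {
    fn start_snapshot(&self) -> Snapshot {
        Snapshot { skolemization_count: self.skolemization_count }
    }
    fn push_skolemized(&mut self) -> u32 {
        let sc = self.skolemization_count;
        self.skolemization_count += 1;
        sc
    }
    fn pop_skolemized(&mut self, snapshot: &Snapshot) {
        self.skolemization_count = snapshot.skolemization_count;
    }
    fn commit(&self, snapshot: &Snapshot) {
        // Mirrors the new assertion in `RegionVarBindings::commit`: all
        // skolemized regions must have been popped by now.
        assert_eq!(self.skolemization_count, snapshot.skolemization_count,
                   "failed to pop skolemized regions before commit");
    }
    fn rollback_to(&mut self, snapshot: Snapshot) {
        // Rolling back pops implicitly.
        self.skolemization_count = snapshot.skolemization_count;
    }
}

fn main() {
    let mut infcx = Infcx { skolemization_count: 0 };

    // Commit path: must pop explicitly first.
    let snap = infcx.start_snapshot();
    let _skol = infcx.push_skolemized();
    infcx.pop_skolemized(&snap);
    infcx.commit(&snap);

    // Rollback path: popping happens as part of the rollback.
    let snap = infcx.start_snapshot();
    let _skol = infcx.push_skolemized();
    infcx.rollback_to(snap);
    assert_eq!(infcx.skolemization_count, 0);
}
```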
index 7c9c52baa63e44965632e0ebb8fe34b87af3e22b..4dbee6ffa79d998e697acc28e8c7e1a81c3872b9 100644 (file)
@@ -45,6 +45,7 @@
 use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
 
 use self::combine::CombineFields;
+use self::higher_ranked::HrMatchResult;
 use self::region_inference::{RegionVarBindings, RegionSnapshot};
 use self::unify_key::ToType;
 
@@ -63,6 +64,7 @@
 pub mod type_variable;
 pub mod unify_key;
 
+#[must_use]
 pub struct InferOk<'tcx, T> {
     pub value: T,
     pub obligations: PredicateObligations<'tcx>,
@@ -104,6 +106,12 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
 
     pub tables: InferTables<'a, 'gcx, 'tcx>,
 
+    // Cache for projections. This cache is snapshotted along with the
+    // infcx.
+    //
+    // Public so that `traits::project` can use it.
+    pub projection_cache: RefCell<traits::ProjectionCache<'tcx>>,
+
     // We instantiate UnificationTable with bounds<Ty> because the
     // types that might instantiate a general type variable have an
     // order, represented by its upper and lower bounds.
@@ -477,6 +485,7 @@ pub fn borrowck_fake_infer_ctxt(self, param_env: ty::ParameterEnvironment<'gcx>)
             parameter_environment: param_env,
             selection_cache: traits::SelectionCache::new(),
             evaluation_cache: traits::EvaluationCache::new(),
+            projection_cache: RefCell::new(traits::ProjectionCache::new()),
             reported_trait_errors: RefCell::new(FnvHashSet()),
             normalize: false,
             projection_mode: ProjectionMode::AnyFinal,
@@ -510,6 +519,7 @@ pub fn enter<F, R>(&'tcx mut self, f: F) -> R
         global_tcx.enter_local(arenas, |tcx| f(InferCtxt {
             tcx: tcx,
             tables: tables,
+            projection_cache: RefCell::new(traits::ProjectionCache::new()),
             type_variables: RefCell::new(type_variable::TypeVariableTable::new()),
             int_unification_table: RefCell::new(UnificationTable::new()),
             float_unification_table: RefCell::new(UnificationTable::new()),
@@ -538,13 +548,14 @@ fn new(a_is_expected: bool, a: T, b: T) -> Self {
 }
 
 impl<'tcx, T> InferOk<'tcx, T> {
-    fn unit(self) -> InferOk<'tcx, ()> {
+    pub fn unit(self) -> InferOk<'tcx, ()> {
         InferOk { value: (), obligations: self.obligations }
     }
 }
 
 #[must_use = "once you start a snapshot, you should always consume it"]
 pub struct CombinedSnapshot {
+    projection_cache_snapshot: traits::ProjectionCacheSnapshot,
     type_snapshot: type_variable::Snapshot,
     int_snapshot: unify::Snapshot<ty::IntVid>,
     float_snapshot: unify::Snapshot<ty::FloatVid>,
@@ -643,6 +654,8 @@ pub fn drain_fulfillment_cx_or_panic<T>(&self,
                                             -> T::Lifted
         where T: TypeFoldable<'tcx> + ty::Lift<'gcx>
     {
+        debug!("drain_fulfillment_cx_or_panic()");
+
         let when = "resolving bounds after type-checking";
         let v = match self.drain_fulfillment_cx(fulfill_cx, result) {
             Ok(v) => v,
@@ -817,10 +830,13 @@ pub fn glb<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &
     }
 
     fn start_snapshot(&self) -> CombinedSnapshot {
+        debug!("start_snapshot()");
+
         let obligations_in_snapshot = self.obligations_in_snapshot.get();
         self.obligations_in_snapshot.set(false);
 
         CombinedSnapshot {
+            projection_cache_snapshot: self.projection_cache.borrow_mut().snapshot(),
             type_snapshot: self.type_variables.borrow_mut().snapshot(),
             int_snapshot: self.int_unification_table.borrow_mut().snapshot(),
             float_snapshot: self.float_unification_table.borrow_mut().snapshot(),
@@ -831,7 +847,8 @@ fn start_snapshot(&self) -> CombinedSnapshot {
 
     fn rollback_to(&self, cause: &str, snapshot: CombinedSnapshot) {
         debug!("rollback_to(cause={})", cause);
-        let CombinedSnapshot { type_snapshot,
+        let CombinedSnapshot { projection_cache_snapshot,
+                               type_snapshot,
                                int_snapshot,
                                float_snapshot,
                                region_vars_snapshot,
@@ -840,6 +857,9 @@ fn rollback_to(&self, cause: &str, snapshot: CombinedSnapshot) {
         assert!(!self.obligations_in_snapshot.get());
         self.obligations_in_snapshot.set(obligations_in_snapshot);
 
+        self.projection_cache
+            .borrow_mut()
+            .rollback_to(projection_cache_snapshot);
         self.type_variables
             .borrow_mut()
             .rollback_to(type_snapshot);
@@ -854,8 +874,9 @@ fn rollback_to(&self, cause: &str, snapshot: CombinedSnapshot) {
     }
 
     fn commit_from(&self, snapshot: CombinedSnapshot) {
-        debug!("commit_from!");
-        let CombinedSnapshot { type_snapshot,
+        debug!("commit_from()");
+        let CombinedSnapshot { projection_cache_snapshot,
+                               type_snapshot,
                                int_snapshot,
                                float_snapshot,
                                region_vars_snapshot,
@@ -863,6 +884,9 @@ fn commit_from(&self, snapshot: CombinedSnapshot) {
 
         self.obligations_in_snapshot.set(obligations_in_snapshot);
 
+        self.projection_cache
+            .borrow_mut()
+            .commit(projection_cache_snapshot);
         self.type_variables
             .borrow_mut()
             .commit(type_snapshot);
@@ -920,7 +944,8 @@ pub fn commit_regions_if_ok<T, E, F>(&self, f: F) -> Result<T, E> where
         F: FnOnce() -> Result<T, E>
     {
         debug!("commit_regions_if_ok()");
-        let CombinedSnapshot { type_snapshot,
+        let CombinedSnapshot { projection_cache_snapshot,
+                               type_snapshot,
                                int_snapshot,
                                float_snapshot,
                                region_vars_snapshot,
@@ -935,6 +960,9 @@ pub fn commit_regions_if_ok<T, E, F>(&self, f: F) -> Result<T, E> where
 
         // Roll back any non-region bindings - they should be resolved
         // inside `f`, with, e.g. `resolve_type_vars_if_possible`.
+        self.projection_cache
+            .borrow_mut()
+            .rollback_to(projection_cache_snapshot);
         self.type_variables
             .borrow_mut()
             .rollback_to(type_snapshot);
@@ -1076,7 +1104,9 @@ pub fn equality_predicate(&self,
                 self.skolemize_late_bound_regions(predicate, snapshot);
             let origin = TypeOrigin::EquatePredicate(span);
             let eqty_ok = self.eq_types(false, origin, a, b)?;
-            self.leak_check(false, &skol_map, snapshot).map(|_| eqty_ok.unit())
+            self.leak_check(false, span, &skol_map, snapshot)?;
+            self.pop_skolemized(skol_map, snapshot);
+            Ok(eqty_ok.unit())
         })
     }
 
@@ -1090,7 +1120,8 @@ pub fn region_outlives_predicate(&self,
                 self.skolemize_late_bound_regions(predicate, snapshot);
             let origin = RelateRegionParamBound(span);
             self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b`
-            self.leak_check(false, &skol_map, snapshot)
+            self.leak_check(false, span, &skol_map, snapshot)?;
+            Ok(self.pop_skolemized(skol_map, snapshot))
         })
     }
 
@@ -1569,6 +1600,40 @@ pub fn replace_late_bound_regions_with_fresh_var<T>(
             |br| self.next_region_var(LateBoundRegion(span, br, lbrct)))
     }
 
+    /// Given a higher-ranked projection predicate like:
+    ///
+    ///     for<'a> <T as Fn<&'a u32>>::Output = &'a u32
+    ///
+    /// and a target trait-ref like:
+    ///
+    ///     <T as Fn<&'x u32>>
+    ///
+    /// find a substitution `S` for the higher-ranked regions (here,
+    /// `['a => 'x]`) such that the predicate matches the trait-ref,
+    /// and then return the value (here, `&'a u32`) but with the
+    /// substitution applied (hence, `&'x u32`).
+    ///
+    /// See `higher_ranked_match` in `higher_ranked/mod.rs` for more
+    /// details.
+    pub fn match_poly_projection_predicate(&self,
+                                           origin: TypeOrigin,
+                                           match_a: ty::PolyProjectionPredicate<'tcx>,
+                                           match_b: ty::TraitRef<'tcx>)
+                                           -> InferResult<'tcx, HrMatchResult<Ty<'tcx>>>
+    {
+        let span = origin.span();
+        let match_trait_ref = match_a.skip_binder().projection_ty.trait_ref;
+        let trace = TypeTrace {
+            origin: origin,
+            values: TraitRefs(ExpectedFound::new(true, match_trait_ref, match_b))
+        };
+
+        let match_pair = match_a.map_bound(|p| (p.projection_ty.trait_ref, p.ty));
+        let combine = self.combine_fields(true, trace);
+        let result = combine.higher_ranked_match(span, &match_pair, &match_b)?;
+        Ok(InferOk { value: result, obligations: combine.obligations })
+    }
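
A toy model of what `match_poly_projection_predicate` computes, using strings for regions (the representation is invented; the real matching goes through `higher_ranked_match` and the region inference machinery):

```rust
use std::collections::HashMap;

// Models `for<'a> <T as Fn<&'a u32>>::Output = &'a u32` with regions as names.
struct PolyProjection {
    bound_regions: Vec<&'static str>,     // the for<..> binder, e.g. ["'a"]
    trait_ref_regions: Vec<&'static str>, // regions in <T as Fn<&'a u32>>
    value_region: &'static str,           // region in the value &'a u32
}

fn higher_ranked_match(pred: &PolyProjection, target_regions: &[&str]) -> Option<String> {
    // Build the substitution by pairing the predicate's trait-ref regions
    // with the target trait-ref's regions (e.g. ['a => 'x]).
    let mut subst: HashMap<&str, &str> = HashMap::new();
    for (&pr, &tr) in pred.trait_ref_regions.iter().zip(target_regions) {
        if pred.bound_regions.contains(&pr) {
            subst.insert(pr, tr);
        } else if pr != tr {
            return None; // concrete regions must match exactly
        }
    }
    // Apply it to the value side: &'a u32 becomes &'x u32.
    let out = subst.get(pred.value_region).copied().unwrap_or(pred.value_region);
    Some(format!("&{} u32", out))
}

fn main() {
    let pred = PolyProjection {
        bound_regions: vec!["'a"],
        trait_ref_regions: vec!["'a"],
        value_region: "'a",
    };
    assert_eq!(higher_ranked_match(&pred, &["'x"]),
               Some("&'x u32".to_string()));
}
```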
+
     /// See `verify_generic_bound` method in `region_inference`
     pub fn verify_generic_bound(&self,
                                 origin: SubregionOrigin<'tcx>,
index c9037d6b12aa8ee7302a4e1cae6d7417a881e83b..905ad7c0faa236c45e18c6eb5d03cc64e3663441 100644 (file)
@@ -213,8 +213,12 @@ fn constraint_to_nodes(c: &Constraint) -> (Node, Node) {
     match *c {
         Constraint::ConstrainVarSubVar(rv_1, rv_2) =>
             (Node::RegionVid(rv_1), Node::RegionVid(rv_2)),
-        Constraint::ConstrainRegSubVar(r_1, rv_2) => (Node::Region(r_1), Node::RegionVid(rv_2)),
-        Constraint::ConstrainVarSubReg(rv_1, r_2) => (Node::RegionVid(rv_1), Node::Region(r_2)),
+        Constraint::ConstrainRegSubVar(r_1, rv_2) =>
+            (Node::Region(r_1), Node::RegionVid(rv_2)),
+        Constraint::ConstrainVarSubReg(rv_1, r_2) =>
+            (Node::RegionVid(rv_1), Node::Region(r_2)),
+        Constraint::ConstrainRegSubReg(r_1, r_2) =>
+            (Node::Region(r_1), Node::Region(r_2)),
     }
 }
 
index 5312d03052552817810dac087fd68b88602309c3..2211a565a325f215a2af2ca7ead346fc61d0f6dc 100644 (file)
@@ -11,7 +11,6 @@
 //! See README.md
 
 pub use self::Constraint::*;
-pub use self::Verify::*;
 pub use self::UndoLogEntry::*;
 pub use self::CombineMapType::*;
 pub use self::RegionResolutionError::*;
@@ -20,6 +19,7 @@
 use super::{RegionVariableOrigin, SubregionOrigin, MiscVariable};
 use super::unify_key;
 
+use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet};
 use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING};
 use rustc_data_structures::unify::{self, UnificationTable};
 use middle::free_region::FreeRegionMap;
 use ty::{BoundRegion, Region, RegionVid};
 use ty::{ReEmpty, ReStatic, ReFree, ReEarlyBound};
 use ty::{ReLateBound, ReScope, ReVar, ReSkolemized, BrFresh};
-use util::common::indenter;
-use util::nodemap::{FnvHashMap, FnvHashSet};
 
 use std::cell::{Cell, RefCell};
 use std::cmp::Ordering::{self, Less, Greater, Equal};
 use std::fmt;
+use std::mem;
 use std::u32;
 use syntax::ast;
 
@@ -47,25 +46,28 @@ pub enum Constraint {
     // Concrete region is subregion of region variable
     ConstrainRegSubVar(Region, RegionVid),
 
-    // Region variable is subregion of concrete region
-    //
-    // FIXME(#29436) -- should be remove in favor of a Verify
+    // Region variable is subregion of concrete region. This does not
+    // directly affect inference, but instead is checked after
+    // inference is complete.
     ConstrainVarSubReg(RegionVid, Region),
+
+    // A constraint where neither side is a variable. This does not
+    // directly affect inference, but instead is checked after
+    // inference is complete.
+    ConstrainRegSubReg(Region, Region),
 }
 
-// Something we have to verify after region inference is done, but
-// which does not directly influence the inference process
-pub enum Verify<'tcx> {
-    // VerifyRegSubReg(a, b): Verify that `a <= b`. Neither `a` nor
-    // `b` are inference variables.
-    VerifyRegSubReg(SubregionOrigin<'tcx>, Region, Region),
-
-    // VerifyGenericBound(T, _, R, RS): The parameter type `T` (or
-    // associated type) must outlive the region `R`. `T` is known to
-    // outlive `RS`. Therefore verify that `R <= RS[i]` for some
-    // `i`. Inference variables may be involved (but this verification
-    // step doesn't influence inference).
-    VerifyGenericBound(GenericKind<'tcx>, SubregionOrigin<'tcx>, Region, VerifyBound),
+// VerifyGenericBound(T, _, R, RS): The parameter type `T` (or
+// associated type) must outlive the region `R`. `T` is known to
+// outlive `RS`. Therefore verify that `R <= RS[i]` for some
+// `i`. Inference variables may be involved (but this verification
+// step doesn't influence inference).
+#[derive(Debug)]
+pub struct Verify<'tcx> {
+    kind: GenericKind<'tcx>,
+    origin: SubregionOrigin<'tcx>,
+    region: Region,
+    bound: VerifyBound,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq)]
@@ -108,13 +110,36 @@ pub struct TwoRegions {
 
 #[derive(Copy, Clone, PartialEq)]
 pub enum UndoLogEntry {
+    /// Pushed when we start a snapshot.
     OpenSnapshot,
+
+    /// Replaces an `OpenSnapshot` when a snapshot is committed, but
+    /// that snapshot is not the root. If the root snapshot is
+    /// unrolled, all nested snapshots must be committed.
     CommitedSnapshot,
+
+    /// We added `RegionVid`
     AddVar(RegionVid),
+
+    /// We added the given `constraint`
     AddConstraint(Constraint),
+
+    /// We added the given `verify`
     AddVerify(usize),
+
+    /// We added the given `given`
     AddGiven(ty::FreeRegion, ty::RegionVid),
+
+    /// We added a GLB/LUB "combination variable"
     AddCombination(CombineMapType, TwoRegions),
+
+    /// During skolemization, we sometimes purge entries from the undo
+    /// log in a kind of mini-snapshot (unlike other snapshots, this
+    /// purging actually takes place *on success*). In that case, we
+    /// replace the corresponding entry with `Purged` so as to avoid the
+    /// need to do a bunch of swapping. (We can't use `swap_remove` as
+    /// the order of the vector is important.)
+    Purged,
 }
 
 #[derive(Copy, Clone, PartialEq)]
@@ -253,6 +278,112 @@ pub struct RegionSnapshot {
     skolemization_count: u32,
 }
 
+/// When working with skolemized regions, we often wish to find all of
+/// the regions that are either reachable from a skolemized region, or
+/// which can reach a skolemized region, or both. We call such regions
+/// *tained* regions.  This struct allows you to decide what set of
+/// *tainted* regions. This struct allows you to decide what set of
+#[derive(Debug)]
+pub struct TaintDirections {
+    incoming: bool,
+    outgoing: bool,
+}
+
+impl TaintDirections {
+    pub fn incoming() -> Self {
+        TaintDirections { incoming: true, outgoing: false }
+    }
+
+    pub fn outgoing() -> Self {
+        TaintDirections { incoming: false, outgoing: true }
+    }
+
+    pub fn both() -> Self {
+        TaintDirections { incoming: true, outgoing: true }
+    }
+}
+
+struct TaintSet {
+    directions: TaintDirections,
+    regions: FnvHashSet<ty::Region>
+}
+
+impl TaintSet {
+    fn new(directions: TaintDirections,
+           initial_region: ty::Region)
+           -> Self {
+        let mut regions = FnvHashSet();
+        regions.insert(initial_region);
+        TaintSet { directions: directions, regions: regions }
+    }
+
+    fn fixed_point(&mut self,
+                   undo_log: &[UndoLogEntry],
+                   verifys: &[Verify]) {
+        let mut prev_len = 0;
+        while prev_len < self.len() {
+            debug!("tainted: prev_len = {:?} new_len = {:?}",
+                   prev_len, self.len());
+
+            prev_len = self.len();
+
+            for undo_entry in undo_log {
+                match undo_entry {
+                    &AddConstraint(ConstrainVarSubVar(a, b)) => {
+                        self.add_edge(ReVar(a), ReVar(b));
+                    }
+                    &AddConstraint(ConstrainRegSubVar(a, b)) => {
+                        self.add_edge(a, ReVar(b));
+                    }
+                    &AddConstraint(ConstrainVarSubReg(a, b)) => {
+                        self.add_edge(ReVar(a), b);
+                    }
+                    &AddConstraint(ConstrainRegSubReg(a, b)) => {
+                        self.add_edge(a, b);
+                    }
+                    &AddGiven(a, b) => {
+                        self.add_edge(ReFree(a), ReVar(b));
+                    }
+                    &AddVerify(i) => {
+                        verifys[i].bound.for_each_region(&mut |b| {
+                            self.add_edge(verifys[i].region, b);
+                        });
+                    }
+                    &Purged |
+                    &AddCombination(..) |
+                    &AddVar(..) |
+                    &OpenSnapshot |
+                    &CommitedSnapshot => {}
+                }
+            }
+        }
+    }
+
+    fn into_set(self) -> FnvHashSet<ty::Region> {
+        self.regions
+    }
+
+    fn len(&self) -> usize {
+        self.regions.len()
+    }
+
+    fn add_edge(&mut self,
+                source: ty::Region,
+                target: ty::Region) {
+        if self.directions.incoming {
+            if self.regions.contains(&target) {
+                self.regions.insert(source);
+            }
+        }
+
+        if self.directions.outgoing {
+            if self.regions.contains(&source) {
+                self.regions.insert(target);
+            }
+        }
+    }
+}
+
 impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
     pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> RegionVarBindings<'a, 'gcx, 'tcx> {
         RegionVarBindings {
@@ -290,6 +421,10 @@ pub fn commit(&self, snapshot: RegionSnapshot) {
         debug!("RegionVarBindings: commit({})", snapshot.length);
         assert!(self.undo_log.borrow().len() > snapshot.length);
         assert!((*self.undo_log.borrow())[snapshot.length] == OpenSnapshot);
+        assert!(self.skolemization_count.get() == snapshot.skolemization_count,
+                "failed to pop skolemized regions: {} now vs {} at start",
+                self.skolemization_count.get(),
+                snapshot.skolemization_count);
 
         let mut undo_log = self.undo_log.borrow_mut();
         if snapshot.length == 0 {
@@ -297,7 +432,6 @@ pub fn commit(&self, snapshot: RegionSnapshot) {
         } else {
             (*undo_log)[snapshot.length] = CommitedSnapshot;
         }
-        self.skolemization_count.set(snapshot.skolemization_count);
         self.unification_table.borrow_mut().commit(snapshot.region_snapshot);
     }
 
@@ -307,33 +441,7 @@ pub fn rollback_to(&self, snapshot: RegionSnapshot) {
         assert!(undo_log.len() > snapshot.length);
         assert!((*undo_log)[snapshot.length] == OpenSnapshot);
         while undo_log.len() > snapshot.length + 1 {
-            match undo_log.pop().unwrap() {
-                OpenSnapshot => {
-                    bug!("Failure to observe stack discipline");
-                }
-                CommitedSnapshot => {}
-                AddVar(vid) => {
-                    let mut var_origins = self.var_origins.borrow_mut();
-                    var_origins.pop().unwrap();
-                    assert_eq!(var_origins.len(), vid.index as usize);
-                }
-                AddConstraint(ref constraint) => {
-                    self.constraints.borrow_mut().remove(constraint);
-                }
-                AddVerify(index) => {
-                    self.verifys.borrow_mut().pop();
-                    assert_eq!(self.verifys.borrow().len(), index);
-                }
-                AddGiven(sub, sup) => {
-                    self.givens.borrow_mut().remove(&(sub, sup));
-                }
-                AddCombination(Glb, ref regions) => {
-                    self.glbs.borrow_mut().remove(regions);
-                }
-                AddCombination(Lub, ref regions) => {
-                    self.lubs.borrow_mut().remove(regions);
-                }
-            }
+            self.rollback_undo_entry(undo_log.pop().unwrap());
         }
         let c = undo_log.pop().unwrap();
         assert!(c == OpenSnapshot);
@@ -342,6 +450,38 @@ pub fn rollback_to(&self, snapshot: RegionSnapshot) {
             .rollback_to(snapshot.region_snapshot);
     }
 
+    pub fn rollback_undo_entry(&self, undo_entry: UndoLogEntry) {
+        match undo_entry {
+            OpenSnapshot => {
+                panic!("Failure to observe stack discipline");
+            }
+            Purged | CommitedSnapshot => {
+                // nothing to do here
+            }
+            AddVar(vid) => {
+                let mut var_origins = self.var_origins.borrow_mut();
+                var_origins.pop().unwrap();
+                assert_eq!(var_origins.len(), vid.index as usize);
+            }
+            AddConstraint(ref constraint) => {
+                self.constraints.borrow_mut().remove(constraint);
+            }
+            AddVerify(index) => {
+                self.verifys.borrow_mut().pop();
+                assert_eq!(self.verifys.borrow().len(), index);
+            }
+            AddGiven(sub, sup) => {
+                self.givens.borrow_mut().remove(&(sub, sup));
+            }
+            AddCombination(Glb, ref regions) => {
+                self.glbs.borrow_mut().remove(regions);
+            }
+            AddCombination(Lub, ref regions) => {
+                self.lubs.borrow_mut().remove(regions);
+            }
+        }
+    }
+
     pub fn num_vars(&self) -> u32 {
         let len = self.var_origins.borrow().len();
         // enforce no overflow
@@ -366,22 +506,30 @@ pub fn new_region_var(&self, origin: RegionVariableOrigin) -> RegionVid {
         return vid;
     }
 
+    pub fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin {
+        self.var_origins.borrow()[vid.index as usize].clone()
+    }
+
     /// Creates a new skolemized region. Skolemized regions are fresh
     /// regions used when performing higher-ranked computations. They
     /// must be used in a very particular way and are never supposed
     /// to "escape" out into error messages or the code at large.
     ///
     /// The idea is to always create a snapshot. Skolemized regions
-    /// can be created in the context of this snapshot, but once the
-    /// snapshot is committed or rolled back, their numbers will be
-    /// recycled, so you must be finished with them. See the extensive
-    /// comments in `higher_ranked.rs` to see how it works (in
-    /// particular, the subtyping comparison).
+    /// can be created in the context of this snapshot, but before the
+    /// snapshot is committed or rolled back, they must be popped
+    /// (using `pop_skolemized`), so that their numbers can be
+    /// recycled. Normally you don't have to think about this: you use
+    /// the APIs in `higher_ranked/mod.rs`, such as
+    /// `skolemize_late_bound_regions` and `plug_leaks`, which will
+    /// guide you on this path (ensure that the `SkolemizationMap` is
+    /// consumed and you are good).  There are also somewhat extensive
+    /// comments in `higher_ranked/README.md`.
     ///
     /// The `snapshot` argument to this function is not really used;
     /// it's just there to make it explicit which snapshot bounds the
-    /// skolemized region that results.
-    pub fn new_skolemized(&self, br: ty::BoundRegion, snapshot: &RegionSnapshot) -> Region {
+    /// skolemized region that results. It should always be the top-most snapshot.
+    pub fn push_skolemized(&self, br: ty::BoundRegion, snapshot: &RegionSnapshot) -> Region {
         assert!(self.in_snapshot());
         assert!(self.undo_log.borrow()[snapshot.length] == OpenSnapshot);
 
@@ -390,6 +538,94 @@ pub fn new_skolemized(&self, br: ty::BoundRegion, snapshot: &RegionSnapshot) ->
         ReSkolemized(ty::SkolemizedRegionVid { index: sc }, br)
     }
 
+    /// Removes all the edges to/from the skolemized regions that are
+    /// in `skols`. This is used after a higher-ranked operation
+    /// completes to remove all trace of the skolemized regions
+    /// created in that time.
+    pub fn pop_skolemized(&self,
+                          skols: &FnvHashSet<ty::Region>,
+                          snapshot: &RegionSnapshot) {
+        debug!("pop_skolemized(skols={:?})", skols);
+
+        assert!(self.in_snapshot());
+        assert!(self.undo_log.borrow()[snapshot.length] == OpenSnapshot);
+        assert!(self.skolemization_count.get() as usize >= skols.len(),
+                "popping more skolemized variables than actually exist, \
+                 sc now = {}, skols.len = {}",
+                self.skolemization_count.get(),
+                skols.len());
+
+        let last_to_pop = self.skolemization_count.get();
+        let first_to_pop = last_to_pop - (skols.len() as u32);
+
+        assert!(first_to_pop >= snapshot.skolemization_count,
+                "popping more regions than snapshot contains, \
+                 sc now = {}, sc then = {}, skols.len = {}",
+                self.skolemization_count.get(),
+                snapshot.skolemization_count,
+                skols.len());
+        debug_assert! {
+            skols.iter()
+                 .all(|k| match *k {
+                     ty::ReSkolemized(index, _) =>
+                         index.index >= first_to_pop &&
+                         index.index < last_to_pop,
+                     _ =>
+                         false
+                 }),
+            "invalid skolemization keys or keys out of range ({}..{}): {:?}",
+            snapshot.skolemization_count,
+            self.skolemization_count.get(),
+            skols
+        }
+
+        let mut undo_log = self.undo_log.borrow_mut();
+
+        let constraints_to_kill: Vec<usize> =
+            undo_log.iter()
+                    .enumerate()
+                    .rev()
+                    .filter(|&(_, undo_entry)| kill_constraint(skols, undo_entry))
+                    .map(|(index, _)| index)
+                    .collect();
+
+        for index in constraints_to_kill {
+            let undo_entry = mem::replace(&mut undo_log[index], Purged);
+            self.rollback_undo_entry(undo_entry);
+        }
+
+        self.skolemization_count.set(snapshot.skolemization_count);
+        return;
+
+        fn kill_constraint(skols: &FnvHashSet<ty::Region>,
+                           undo_entry: &UndoLogEntry)
+                           -> bool {
+            match undo_entry {
+                &AddConstraint(ConstrainVarSubVar(_, _)) =>
+                    false,
+                &AddConstraint(ConstrainRegSubVar(a, _)) =>
+                    skols.contains(&a),
+                &AddConstraint(ConstrainVarSubReg(_, b)) =>
+                    skols.contains(&b),
+                &AddConstraint(ConstrainRegSubReg(a, b)) =>
+                    skols.contains(&a) || skols.contains(&b),
+                &AddGiven(_, _) =>
+                    false,
+                &AddVerify(_) =>
+                    false,
+                &AddCombination(_, ref two_regions) =>
+                    skols.contains(&two_regions.a) ||
+                    skols.contains(&two_regions.b),
+                &AddVar(..) |
+                &OpenSnapshot |
+                &Purged |
+                &CommitedSnapshot =>
+                    false,
+            }
+        }
+
+    }
+
     pub fn new_bound(&self, debruijn: ty::DebruijnIndex) -> Region {
         // Creates a fresh bound variable for use in GLB computations.
         // See discussion of GLB computation in the large comment at
@@ -443,11 +679,9 @@ fn add_verify(&self, verify: Verify<'tcx>) {
         debug!("RegionVarBindings: add_verify({:?})", verify);
 
         // skip no-op cases known to be satisfied
-        match verify {
-            VerifyGenericBound(_, _, _, VerifyBound::AllBounds(ref bs)) if bs.len() == 0 => {
-                return;
-            }
-            _ => {}
+        match verify.bound {
+            VerifyBound::AllBounds(ref bs) if bs.len() == 0 => { return; }
+            _ => { }
         }
 
         let mut verifys = self.verifys.borrow_mut();
@@ -515,7 +749,7 @@ pub fn make_subregion(&self, origin: SubregionOrigin<'tcx>, sub: Region, sup: Re
                 self.add_constraint(ConstrainVarSubReg(sub_id, r), origin);
             }
             _ => {
-                self.add_verify(VerifyRegSubReg(origin, sub, sup));
+                self.add_constraint(ConstrainRegSubReg(sub, sup), origin);
             }
         }
     }
@@ -526,7 +760,12 @@ pub fn verify_generic_bound(&self,
                                 kind: GenericKind<'tcx>,
                                 sub: Region,
                                 bound: VerifyBound) {
-        self.add_verify(VerifyGenericBound(kind, origin, sub, bound));
+        self.add_verify(Verify {
+            kind: kind,
+            origin: origin,
+            region: sub,
+            bound: bound
+        });
     }
 
     pub fn lub_regions(&self, origin: SubregionOrigin<'tcx>, a: Region, b: Region) -> Region {
@@ -632,83 +871,30 @@ pub fn vars_created_since_snapshot(&self, mark: &RegionSnapshot) -> Vec<RegionVi
             .collect()
     }
 
-    /// Computes all regions that have been related to `r0` in any way since the mark `mark` was
-    /// made---`r0` itself will be the first entry. This is used when checking whether skolemized
-    /// regions are being improperly related to other regions.
-    pub fn tainted(&self, mark: &RegionSnapshot, r0: Region) -> Vec<Region> {
-        debug!("tainted(mark={:?}, r0={:?})", mark, r0);
-        let _indenter = indenter();
+    /// Computes all regions that have been related to `r0` since the
+    /// mark `mark` was made---`r0` itself will always be included
+    /// in the result. The `directions` parameter controls what kind of
+    /// relations are considered. For example, one can say that only
+    /// "incoming" edges to `r0` are desired, in which case one will
+    /// get the set of regions `{r|r <= r0}`. This is used when
+    /// checking whether skolemized regions are being improperly
+    /// related to other regions.
+    pub fn tainted(&self,
+                   mark: &RegionSnapshot,
+                   r0: Region,
+                   directions: TaintDirections)
+                   -> FnvHashSet<ty::Region> {
+        debug!("tainted(mark={:?}, r0={:?}, directions={:?})",
+               mark, r0, directions);
 
         // `result_set` acts as a worklist: we explore all outgoing
         // edges and add any new regions we find to result_set.  This
         // is not a terribly efficient implementation.
-        let mut result_set = vec![r0];
-        let mut result_index = 0;
-        while result_index < result_set.len() {
-            // nb: can't use usize::range() here because result_set grows
-            let r = result_set[result_index];
-            debug!("result_index={}, r={:?}", result_index, r);
-
-            for undo_entry in self.undo_log.borrow()[mark.length..].iter() {
-                match undo_entry {
-                    &AddConstraint(ConstrainVarSubVar(a, b)) => {
-                        consider_adding_bidirectional_edges(&mut result_set, r, ReVar(a), ReVar(b));
-                    }
-                    &AddConstraint(ConstrainRegSubVar(a, b)) => {
-                        consider_adding_bidirectional_edges(&mut result_set, r, a, ReVar(b));
-                    }
-                    &AddConstraint(ConstrainVarSubReg(a, b)) => {
-                        consider_adding_bidirectional_edges(&mut result_set, r, ReVar(a), b);
-                    }
-                    &AddGiven(a, b) => {
-                        consider_adding_bidirectional_edges(&mut result_set,
-                                                            r,
-                                                            ReFree(a),
-                                                            ReVar(b));
-                    }
-                    &AddVerify(i) => {
-                        match (*self.verifys.borrow())[i] {
-                            VerifyRegSubReg(_, a, b) => {
-                                consider_adding_bidirectional_edges(&mut result_set, r, a, b);
-                            }
-                            VerifyGenericBound(_, _, a, ref bound) => {
-                                bound.for_each_region(&mut |b| {
-                                    consider_adding_bidirectional_edges(&mut result_set, r, a, b)
-                                });
-                            }
-                        }
-                    }
-                    &AddCombination(..) |
-                    &AddVar(..) |
-                    &OpenSnapshot |
-                    &CommitedSnapshot => {}
-                }
-            }
-
-            result_index += 1;
-        }
-
-        return result_set;
-
-        fn consider_adding_bidirectional_edges(result_set: &mut Vec<Region>,
-                                               r: Region,
-                                               r1: Region,
-                                               r2: Region) {
-            consider_adding_directed_edge(result_set, r, r1, r2);
-            consider_adding_directed_edge(result_set, r, r2, r1);
-        }
-
-        fn consider_adding_directed_edge(result_set: &mut Vec<Region>,
-                                         r: Region,
-                                         r1: Region,
-                                         r2: Region) {
-            if r == r1 {
-                // Clearly, this is potentially inefficient.
-                if !result_set.iter().any(|x| *x == r2) {
-                    result_set.push(r2);
-                }
-            }
-        }
+        let mut taint_set = TaintSet::new(directions, r0);
+        taint_set.fixed_point(&self.undo_log.borrow()[mark.length..],
+                              &self.verifys.borrow());
+        debug!("tainted: result={:?}", taint_set.regions);
+        return taint_set.into_set();
     }
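
A standalone sketch of the taint computation that `TaintSet::fixed_point` performs, with regions modelled as integers and constraint edges as pairs (invented representation, illustration only):

```rust
use std::collections::HashSet;

struct TaintDirections { incoming: bool, outgoing: bool }

fn tainted(r0: u32, edges: &[(u32, u32)], dirs: &TaintDirections) -> HashSet<u32> {
    let mut set: HashSet<u32> = HashSet::new();
    set.insert(r0);
    let mut prev_len = 0;
    // Keep sweeping the recorded edges until no new region is added.
    while prev_len < set.len() {
        prev_len = set.len();
        for &(source, target) in edges {
            // `source <= target` edge, as recorded in the undo log.
            if dirs.incoming && set.contains(&target) {
                set.insert(source);
            }
            if dirs.outgoing && set.contains(&source) {
                set.insert(target);
            }
        }
    }
    set
}

fn main() {
    // Edges: 1 <= 2, 2 <= 3, 0 <= 2.
    let edges = [(1, 2), (2, 3), (0, 2)];
    let both = TaintDirections { incoming: true, outgoing: true };
    let result = tainted(2, &edges, &both);
    // Everything related to region 2 in either direction is tainted.
    let expected: HashSet<u32> = [0u32, 1, 2, 3].iter().cloned().collect();
    assert_eq!(result, expected);
}
```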
 
     /// This function performs the actual region resolution.  It must be
@@ -805,10 +991,6 @@ pub enum VarValue {
     ErrorValue,
 }
 
-struct VarData {
-    value: VarValue,
-}
-
 struct RegionAndOrigin<'tcx> {
     region: Region,
     origin: SubregionOrigin<'tcx>,
@@ -834,18 +1016,14 @@ fn infer_variable_values(&self,
         let graph = self.construct_graph();
         self.expand_givens(&graph);
         self.expansion(free_regions, &mut var_data);
-        self.contraction(free_regions, &mut var_data);
-        let values = self.extract_values_and_collect_conflicts(free_regions,
-                                                               &var_data,
-                                                               &graph,
-                                                               errors);
-        self.collect_concrete_region_errors(free_regions, &values, errors);
-        values
+        self.collect_errors(free_regions, &mut var_data, errors);
+        self.collect_var_errors(free_regions, &var_data, &graph, errors);
+        var_data
     }
 
-    fn construct_var_data(&self) -> Vec<VarData> {
+    fn construct_var_data(&self) -> Vec<VarValue> {
         (0..self.num_vars() as usize)
-            .map(|_| VarData { value: Value(ty::ReEmpty) })
+            .map(|_| Value(ty::ReEmpty))
             .collect()
     }
 
@@ -882,30 +1060,28 @@ fn expand_givens(&self, graph: &RegionGraph) {
         }
     }
 
-    fn expansion(&self, free_regions: &FreeRegionMap, var_data: &mut [VarData]) {
-        self.iterate_until_fixed_point("Expansion", |constraint| {
+    fn expansion(&self, free_regions: &FreeRegionMap, var_values: &mut [VarValue]) {
+        self.iterate_until_fixed_point("Expansion", |constraint, origin| {
             debug!("expansion: constraint={:?} origin={:?}",
-                   constraint,
-                   self.constraints
-                       .borrow()
-                       .get(constraint)
-                       .unwrap());
+                   constraint, origin);
             match *constraint {
                 ConstrainRegSubVar(a_region, b_vid) => {
-                    let b_data = &mut var_data[b_vid.index as usize];
+                    let b_data = &mut var_values[b_vid.index as usize];
                     self.expand_node(free_regions, a_region, b_vid, b_data)
                 }
                 ConstrainVarSubVar(a_vid, b_vid) => {
-                    match var_data[a_vid.index as usize].value {
+                    match var_values[a_vid.index as usize] {
                         ErrorValue => false,
                         Value(a_region) => {
-                            let b_node = &mut var_data[b_vid.index as usize];
+                            let b_node = &mut var_values[b_vid.index as usize];
                             self.expand_node(free_regions, a_region, b_vid, b_node)
                         }
                     }
                 }
+                ConstrainRegSubReg(..) |
                 ConstrainVarSubReg(..) => {
-                    // This is a contraction constraint.  Ignore it.
+                    // These constraints are checked after expansion
+                    // is done, in `collect_errors`.
                     false
                 }
             }
@@ -916,12 +1092,12 @@ fn expand_node(&self,
                    free_regions: &FreeRegionMap,
                    a_region: Region,
                    b_vid: RegionVid,
-                   b_data: &mut VarData)
+                   b_data: &mut VarValue)
                    -> bool {
         debug!("expand_node({:?}, {:?} == {:?})",
                a_region,
                b_vid,
-               b_data.value);
+               b_data);
 
         // Check if this relationship is implied by a given.
         match a_region {
@@ -934,7 +1110,7 @@ fn expand_node(&self,
             _ => {}
         }
 
-        match b_data.value {
+        match *b_data {
             Value(cur_region) => {
                 let lub = self.lub_concrete_regions(free_regions, a_region, cur_region);
                 if lub == cur_region {
@@ -946,7 +1122,7 @@ fn expand_node(&self,
                        cur_region,
                        lub);
 
-                b_data.value = Value(lub);
+                *b_data = Value(lub);
                 return true;
             }
 
@@ -956,94 +1132,92 @@ fn expand_node(&self,
         }
     }
 
-    // FIXME(#29436) -- this fn would just go away if we removed ConstrainVarSubReg
-    fn contraction(&self, free_regions: &FreeRegionMap, var_data: &mut [VarData]) {
-        self.iterate_until_fixed_point("Contraction", |constraint| {
-            debug!("contraction: constraint={:?} origin={:?}",
-                   constraint,
-                   self.constraints
-                       .borrow()
-                       .get(constraint)
-                       .unwrap());
+    /// After expansion is complete, go over the constraints that impose
+    /// upper bounds (i.e., cases where a region cannot grow larger than
+    /// some fixed region) and check that they are satisfied.
+    fn collect_errors(&self,
+                      free_regions: &FreeRegionMap,
+                      var_data: &mut Vec<VarValue>,
+                      errors: &mut Vec<RegionResolutionError<'tcx>>) {
+        let constraints = self.constraints.borrow();
+        for (constraint, origin) in constraints.iter() {
+            debug!("collect_errors: constraint={:?} origin={:?}",
+                   constraint, origin);
             match *constraint {
                 ConstrainRegSubVar(..) |
                 ConstrainVarSubVar(..) => {
                     // Expansion will ensure that these constraints hold. Ignore.
                 }
+
+                ConstrainRegSubReg(sub, sup) => {
+                    if free_regions.is_subregion_of(self.tcx, sub, sup) {
+                        continue;
+                    }
+
+                    debug!("collect_errors: region error at {:?}: \
+                            cannot verify that {:?} <= {:?}",
+                           origin,
+                           sub,
+                           sup);
+
+                    errors.push(ConcreteFailure((*origin).clone(), sub, sup));
+                }
+
                 ConstrainVarSubReg(a_vid, b_region) => {
                     let a_data = &mut var_data[a_vid.index as usize];
                     debug!("contraction: {:?} == {:?}, {:?}",
                            a_vid,
-                           a_data.value,
+                           a_data,
                            b_region);
 
-                    let a_region = match a_data.value {
-                        ErrorValue => return false,
+                    let a_region = match *a_data {
+                        ErrorValue => continue,
                         Value(a_region) => a_region,
                     };
 
+                    // Do not report these errors immediately:
+                    // instead, set the variable value to error and
+                    // collect them later.
                     if !free_regions.is_subregion_of(self.tcx, a_region, b_region) {
-                        debug!("Setting {:?} to ErrorValue: {:?} not subregion of {:?}",
+                        debug!("collect_errors: region error at {:?}: \
+                                cannot verify that {:?}={:?} <= {:?}",
+                               origin,
                                a_vid,
                                a_region,
                                b_region);
-                        a_data.value = ErrorValue;
+                        *a_data = ErrorValue;
                     }
                 }
             }
+        }
 
-            false
-        })
-    }
-
-    fn collect_concrete_region_errors(&self,
-                                      free_regions: &FreeRegionMap,
-                                      values: &Vec<VarValue>,
-                                      errors: &mut Vec<RegionResolutionError<'tcx>>) {
-        let mut reg_reg_dups = FnvHashSet();
         for verify in self.verifys.borrow().iter() {
-            match *verify {
-                VerifyRegSubReg(ref origin, sub, sup) => {
-                    if free_regions.is_subregion_of(self.tcx, sub, sup) {
-                        continue;
-                    }
-
-                    if !reg_reg_dups.insert((sub, sup)) {
-                        continue;
-                    }
-
-                    debug!("region inference error at {:?}: {:?} <= {:?} is not true",
-                           origin,
-                           sub,
-                           sup);
-
-                    errors.push(ConcreteFailure((*origin).clone(), sub, sup));
-                }
+            debug!("collect_errors: verify={:?}", verify);
+            let sub = normalize(var_data, verify.region);
+            if verify.bound.is_met(self.tcx, free_regions, var_data, sub) {
+                continue;
+            }
 
-                VerifyGenericBound(ref kind, ref origin, sub, ref bound) => {
-                    let sub = normalize(values, sub);
-                    if bound.is_met(self.tcx, free_regions, values, sub) {
-                        continue;
-                    }
+            debug!("collect_errors: region error at {:?}: \
+                    cannot verify that {:?} <= {:?}",
+                   verify.origin,
+                   verify.region,
+                   verify.bound);
 
-                    debug!("region inference error at {:?}: verifying {:?} <= {:?}",
-                           origin,
-                           sub,
-                           bound);
-
-                    errors.push(GenericBoundFailure((*origin).clone(), kind.clone(), sub));
-                }
-            }
+            errors.push(GenericBoundFailure(verify.origin.clone(),
+                                            verify.kind.clone(),
+                                            sub));
         }
     }
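
A condensed, invented sketch of the two-phase scheme used here: expansion grows each variable to the least upper bound of its lower bounds, and `collect_errors` then checks the constraints that only impose upper bounds. Regions are modelled as plain integers with LUB = max, which is far simpler than the real region lattice:

```rust
#[derive(Clone, Copy, Debug)]
enum Constraint {
    RegSubVar(u32, usize),   // concrete region <= variable (lower bound)
    VarSubVar(usize, usize), // variable <= variable
    VarSubReg(usize, u32),   // variable <= concrete region (upper bound)
    RegSubReg(u32, u32),     // checked only, never used for expansion
}

fn solve(num_vars: usize, constraints: &[Constraint]) -> (Vec<u32>, Vec<Constraint>) {
    let mut values = vec![0u32; num_vars];

    // Expansion: iterate to a fixed point, only ever growing values.
    let mut changed = true;
    while changed {
        changed = false;
        for &c in constraints {
            let grow = |values: &mut Vec<u32>, idx: usize, lower: u32| -> bool {
                if values[idx] < lower { values[idx] = lower; true } else { false }
            };
            match c {
                Constraint::RegSubVar(r, v) => changed |= grow(&mut values, v, r),
                Constraint::VarSubVar(a, b) => {
                    let lower = values[a];
                    changed |= grow(&mut values, b, lower);
                }
                // Upper bounds are ignored during expansion.
                Constraint::VarSubReg(..) | Constraint::RegSubReg(..) => {}
            }
        }
    }

    // collect_errors: now check the upper-bound constraints.
    let mut errors = vec![];
    for &c in constraints {
        match c {
            Constraint::VarSubReg(v, r) if values[v] > r => errors.push(c),
            Constraint::RegSubReg(a, b) if a > b => errors.push(c),
            _ => {}
        }
    }
    (values, errors)
}

fn main() {
    use Constraint::*;
    let constraints = [RegSubVar(3, 0), VarSubVar(0, 1), VarSubReg(1, 2), RegSubReg(1, 5)];
    let (values, errors) = solve(2, &constraints);
    assert_eq!(values, vec![3u32, 3]);
    assert_eq!(errors.len(), 1); // variable 1 grew to 3, exceeding its bound 2
}
```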
 
-    fn extract_values_and_collect_conflicts(&self,
-                                            free_regions: &FreeRegionMap,
-                                            var_data: &[VarData],
-                                            graph: &RegionGraph,
-                                            errors: &mut Vec<RegionResolutionError<'tcx>>)
-                                            -> Vec<VarValue> {
-        debug!("extract_values_and_collect_conflicts()");
+    /// Go over the variables that were declared to be error variables
+    /// and create a `RegionResolutionError` for each of them.
+    fn collect_var_errors(&self,
+                          free_regions: &FreeRegionMap,
+                          var_data: &[VarValue],
+                          graph: &RegionGraph,
+                          errors: &mut Vec<RegionResolutionError<'tcx>>) {
+        debug!("collect_var_errors");
 
         // This is the best way that I have found to suppress
         // duplicate and related errors. Basically we keep a set of
@@ -1059,7 +1233,7 @@ fn extract_values_and_collect_conflicts(&self,
         let mut dup_vec = vec![u32::MAX; self.num_vars() as usize];
 
         for idx in 0..self.num_vars() as usize {
-            match var_data[idx].value {
+            match var_data[idx] {
                 Value(_) => {
                     /* Inference successful */
                 }
@@ -1096,8 +1270,6 @@ fn extract_values_and_collect_conflicts(&self,
                 }
             }
         }
-
-        (0..self.num_vars() as usize).map(|idx| var_data[idx].value).collect()
     }
 
     fn construct_graph(&self) -> RegionGraph {
@@ -1132,6 +1304,10 @@ fn construct_graph(&self) -> RegionGraph {
                 ConstrainVarSubReg(a_id, _) => {
                     graph.add_edge(NodeIndex(a_id.index as usize), dummy_sink, *constraint);
                 }
+                ConstrainRegSubReg(..) => {
+                    // this would be an edge from `dummy_source` to
+                    // `dummy_sink`; just ignore it.
+                }
             }
         }
 
@@ -1274,13 +1450,18 @@ fn process_edges<'a, 'gcx, 'tcx>(this: &RegionVarBindings<'a, 'gcx, 'tcx>,
                             origin: this.constraints.borrow().get(&edge.data).unwrap().clone(),
                         });
                     }
+
+                    ConstrainRegSubReg(..) => {
+                        panic!("cannot reach reg-sub-reg edge in region inference \
+                                post-processing")
+                    }
                 }
             }
         }
     }
 
     fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F)
-        where F: FnMut(&Constraint) -> bool
+        where F: FnMut(&Constraint, &SubregionOrigin<'tcx>) -> bool
     {
         let mut iteration = 0;
         let mut changed = true;
@@ -1288,8 +1469,8 @@ fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F)
             changed = false;
             iteration += 1;
             debug!("---- {} Iteration {}{}", "#", tag, iteration);
-            for (constraint, _) in self.constraints.borrow().iter() {
-                let edge_changed = body(constraint);
+            for (constraint, origin) in self.constraints.borrow().iter() {
+                let edge_changed = body(constraint, origin);
                 if edge_changed {
                     debug!("Updated due to constraint {:?}", constraint);
                     changed = true;
@@ -1301,19 +1482,6 @@ fn iterate_until_fixed_point<F>(&self, tag: &str, mut body: F)
 
 }
 
-impl<'tcx> fmt::Debug for Verify<'tcx> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        match *self {
-            VerifyRegSubReg(_, ref a, ref b) => {
-                write!(f, "VerifyRegSubReg({:?}, {:?})", a, b)
-            }
-            VerifyGenericBound(_, ref p, ref a, ref bs) => {
-                write!(f, "VerifyGenericBound({:?}, {:?}, {:?})", p, a, bs)
-            }
-        }
-    }
-}
-
 fn normalize(values: &Vec<VarValue>, r: ty::Region) -> ty::Region {
     match r {
         ty::ReVar(rid) => lookup(values, rid),
index c2a8d04ac00da6a6ed40c98a515f7029565f2cf7..badbeccee83cf6efdfecda136ca7c554b73ca943 100644 (file)
@@ -178,7 +178,9 @@ pub fn new_var(&mut self,
             value: Bounded { relations: vec![], default: default },
             diverging: diverging
         });
-        ty::TyVid { index: index as u32 }
+        let v = ty::TyVid { index: index as u32 };
+        debug!("new_var() -> {:?}", v);
+        v
     }
 
     pub fn root_var(&mut self, vid: ty::TyVid) -> ty::TyVid {
@@ -219,6 +221,17 @@ pub fn snapshot(&mut self) -> Snapshot {
     }
 
     pub fn rollback_to(&mut self, s: Snapshot) {
+        debug!("rollback_to{:?}", {
+            for action in self.values.actions_since_snapshot(&s.snapshot) {
+                match *action {
+                    sv::UndoLog::NewElem(index) => {
+                        debug!("inference variable _#{}t popped", index)
+                    }
+                    _ => { }
+                }
+            }
+        });
+
         self.values.rollback_to(s.snapshot);
         self.eq_relations.rollback_to(s.eq_snapshot);
     }
index 4cc9b0b4353a0b2da14878c789d888979c7df2c7..dcc84fb04399fa5ed812a5e9ca1735b468ed2185 100644 (file)
@@ -22,6 +22,7 @@
 use hir::map::Map;
 use session::Session;
 use hir::def::{Def, DefMap};
+use hir::def_id::DefId;
 use middle::region;
 use ty::subst;
 use ty;
@@ -32,6 +33,7 @@
 use syntax::parse::token::keywords;
 use util::nodemap::NodeMap;
 
+use rustc_data_structures::fnv::FnvHashSet;
 use hir;
 use hir::print::lifetime_to_string;
 use hir::intravisit::{self, Visitor, FnKind};
@@ -50,11 +52,21 @@ pub enum DefRegion {
 
 // Maps the id of each lifetime reference to the lifetime decl
 // that it corresponds to.
-pub type NamedRegionMap = NodeMap<DefRegion>;
+pub struct NamedRegionMap {
+    // maps from every use of a named (not anonymous) lifetime to a
+    // `DefRegion` describing how that region is bound
+    pub defs: NodeMap<DefRegion>,
+
+    // the set of lifetime def ids that are late-bound; late-bound ids
+    // are named regions appearing in fn arguments that do not appear
+    // in where-clauses
+    pub late_bound: NodeMap<ty::Issue32330>,
+}
 
-struct LifetimeContext<'a> {
+struct LifetimeContext<'a, 'tcx: 'a> {
     sess: &'a Session,
-    named_region_map: &'a mut NamedRegionMap,
+    hir_map: &'a Map<'tcx>,
+    map: &'a mut NamedRegionMap,
     scope: Scope<'a>,
     def_map: &'a DefMap,
     // Deep breath. Our representation for poly trait refs contains a single
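
A small illustration (not part of the patch) of the property that `late_bound` records: a named lifetime that appears only in the fn arguments is late-bound, while one that is mentioned in a where-clause is early-bound.

```rust
// `'a` is constrained by the where-clause `T: 'a`, so it is early-bound;
// `'b` appears only in the argument list, so it is late-bound.
fn pick<'a, 'b, T>(x: &'a T, _hint: &'b str) -> &'a T
    where T: 'a
{
    x
}

fn main() {
    let n = 10u32;
    let r = pick(&n, "first");
    assert_eq!(*r, 10);
}
```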
@@ -101,21 +113,25 @@ pub fn krate(sess: &Session,
              -> Result<NamedRegionMap, usize> {
     let _task = hir_map.dep_graph.in_task(DepNode::ResolveLifetimes);
     let krate = hir_map.krate();
-    let mut named_region_map = NodeMap();
+    let mut map = NamedRegionMap {
+        defs: NodeMap(),
+        late_bound: NodeMap(),
+    };
     sess.track_errors(|| {
         krate.visit_all_items(&mut LifetimeContext {
             sess: sess,
-            named_region_map: &mut named_region_map,
+            hir_map: hir_map,
+            map: &mut map,
             scope: &ROOT_SCOPE,
             def_map: def_map,
             trait_ref_hack: false,
             labels_in_fn: vec![],
         });
     })?;
-    Ok(named_region_map)
+    Ok(map)
 }
 
-impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> {
+impl<'a, 'tcx, 'v> Visitor<'v> for LifetimeContext<'a, 'tcx> {
     fn visit_item(&mut self, item: &hir::Item) {
         assert!(self.labels_in_fn.is_empty());
 
@@ -164,8 +180,12 @@ fn visit_foreign_item(&mut self, item: &hir::ForeignItem) {
         // Items always introduce a new root scope
         self.with(RootScope, |_, this| {
             match item.node {
-                hir::ForeignItemFn(_, ref generics) => {
-                    this.visit_early_late(subst::FnSpace, generics, |this| {
+                hir::ForeignItemFn(ref decl, ref generics) => {
+                    this.visit_early_late(item.id,
+                                          subst::FnSpace,
+                                          decl,
+                                          generics,
+                                          |this| {
                         intravisit::walk_foreign_item(this, item);
                     })
                 }
@@ -179,24 +199,27 @@ fn visit_foreign_item(&mut self, item: &hir::ForeignItem) {
         replace(&mut self.labels_in_fn, saved);
     }
 
-    fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v hir::FnDecl,
+    fn visit_fn(&mut self, fk: FnKind<'v>, decl: &'v hir::FnDecl,
                 b: &'v hir::Block, s: Span, fn_id: ast::NodeId) {
         match fk {
             FnKind::ItemFn(_, generics, _, _, _, _, _) => {
-                self.visit_early_late(subst::FnSpace, generics, |this| {
-                    this.add_scope_and_walk_fn(fk, fd, b, s, fn_id)
+                self.visit_early_late(fn_id, subst::FnSpace, decl, generics, |this| {
+                    this.add_scope_and_walk_fn(fk, decl, b, s, fn_id)
                 })
             }
             FnKind::Method(_, sig, _, _) => {
-                self.visit_early_late(subst::FnSpace, &sig.generics, |this| {
-                    this.add_scope_and_walk_fn(fk, fd, b, s, fn_id)
-                })
+                self.visit_early_late(
+                    fn_id,
+                    subst::FnSpace,
+                    decl,
+                    &sig.generics,
+                    |this| this.add_scope_and_walk_fn(fk, decl, b, s, fn_id));
             }
             FnKind::Closure(_) => {
                 // Closures have their own set of labels, save labels just
                 // like for foreign items above.
                 let saved = replace(&mut self.labels_in_fn, vec![]);
-                let result = self.add_scope_and_walk_fn(fk, fd, b, s, fn_id);
+                let result = self.add_scope_and_walk_fn(fk, decl, b, s, fn_id);
                 replace(&mut self.labels_in_fn, saved);
                 result
             }
@@ -240,7 +263,8 @@ fn visit_trait_item(&mut self, trait_item: &hir::TraitItem) {
 
         if let hir::MethodTraitItem(ref sig, None) = trait_item.node {
             self.visit_early_late(
-                subst::FnSpace, &sig.generics,
+                trait_item.id, subst::FnSpace,
+                &sig.decl, &sig.generics,
                 |this| intravisit::walk_trait_item(this, trait_item))
         } else {
             intravisit::walk_trait_item(self, trait_item);
@@ -380,8 +404,7 @@ fn signal_shadowing_problem(sess: &Session, name: ast::Name, orig: Original, sha
 
 // Adds all labels in `b` to `ctxt.labels_in_fn`, signalling a warning
 // if one of the label shadows a lifetime or another label.
-fn extract_labels<'v, 'a>(ctxt: &mut LifetimeContext<'a>, b: &'v hir::Block) {
-
+fn extract_labels(ctxt: &mut LifetimeContext, b: &hir::Block) {
     struct GatherLabels<'a> {
         sess: &'a Session,
         scope: Scope<'a>,
@@ -468,7 +491,7 @@ fn check_if_label_shadows_lifetime<'a>(sess: &'a Session,
     }
 }
 
-impl<'a> LifetimeContext<'a> {
+impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
     fn add_scope_and_walk_fn<'b>(&mut self,
                                  fk: FnKind,
                                  fd: &hir::FnDecl,
@@ -501,10 +524,11 @@ fn add_scope_and_walk_fn<'b>(&mut self,
     fn with<F>(&mut self, wrap_scope: ScopeChain, f: F) where
         F: FnOnce(Scope, &mut LifetimeContext),
     {
-        let LifetimeContext {sess, ref mut named_region_map, ..} = *self;
+        let LifetimeContext {sess, hir_map, ref mut map, ..} = *self;
         let mut this = LifetimeContext {
             sess: sess,
-            named_region_map: *named_region_map,
+            hir_map: hir_map,
+            map: *map,
             scope: &wrap_scope,
             def_map: self.def_map,
             trait_ref_hack: self.trait_ref_hack,
@@ -534,20 +558,27 @@ fn with<F>(&mut self, wrap_scope: ScopeChain, f: F) where
     /// bound lifetimes are resolved by name and associated with a binder id (`binder_id`), so the
     /// ordering is not important there.
     fn visit_early_late<F>(&mut self,
+                           fn_id: ast::NodeId,
                            early_space: subst::ParamSpace,
+                           decl: &hir::FnDecl,
                            generics: &hir::Generics,
                            walk: F) where
         F: FnOnce(&mut LifetimeContext),
     {
-        let referenced_idents = early_bound_lifetime_names(generics);
-
-        debug!("visit_early_late: referenced_idents={:?}",
-               referenced_idents);
-
-        let (early, late): (Vec<_>, _) = generics.lifetimes.iter().cloned().partition(
-            |l| referenced_idents.iter().any(|&i| i == l.lifetime.name));
-
-        self.with(EarlyScope(early_space, &early, self.scope), move |old_scope, this| {
+        let fn_def_id = self.hir_map.local_def_id(fn_id);
+        insert_late_bound_lifetimes(self.map,
+                                    fn_def_id,
+                                    decl,
+                                    generics);
+
+        let (late, early): (Vec<_>, _) =
+            generics.lifetimes
+                    .iter()
+                    .cloned()
+                    .partition(|l| self.map.late_bound.contains_key(&l.lifetime.id));
+
+        let this = self;
+        this.with(EarlyScope(early_space, &early, this.scope), move |old_scope, this| {
             this.with(LateScope(&late, this.scope), move |_, this| {
                 this.check_lifetime_defs(old_scope, &generics.lifetimes);
                 walk(this);
@@ -756,11 +787,12 @@ fn insert_lifetime(&mut self,
                        probably a bug in syntax::fold");
         }
 
-        debug!("lifetime_ref={:?} id={:?} resolved to {:?}",
-                lifetime_to_string(lifetime_ref),
-                lifetime_ref.id,
-                def);
-        self.named_region_map.insert(lifetime_ref.id, def);
+        debug!("lifetime_ref={:?} id={:?} resolved to {:?} span={:?}",
+               lifetime_to_string(lifetime_ref),
+               lifetime_ref.id,
+               def,
+               self.sess.codemap().span_to_string(lifetime_ref.span));
+        self.map.defs.insert(lifetime_ref.id, def);
     }
 }
 
@@ -777,95 +809,132 @@ fn search_lifetimes<'a>(lifetimes: &'a [hir::LifetimeDef],
 
 ///////////////////////////////////////////////////////////////////////////
 
-pub fn early_bound_lifetimes<'a>(generics: &'a hir::Generics) -> Vec<hir::LifetimeDef> {
-    let referenced_idents = early_bound_lifetime_names(generics);
-    if referenced_idents.is_empty() {
-        return Vec::new();
+/// Detects late-bound lifetimes and inserts them into
+/// `map.late_bound`.
+///
+/// A region declared on a fn is **late-bound** if:
+/// - it is constrained by an argument type;
+/// - it does not appear in a where-clause.
+///
+/// "Constrained" basically means that it appears in any type but
+/// not amongst the inputs to a projection.  In other words, `<&'a
+/// T as Trait<'b>>::Foo` does not constrain `'a` or `'b`.
+fn insert_late_bound_lifetimes(map: &mut NamedRegionMap,
+                               fn_def_id: DefId,
+                               decl: &hir::FnDecl,
+                               generics: &hir::Generics) {
+    debug!("insert_late_bound_lifetimes(decl={:?}, generics={:?})", decl, generics);
+
+    let mut constrained_by_input = ConstrainedCollector { regions: FnvHashSet() };
+    for arg in &decl.inputs {
+        constrained_by_input.visit_ty(&arg.ty);
     }
 
-    generics.lifetimes.iter()
-        .filter(|l| referenced_idents.iter().any(|&i| i == l.lifetime.name))
-        .cloned()
-        .collect()
-}
-
-/// Given a set of generic declarations, returns a list of names containing all early bound
-/// lifetime names for those generics. (In fact, this list may also contain other names.)
-fn early_bound_lifetime_names(generics: &hir::Generics) -> Vec<ast::Name> {
-    // Create two lists, dividing the lifetimes into early/late bound.
-    // Initially, all of them are considered late, but we will move
-    // things from late into early as we go if we find references to
-    // them.
-    let mut early_bound = Vec::new();
-    let mut late_bound = generics.lifetimes.iter()
-                                           .map(|l| l.lifetime.name)
-                                           .collect();
-
-    // Any lifetime that appears in a type bound is early.
-    {
-        let mut collector =
-            FreeLifetimeCollector { early_bound: &mut early_bound,
-                                    late_bound: &mut late_bound };
-        for ty_param in generics.ty_params.iter() {
-            walk_list!(&mut collector, visit_ty_param_bound, &ty_param.bounds);
+    let mut appears_in_output = AllCollector { regions: FnvHashSet() };
+    intravisit::walk_fn_ret_ty(&mut appears_in_output, &decl.output);
+
+    debug!("insert_late_bound_lifetimes: constrained_by_input={:?}",
+           constrained_by_input.regions);
+
+    // Walk the lifetimes that appear in where clauses.
+    //
+    // Subtle point: because we disallow nested bindings, we can just
+    // ignore binders here and scrape up all names we see.
+    let mut appears_in_where_clause = AllCollector { regions: FnvHashSet() };
+    for ty_param in generics.ty_params.iter() {
+        walk_list!(&mut appears_in_where_clause,
+                   visit_ty_param_bound,
+                   &ty_param.bounds);
+    }
+    walk_list!(&mut appears_in_where_clause,
+               visit_where_predicate,
+               &generics.where_clause.predicates);
+    for lifetime_def in &generics.lifetimes {
+        if !lifetime_def.bounds.is_empty() {
+            // `'a: 'b` means both `'a` and `'b` are referenced
+            appears_in_where_clause.visit_lifetime_def(lifetime_def);
         }
-        for predicate in &generics.where_clause.predicates {
-            match predicate {
-                &hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate{ref bounds,
-                                                                              ref bounded_ty,
-                                                                              ..}) => {
-                    collector.visit_ty(&bounded_ty);
-                    walk_list!(&mut collector, visit_ty_param_bound, bounds);
+    }
+
+    debug!("insert_late_bound_lifetimes: appears_in_where_clause={:?}",
+           appears_in_where_clause.regions);
+
+    // Late bound regions are those that:
+    // - appear in the inputs
+    // - do not appear in the where-clauses
+    for lifetime in &generics.lifetimes {
+        let name = lifetime.lifetime.name;
+
+        // appears in the where clauses? early-bound.
+        if appears_in_where_clause.regions.contains(&name) { continue; }
+
+        // does not appear in the inputs, but appears in the return
+        // type? eventually this will be early-bound, but for now we
+        // just mark it so we can issue warnings.
+        let constrained_by_input = constrained_by_input.regions.contains(&name);
+        let appears_in_output = appears_in_output.regions.contains(&name);
+        let will_change = !constrained_by_input && appears_in_output;
+        let issue_32330 = if will_change {
+            ty::Issue32330::WillChange {
+                fn_def_id: fn_def_id,
+                region_name: name,
+            }
+        } else {
+            ty::Issue32330::WontChange
+        };
+
+        debug!("insert_late_bound_lifetimes: \
+                lifetime {:?} with id {:?} is late-bound ({:?})",
+               lifetime.lifetime.name, lifetime.lifetime.id, issue_32330);
+
+        let prev = map.late_bound.insert(lifetime.lifetime.id, issue_32330);
+        assert!(prev.is_none(), "visited lifetime {:?} twice", lifetime.lifetime.id);
+    }
+
+    return;
+
+    struct ConstrainedCollector {
+        regions: FnvHashSet<ast::Name>,
+    }
+
+    impl<'v> Visitor<'v> for ConstrainedCollector {
+        fn visit_ty(&mut self, ty: &'v hir::Ty) {
+            match ty.node {
+                hir::TyPath(Some(_), _) => {
+                    // ignore lifetimes appearing in associated type
+                    // projections, as they are not *constrained*
+                    // (defined above)
                 }
-                &hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate{ref lifetime,
-                                                                                ref bounds,
-                                                                                ..}) => {
-                    collector.visit_lifetime(lifetime);
 
-                    for bound in bounds {
-                        collector.visit_lifetime(bound);
+                hir::TyPath(None, ref path) => {
+                    // consider only the lifetimes on the final
+                    // segment; I am not sure it's even currently
+                    // valid to have them elsewhere, but even if it
+                    // is, those would be potentially inputs to
+                    // projections
+                    if let Some(last_segment) = path.segments.last() {
+                        self.visit_path_segment(path.span, last_segment);
                     }
                 }
-                &hir::WherePredicate::EqPredicate(_) => bug!("unimplemented")
-            }
-        }
-    }
 
-    // Any lifetime that either has a bound or is referenced by a
-    // bound is early.
-    for lifetime_def in &generics.lifetimes {
-        if !lifetime_def.bounds.is_empty() {
-            shuffle(&mut early_bound, &mut late_bound,
-                    lifetime_def.lifetime.name);
-            for bound in &lifetime_def.bounds {
-                shuffle(&mut early_bound, &mut late_bound,
-                        bound.name);
+                _ => {
+                    intravisit::walk_ty(self, ty);
+                }
             }
         }
-    }
-    return early_bound;
 
-    struct FreeLifetimeCollector<'a> {
-        early_bound: &'a mut Vec<ast::Name>,
-        late_bound: &'a mut Vec<ast::Name>,
+        fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
+            self.regions.insert(lifetime_ref.name);
+        }
     }
 
-    impl<'a, 'v> Visitor<'v> for FreeLifetimeCollector<'a> {
-        fn visit_lifetime(&mut self, lifetime_ref: &hir::Lifetime) {
-            shuffle(self.early_bound, self.late_bound,
-                    lifetime_ref.name);
-        }
+    struct AllCollector {
+        regions: FnvHashSet<ast::Name>,
     }
 
-    fn shuffle(early_bound: &mut Vec<ast::Name>,
-               late_bound: &mut Vec<ast::Name>,
-               name: ast::Name) {
-        match late_bound.iter().position(|n| *n == name) {
-            Some(index) => {
-                late_bound.swap_remove(index);
-                early_bound.push(name);
-            }
-            None => { }
+    impl<'v> Visitor<'v> for AllCollector {
+        fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
+            self.regions.insert(lifetime_ref.name);
         }
     }
 }
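
The rewritten pass above replaces the old name-based early/late partition with `insert_late_bound_lifetimes`. Below is a standalone sketch of the classification rule it encodes, using plain string sets instead of HIR ids and `NamedRegionMap`; every name in it is invented for illustration and it is not compiler code.

```rust
use std::collections::HashSet;

#[derive(Debug, PartialEq)]
enum Binding {
    Early,                      // mentioned in a where-clause or lifetime bound
    Late { will_change: bool }, // late-bound; `will_change` models Issue32330::WillChange
}

fn classify(
    name: &str,
    appears_in_where_clause: &HashSet<&str>,
    constrained_by_input: &HashSet<&str>,
    appears_in_output: &HashSet<&str>,
) -> Binding {
    // Appears in the where-clauses? early-bound.
    if appears_in_where_clause.contains(name) {
        return Binding::Early;
    }
    // Otherwise late-bound; if it is only mentioned in the return type it will
    // become early-bound once #32330 is fixed, so flag it for a warning.
    let will_change =
        !constrained_by_input.contains(name) && appears_in_output.contains(name);
    Binding::Late { will_change }
}

fn main() {
    let where_clause: HashSet<_> = ["'b"].iter().cloned().collect();
    let inputs: HashSet<_> = ["'a"].iter().cloned().collect();
    let output: HashSet<_> = ["'c"].iter().cloned().collect();
    assert_eq!(classify("'a", &where_clause, &inputs, &output),
               Binding::Late { will_change: false });
    assert_eq!(classify("'b", &where_clause, &inputs, &output), Binding::Early);
    assert_eq!(classify("'c", &where_clause, &inputs, &output),
               Binding::Late { will_change: true });
}
```
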
index 05ed02211097c309091e83d9bea9a8244acae7ef..d58128b1c4a887f7da53fb6ee331c7f429fcd6d0 100644 (file)
@@ -796,6 +796,7 @@ pub fn build_target_config(opts: &Options, sp: &Handler) -> Config {
     };
 
     let (int_type, uint_type) = match &target.target_pointer_width[..] {
+        "16" => (ast::IntTy::I16, ast::UintTy::U16),
         "32" => (ast::IntTy::I32, ast::UintTy::U32),
         "64" => (ast::IntTy::I64, ast::UintTy::U64),
         w    => panic!(sp.fatal(&format!("target specification was invalid: \
index 0d7d7afd120d28d9e7080d7cd8830fb5a78e06ad..23ffa4db96f77549b9382d7910c968d63ab64b14 100644 (file)
@@ -171,10 +171,12 @@ pub fn register_predicate_obligation(&mut self,
         // debug output much nicer to read and so on.
         let obligation = infcx.resolve_type_vars_if_possible(&obligation);
 
+        debug!("register_predicate_obligation(obligation={:?})", obligation);
+
         infcx.obligations_in_snapshot.set(true);
 
-        if infcx.tcx.fulfilled_predicates.borrow().check_duplicate(&obligation.predicate)
-        {
+        if infcx.tcx.fulfilled_predicates.borrow().check_duplicate(&obligation.predicate) {
+            debug!("register_predicate_obligation: duplicate");
             return
         }
 
@@ -406,6 +408,8 @@ fn process_predicate<'a, 'gcx, 'tcx>(
                     // also includes references to its upvars as part
                     // of its type, and those types are resolved at
                     // the same time.
+                    //
+                    // FIXME(#32286) logic seems false if no upvars
                     pending_obligation.stalled_on =
                         trait_ref_type_vars(selcx, data.to_poly_trait_ref());
 
index c177ec4dbede7bcfc81ec845d5cff766d41b01b6..5b363d90578b1e7c2d996c904bc5bd5b468f1af7 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! Trait Resolution. See the Book for more.
+//! Trait Resolution. See README.md for an overview of how this works.
 
 pub use self::SelectionError::*;
 pub use self::FulfillmentErrorCode::*;
@@ -30,8 +30,9 @@
 pub use self::coherence::overlapping_impls;
 pub use self::coherence::OrphanCheckErr;
 pub use self::fulfill::{FulfillmentContext, GlobalFulfilledPredicates, RegionObligation};
-pub use self::project::{MismatchedProjectionTypes, ProjectionMode};
+pub use self::project::MismatchedProjectionTypes;
 pub use self::project::{normalize, normalize_projection_type, Normalized};
+pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, ProjectionMode};
 pub use self::object_safety::ObjectSafetyViolation;
 pub use self::object_safety::MethodViolationCode;
 pub use self::select::{EvaluationCache, SelectionContext, SelectionCache};
index 44ec42de8cbd68101b7f6af5f3d24b4c374dd949..30faf1a5f8b91b75fc13bf0ea69aaaec3f7acef3 100644 (file)
 use super::util;
 
 use hir::def_id::DefId;
-use infer::{self, InferOk, TypeOrigin};
+use infer::{InferOk, TypeOrigin};
+use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap};
+use syntax::parse::token;
+use syntax::ast;
 use ty::subst::Subst;
 use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt};
 use ty::fold::{TypeFoldable, TypeFolder};
-use syntax::parse::token;
-use syntax::ast;
 use util::common::FN_OUTPUT_NAME;
 
 use std::rc::Rc;
@@ -182,7 +183,8 @@ pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>(
         let skol_obligation = obligation.with(skol_predicate);
         match project_and_unify_type(selcx, &skol_obligation) {
             Ok(result) => {
-                match infcx.leak_check(false, &skol_map, snapshot) {
+                let span = obligation.cause.span;
+                match infcx.leak_check(false, span, &skol_map, snapshot) {
                     Ok(()) => Ok(infcx.plug_leaks(skol_map, snapshot, &result)),
                     Err(e) => Err(MismatchedProjectionTypes { err: e }),
                 }
@@ -256,9 +258,13 @@ pub fn normalize_with_depth<'a, 'b, 'gcx, 'tcx, T>(
 
     where T : TypeFoldable<'tcx>
 {
+    debug!("normalize_with_depth(depth={}, value={:?})", depth, value);
     let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth);
     let result = normalizer.fold(value);
-
+    debug!("normalize_with_depth: depth={} result={:?} with {} obligations",
+           depth, result, normalizer.obligations.len());
+    debug!("normalize_with_depth: depth={} obligations={:?}",
+           depth, normalizer.obligations);
     Normalized {
         value: result,
         obligations: normalizer.obligations,
@@ -330,13 +336,16 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
                 // binder). It would be better to normalize in a
                 // binding-aware fashion.
 
-                let Normalized { value: ty, obligations } =
+                let Normalized { value: normalized_ty, obligations } =
                     normalize_projection_type(self.selcx,
                                               data.clone(),
                                               self.cause.clone(),
                                               self.depth);
+                debug!("AssociatedTypeNormalizer: depth={} normalized {:?} to {:?} \
+                        with {} add'l obligations",
+                       self.depth, ty, normalized_ty, obligations.len());
                 self.obligations.extend(obligations);
-                ty
+                normalized_ty
             }
 
             _ => {
@@ -404,64 +413,161 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>(
     depth: usize)
     -> Option<NormalizedTy<'tcx>>
 {
-    debug!("normalize_projection_type(\
+    let infcx = selcx.infcx();
+
+    let projection_ty = infcx.resolve_type_vars_if_possible(&projection_ty);
+
+    debug!("opt_normalize_projection_type(\
            projection_ty={:?}, \
            depth={})",
            projection_ty,
            depth);
 
+    // FIXME(#20304) For now, I am caching here, which is good, but it
+    // means we don't capture the type variables that are created in
+    // the case of ambiguity. Which means we may create a large stream
+    // of such variables. OTOH, if we move the caching up a level, we
+    // would not benefit from caching when proving `T: Trait<U=Foo>`
+    // bounds. It might be the case that we want two distinct caches,
+    // or else another kind of cache entry.
+
+    match infcx.projection_cache.borrow_mut().try_start(projection_ty) {
+        Ok(()) => { }
+        Err(ProjectionCacheEntry::Ambiguous) => {
+            // If we found ambiguity the last time, that generally
+            // means we will continue to do so until some type in the
+            // key changes (and we know it hasn't, because we just
+            // fully resolved it). One exception though is closure
+            // types, which can transition from having a fixed kind to
+            // no kind with no visible change in the key.
+            //
+            // FIXME(#32286) refactor this so that closure type
+            // changes
+            debug!("opt_normalize_projection_type: \
+                    found cache entry: ambiguous");
+            if !projection_ty.has_closure_types() {
+                return None;
+            }
+        }
+        Err(ProjectionCacheEntry::InProgress) => {
+            // If, while normalizing A::B, we are asked to normalize
+            // A::B, just return A::B itself. This is a conservative
+            // answer, in the sense that A::B *is* clearly equivalent
+            // to A::B, though there may be a better value we can
+            // find.
+
+            // Under lazy normalization, this can arise when
+            // bootstrapping.  That is, imagine an environment with a
+            // where-clause like `A::B == u32`. Now, if we are asked
+            // to normalize `A::B`, we will want to check the
+            // where-clauses in scope. So we will try to unify `A::B`
+            // with `A::B`, which can trigger a recursive
+            // normalization. In that case, I think we will want this code:
+            //
+            // ```
+            // let ty = selcx.tcx().mk_projection(projection_ty.trait_ref,
+            //                                    projection_ty.item_name);
+            // return Some(NormalizedTy { value: ty, obligations: vec![] });
+            // ```
+
+            debug!("opt_normalize_projection_type: \
+                    found cache entry: in-progress");
+
+            // But for now, let's classify this as an overflow:
+            let recursion_limit = selcx.tcx().sess.recursion_limit.get();
+            let obligation = Obligation::with_depth(cause.clone(),
+                                                    recursion_limit,
+                                                    projection_ty);
+            selcx.infcx().report_overflow_error(&obligation, false);
+        }
+        Err(ProjectionCacheEntry::NormalizedTy(ty)) => {
+            // If we find the value in the cache, then the obligations
+            // have already been returned from the previous entry (and
+            // should therefore have been honored).
+            debug!("opt_normalize_projection_type: \
+                    found normalized ty `{:?}`",
+                   ty);
+            return Some(NormalizedTy { value: ty, obligations: vec![] });
+        }
+        Err(ProjectionCacheEntry::Error) => {
+            debug!("opt_normalize_projection_type: \
+                    found error");
+            return Some(normalize_to_error(selcx, projection_ty, cause, depth));
+        }
+    }
+
     let obligation = Obligation::with_depth(cause.clone(), depth, projection_ty.clone());
     match project_type(selcx, &obligation) {
-        Ok(ProjectedTy::Progress(projected_ty, mut obligations)) => {
+        Ok(ProjectedTy::Progress(Progress { ty: projected_ty,
+                                            mut obligations,
+                                            cacheable })) => {
             // if projection succeeded, then what we get out of this
             // is also non-normalized (consider: it was derived from
             // an impl, where-clause etc) and hence we must
             // re-normalize it
 
-            debug!("normalize_projection_type: projected_ty={:?} depth={} obligations={:?}",
+            debug!("opt_normalize_projection_type: \
+                    projected_ty={:?} \
+                    depth={} \
+                    obligations={:?} \
+                    cacheable={:?}",
                    projected_ty,
                    depth,
-                   obligations);
+                   obligations,
+                   cacheable);
 
-            if projected_ty.has_projection_types() {
+            let result = if projected_ty.has_projection_types() {
                 let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth+1);
                 let normalized_ty = normalizer.fold(&projected_ty);
 
-                debug!("normalize_projection_type: normalized_ty={:?} depth={}",
+                debug!("opt_normalize_projection_type: \
+                        normalized_ty={:?} depth={}",
                        normalized_ty,
                        depth);
 
                 obligations.extend(normalizer.obligations);
-                Some(Normalized {
+                Normalized {
                     value: normalized_ty,
                     obligations: obligations,
-                })
+                }
             } else {
-                Some(Normalized {
+                Normalized {
                     value: projected_ty,
                     obligations: obligations,
-                })
-            }
+                }
+            };
+            infcx.projection_cache.borrow_mut()
+                                  .complete(projection_ty, &result, cacheable);
+            Some(result)
         }
         Ok(ProjectedTy::NoProgress(projected_ty)) => {
-            debug!("normalize_projection_type: projected_ty={:?} no progress",
+            debug!("opt_normalize_projection_type: \
+                    projected_ty={:?} no progress",
                    projected_ty);
-            Some(Normalized {
+            let result = Normalized {
                 value: projected_ty,
                 obligations: vec!()
-            })
+            };
+            infcx.projection_cache.borrow_mut()
+                                  .complete(projection_ty, &result, true);
+            Some(result)
         }
         Err(ProjectionTyError::TooManyCandidates) => {
-            debug!("normalize_projection_type: too many candidates");
+            debug!("opt_normalize_projection_type: \
+                    too many candidates");
+            infcx.projection_cache.borrow_mut()
+                                  .ambiguous(projection_ty);
             None
         }
         Err(ProjectionTyError::TraitSelectionError(_)) => {
-            debug!("normalize_projection_type: ERROR");
+            debug!("opt_normalize_projection_type: ERROR");
             // if we got an error processing the `T as Trait` part,
             // just return `ty::err` but add the obligation `T :
             // Trait`, which when processed will cause the error to be
             // reported later
 
+            infcx.projection_cache.borrow_mut()
+                                  .error(projection_ty);
             Some(normalize_to_error(selcx, projection_ty, cause, depth))
         }
     }
@@ -504,11 +610,43 @@ fn normalize_to_error<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tc
 }
 
 enum ProjectedTy<'tcx> {
-    Progress(Ty<'tcx>, Vec<PredicateObligation<'tcx>>),
+    Progress(Progress<'tcx>),
     NoProgress(Ty<'tcx>),
 }
 
+struct Progress<'tcx> {
+    ty: Ty<'tcx>,
+    obligations: Vec<PredicateObligation<'tcx>>,
+    cacheable: bool,
+}
+
+impl<'tcx> Progress<'tcx> {
+    fn error<'a,'gcx>(tcx: TyCtxt<'a,'gcx,'tcx>) -> Self {
+        Progress {
+            ty: tcx.types.err,
+            obligations: vec![],
+            cacheable: true
+        }
+    }
+
+    fn with_addl_obligations(mut self,
+                             mut obligations: Vec<PredicateObligation<'tcx>>)
+                             -> Self {
+        debug!("with_addl_obligations: self.obligations.len={} obligations.len={}",
+               self.obligations.len(), obligations.len());
+
+        debug!("with_addl_obligations: self.obligations={:?} obligations={:?}",
+               self.obligations, obligations);
+
+        self.obligations.append(&mut obligations);
+        self
+    }
+}
+
 /// Compute the result of a projection type (if we can).
+///
+/// IMPORTANT:
+/// - `obligation` must be fully normalized
 fn project_type<'cx, 'gcx, 'tcx>(
     selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
     obligation: &ProjectionTyObligation<'tcx>)
@@ -523,13 +661,12 @@ fn project_type<'cx, 'gcx, 'tcx>(
         selcx.infcx().report_overflow_error(&obligation, true);
     }
 
-    let obligation_trait_ref =
-        selcx.infcx().resolve_type_vars_if_possible(&obligation.predicate.trait_ref);
+    let obligation_trait_ref = &obligation.predicate.trait_ref;
 
     debug!("project: obligation_trait_ref={:?}", obligation_trait_ref);
 
     if obligation_trait_ref.references_error() {
-        return Ok(ProjectedTy::Progress(selcx.tcx().types.err, vec!()));
+        return Ok(ProjectedTy::Progress(Progress::error(selcx.tcx())));
     }
 
     let mut candidates = ProjectionTyCandidateSet {
@@ -607,16 +744,17 @@ fn project_type<'cx, 'gcx, 'tcx>(
 
     match candidates.vec.pop() {
         Some(candidate) => {
-            let (ty, obligations) = confirm_candidate(selcx,
-                                                      obligation,
-                                                      &obligation_trait_ref,
-                                                      candidate);
-            Ok(ProjectedTy::Progress(ty, obligations))
+            Ok(ProjectedTy::Progress(
+                confirm_candidate(selcx,
+                                  obligation,
+                                  &obligation_trait_ref,
+                                  candidate)))
         }
         None => {
-            Ok(ProjectedTy::NoProgress(selcx.tcx().mk_projection(
-                obligation.predicate.trait_ref.clone(),
-                obligation.predicate.item_name)))
+            Ok(ProjectedTy::NoProgress(
+                selcx.tcx().mk_projection(
+                    obligation.predicate.trait_ref.clone(),
+                    obligation.predicate.item_name)))
         }
     }
 }
@@ -919,7 +1057,7 @@ fn confirm_candidate<'cx, 'gcx, 'tcx>(
     obligation: &ProjectionTyObligation<'tcx>,
     obligation_trait_ref: &ty::TraitRef<'tcx>,
     candidate: ProjectionTyCandidate<'tcx>)
-    -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
+    -> Progress<'tcx>
 {
     debug!("confirm_candidate(candidate={:?}, obligation={:?})",
            candidate,
@@ -941,7 +1079,7 @@ fn confirm_select_candidate<'cx, 'gcx, 'tcx>(
     selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
     obligation: &ProjectionTyObligation<'tcx>,
     obligation_trait_ref: &ty::TraitRef<'tcx>)
-    -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
+    -> Progress<'tcx>
 {
     let poly_trait_ref = obligation_trait_ref.to_poly_trait_ref();
     let trait_obligation = obligation.with(poly_trait_ref.to_poly_trait_predicate());
@@ -979,7 +1117,7 @@ fn confirm_object_candidate<'cx, 'gcx, 'tcx>(
     selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
     obligation:  &ProjectionTyObligation<'tcx>,
     obligation_trait_ref: &ty::TraitRef<'tcx>)
-    -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
+    -> Progress<'tcx>
 {
     let self_ty = obligation_trait_ref.self_ty();
     let object_ty = selcx.infcx().shallow_resolve(self_ty);
@@ -991,7 +1129,7 @@ fn confirm_object_candidate<'cx, 'gcx, 'tcx>(
             span_bug!(
                 obligation.cause.span,
                 "confirm_object_candidate called with non-object: {:?}",
-                object_ty);
+                object_ty)
         }
     };
     let projection_bounds = data.projection_bounds_with_self_ty(selcx.tcx(), object_ty);
@@ -1035,7 +1173,7 @@ fn confirm_object_candidate<'cx, 'gcx, 'tcx>(
                 debug!("confirm_object_candidate: no env-predicate \
                         found in object type `{:?}`; ill-formed",
                        object_ty);
-                return (selcx.tcx().types.err, vec!());
+                return Progress::error(selcx.tcx());
             }
         }
     };
@@ -1047,7 +1185,7 @@ fn confirm_fn_pointer_candidate<'cx, 'gcx, 'tcx>(
     selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
     obligation: &ProjectionTyObligation<'tcx>,
     fn_pointer_vtable: VtableFnPointerData<'tcx, PredicateObligation<'tcx>>)
-    -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
+    -> Progress<'tcx>
 {
     // FIXME(#32730) drop this assertion once obligations are propagated from inference (fn pointer
     // vtable nested obligations ONLY come from unification in inference)
@@ -1061,23 +1199,29 @@ fn confirm_closure_candidate<'cx, 'gcx, 'tcx>(
     selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
     obligation: &ProjectionTyObligation<'tcx>,
     vtable: VtableClosureData<'tcx, PredicateObligation<'tcx>>)
-    -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
+    -> Progress<'tcx>
 {
     let closure_typer = selcx.closure_typer();
     let closure_type = closure_typer.closure_type(vtable.closure_def_id, vtable.substs);
     let Normalized {
         value: closure_type,
-        mut obligations
+        obligations
     } = normalize_with_depth(selcx,
                              obligation.cause.clone(),
                              obligation.recursion_depth+1,
                              &closure_type);
-    let (ty, mut cc_obligations) = confirm_callable_candidate(selcx,
-                                                              obligation,
-                                                              &closure_type.sig,
-                                                              util::TupleArgumentsFlag::No);
-    obligations.append(&mut cc_obligations);
-    (ty, obligations)
+
+    debug!("confirm_closure_candidate: obligation={:?},closure_type={:?},obligations={:?}",
+           obligation,
+           closure_type,
+           obligations);
+
+    confirm_callable_candidate(selcx,
+                               obligation,
+                               &closure_type.sig,
+                               util::TupleArgumentsFlag::No)
+        .with_addl_obligations(obligations)
+        .with_addl_obligations(vtable.nested)
 }
 
 fn confirm_callable_candidate<'cx, 'gcx, 'tcx>(
@@ -1085,7 +1229,7 @@ fn confirm_callable_candidate<'cx, 'gcx, 'tcx>(
     obligation: &ProjectionTyObligation<'tcx>,
     fn_sig: &ty::PolyFnSig<'tcx>,
     flag: util::TupleArgumentsFlag)
-    -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
+    -> Progress<'tcx>
 {
     let tcx = selcx.tcx();
 
@@ -1118,47 +1262,38 @@ fn confirm_param_env_candidate<'cx, 'gcx, 'tcx>(
     selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
     obligation: &ProjectionTyObligation<'tcx>,
     poly_projection: ty::PolyProjectionPredicate<'tcx>)
-    -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
+    -> Progress<'tcx>
 {
     let infcx = selcx.infcx();
-
-    let projection =
-        infcx.replace_late_bound_regions_with_fresh_var(
-            obligation.cause.span,
-            infer::LateBoundRegionConversionTime::HigherRankedType,
-            &poly_projection).0;
-
-    assert_eq!(projection.projection_ty.item_name,
-               obligation.predicate.item_name);
-
     let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span);
-    let obligations = match infcx.eq_trait_refs(false,
-                                                origin,
-                                                obligation.predicate.trait_ref.clone(),
-                                                projection.projection_ty.trait_ref.clone()) {
-        Ok(InferOk { obligations, .. }) => {
-            // FIXME(#32730) once obligations are generated in inference, remove this assertion
+    let trait_ref = obligation.predicate.trait_ref;
+    match infcx.match_poly_projection_predicate(origin, poly_projection, trait_ref) {
+        Ok(InferOk { value: ty_match, obligations }) => {
+            // FIXME(#32730) once obligations are generated in inference, drop this assertion
             assert!(obligations.is_empty());
-            obligations
+            Progress {
+                ty: ty_match.value,
+                obligations: obligations,
+                cacheable: ty_match.unconstrained_regions.is_empty(),
+            }
         }
         Err(e) => {
             span_bug!(
                 obligation.cause.span,
-                "Failed to unify `{:?}` and `{:?}` in projection: {}",
+                "Failed to unify obligation `{:?}` \
+                 with poly_projection `{:?}`: {:?}",
                 obligation,
-                projection,
+                poly_projection,
                 e);
         }
-    };
-
-    (projection.ty, obligations)
+    }
 }
 
 fn confirm_impl_candidate<'cx, 'gcx, 'tcx>(
     selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
     obligation: &ProjectionTyObligation<'tcx>,
     impl_vtable: VtableImplData<'tcx, PredicateObligation<'tcx>>)
-    -> (Ty<'tcx>, Vec<PredicateObligation<'tcx>>)
+    -> Progress<'tcx>
 {
     let VtableImplData { substs, nested, impl_def_id } = impl_vtable;
 
@@ -1179,7 +1314,11 @@ fn confirm_impl_candidate<'cx, 'gcx, 'tcx>(
                 tcx.types.err
             });
             let substs = translate_substs(selcx.infcx(), impl_def_id, substs, node_item.node);
-            (ty.subst(tcx, substs), nested)
+            Progress {
+                ty: ty.subst(tcx, substs),
+                obligations: nested,
+                cacheable: true
+            }
         }
         None => {
             span_bug!(obligation.cause.span,
@@ -1222,3 +1361,91 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>(
             .next()
     }
 }
+
+// # Cache
+
+pub struct ProjectionCache<'tcx> {
+    map: SnapshotMap<ty::ProjectionTy<'tcx>, ProjectionCacheEntry<'tcx>>,
+}
+
+#[derive(Clone, Debug)]
+enum ProjectionCacheEntry<'tcx> {
+    InProgress,
+    Ambiguous,
+    Error,
+    NormalizedTy(Ty<'tcx>),
+}
+
+// NB: intentionally not Clone
+pub struct ProjectionCacheSnapshot {
+    snapshot: Snapshot
+}
+
+impl<'tcx> ProjectionCache<'tcx> {
+    pub fn new() -> Self {
+        ProjectionCache {
+            map: SnapshotMap::new()
+        }
+    }
+
+    pub fn snapshot(&mut self) -> ProjectionCacheSnapshot {
+        ProjectionCacheSnapshot { snapshot: self.map.snapshot() }
+    }
+
+    pub fn rollback_to(&mut self, snapshot: ProjectionCacheSnapshot) {
+        self.map.rollback_to(snapshot.snapshot);
+    }
+
+    pub fn commit(&mut self, snapshot: ProjectionCacheSnapshot) {
+        self.map.commit(snapshot.snapshot);
+    }
+
+    /// Try to start normalizing `key`; returns an error if
+    /// normalization already occurred (this error corresponds to a
+    /// cache hit, so it's actually a good thing).
+    fn try_start(&mut self, key: ty::ProjectionTy<'tcx>)
+                 -> Result<(), ProjectionCacheEntry<'tcx>> {
+        match self.map.get(&key) {
+            Some(entry) => return Err(entry.clone()),
+            None => { }
+        }
+
+        self.map.insert(key, ProjectionCacheEntry::InProgress);
+        Ok(())
+    }
+
+    /// Indicates that `key` was normalized to `value`. If `cacheable` is false,
+    /// then this result is sadly not cacheable.
+    fn complete(&mut self,
+                key: ty::ProjectionTy<'tcx>,
+                value: &NormalizedTy<'tcx>,
+                cacheable: bool) {
+        let fresh_key = if cacheable {
+            debug!("ProjectionCacheEntry::complete: adding cache entry: key={:?}, value={:?}",
+                   key, value);
+            self.map.insert(key, ProjectionCacheEntry::NormalizedTy(value.value))
+        } else {
+            debug!("ProjectionCacheEntry::complete: cannot cache: key={:?}, value={:?}",
+                   key, value);
+            !self.map.remove(key)
+        };
+
+        assert!(!fresh_key, "never started projecting `{:?}`", key);
+    }
+
+    /// Indicates that trying to normalize `key` resulted in
+    /// ambiguity. No point in trying it again then until we gain more
+    /// type information (in which case, the "fully resolved" key will
+    /// be different).
+    fn ambiguous(&mut self, key: ty::ProjectionTy<'tcx>) {
+        let fresh = self.map.insert(key, ProjectionCacheEntry::Ambiguous);
+        assert!(!fresh, "never started projecting `{:?}`", key);
+    }
+
+    /// Indicates that trying to normalize `key` resulted in
+    /// error.
+    fn error(&mut self, key: ty::ProjectionTy<'tcx>) {
+        let fresh = self.map.insert(key, ProjectionCacheEntry::Error);
+        assert!(!fresh, "never started projecting `{:?}`", key);
+    }
+}
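
The `ProjectionCache` added above is the core of the change to projection normalization: `try_start` marks a key in-progress, recursive requests observe that marker, and `complete` records the result (or drops it when not cacheable). Here is a simplified, self-contained model of that protocol on a plain `HashMap`; the real type wraps a `SnapshotMap` so entries roll back with inference snapshots, and everything below is illustrative rather than rustc code.

```rust
use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Entry {
    InProgress,
    NormalizedTy(String), // stands in for `Ty<'tcx>`
}

#[derive(Default)]
struct Cache {
    map: HashMap<String, Entry>,
}

impl Cache {
    /// `Err(entry)` is the cache-hit path taken by `opt_normalize_projection_type`.
    fn try_start(&mut self, key: &str) -> Result<(), Entry> {
        if let Some(entry) = self.map.get(key) {
            return Err(entry.clone());
        }
        self.map.insert(key.to_string(), Entry::InProgress);
        Ok(())
    }

    /// Record the outcome; uncacheable results just clear the in-progress marker.
    fn complete(&mut self, key: &str, ty: &str, cacheable: bool) {
        if cacheable {
            self.map.insert(key.to_string(), Entry::NormalizedTy(ty.to_string()));
        } else {
            let _ = self.map.remove(key);
        }
    }
}

fn main() {
    let mut cache = Cache::default();
    assert!(cache.try_start("<T as Iterator>::Item").is_ok());
    cache.complete("<T as Iterator>::Item", "u32", true);
    // A second normalization request hits the cache instead of re-projecting.
    assert_eq!(cache.try_start("<T as Iterator>::Item"),
               Err(Entry::NormalizedTy("u32".to_string())));
}
```
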
index 5307749b87b6adf3515bbc2a7f360f46809f49a0..7a20b43b8f2e69a470c67810c95a1be71a48a819 100644 (file)
@@ -46,6 +46,7 @@
 use std::cell::RefCell;
 use std::fmt;
 use std::marker::PhantomData;
+use std::mem;
 use std::rc::Rc;
 use syntax::abi::Abi;
 use hir;
@@ -1237,6 +1238,9 @@ fn match_projection_obligation_against_bounds_from_trait(
                                                    skol_trait_predicate.trait_ref.clone(),
                                                    &skol_map,
                                                    snapshot);
+
+                self.infcx.pop_skolemized(skol_map, snapshot);
+
                 assert!(result);
                 true
             }
@@ -1263,7 +1267,7 @@ fn match_projection(&mut self,
             Err(_) => { return false; }
         }
 
-        self.infcx.leak_check(false, skol_map, snapshot).is_ok()
+        self.infcx.leak_check(false, obligation.cause.span, skol_map, snapshot).is_ok()
     }
 
     /// Given an obligation like `<SomeTrait for T>`, search the obligations that the caller
@@ -1422,9 +1426,16 @@ fn assemble_candidates_from_impls(&mut self,
             self.tcx(),
             obligation.predicate.0.trait_ref.self_ty(),
             |impl_def_id| {
-                self.probe(|this, snapshot| {
-                    if let Ok(_) = this.match_impl(impl_def_id, obligation, snapshot) {
-                        candidates.vec.push(ImplCandidate(impl_def_id));
+                self.probe(|this, snapshot| { /* [1] */
+                    match this.match_impl(impl_def_id, obligation, snapshot) {
+                        Ok(skol_map) => {
+                            candidates.vec.push(ImplCandidate(impl_def_id));
+
+                            // NB: we can safely drop the skol map
+                            // since we are in a probe [1]
+                            mem::drop(skol_map);
+                        }
+                        Err(_) => { }
                     }
                 });
             }
@@ -1509,9 +1520,11 @@ fn assemble_candidates_from_object_ty(&mut self,
             return;
         }
 
-        self.probe(|this, snapshot| {
-            let (self_ty, _) =
-                this.infcx().skolemize_late_bound_regions(&obligation.self_ty(), snapshot);
+        self.probe(|this, _snapshot| {
+            // the code below doesn't care about regions, and the
+            // self-ty here doesn't escape this probe, so just erase
+            // any LBR.
+            let self_ty = this.tcx().erase_late_bound_regions(&obligation.self_ty());
             let poly_trait_ref = match self_ty.sty {
                 ty::TyTrait(ref data) => {
                     match this.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) {
@@ -2710,7 +2723,10 @@ fn match_impl(&mut self,
             })?;
         self.inferred_obligations.extend(obligations);
 
-        if let Err(e) = self.infcx.leak_check(false, &skol_map, snapshot) {
+        if let Err(e) = self.infcx.leak_check(false,
+                                              obligation.cause.span,
+                                              &skol_map,
+                                              snapshot) {
             debug!("match_impl: failed leak check due to `{}`", e);
             return Err(());
         }
index a1da3017fcd043f9eff961ab05be01da47777f51..b12581b34003d64841a6d5253171254fb0edba37 100644 (file)
@@ -176,9 +176,13 @@ fn add_fn_sig(&mut self, fn_sig: &ty::PolyFnSig) {
 
     fn add_region(&mut self, r: ty::Region) {
         match r {
-            ty::ReVar(..) |
+            ty::ReVar(..) => {
+                self.add_flags(TypeFlags::HAS_RE_INFER);
+                self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX);
+            }
             ty::ReSkolemized(..) => {
                 self.add_flags(TypeFlags::HAS_RE_INFER);
+                self.add_flags(TypeFlags::HAS_RE_SKOL);
                 self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX);
             }
             ty::ReLateBound(debruijn, _) => { self.add_depth(debruijn.depth); }
index 82a3b0b8db28467c7d32097ec3ca66da4e0d64a2..07156c67a22075231e5bba9718b1d14ace745505 100644 (file)
@@ -170,6 +170,7 @@ pub fn parse(sess: &Session) -> TargetDataLayout {
     /// address space on 64-bit ARMv8 and x86_64.
     pub fn obj_size_bound(&self) -> u64 {
         match self.pointer_size.bits() {
+            16 => 1 << 15,
             32 => 1 << 31,
             64 => 1 << 47,
             bits => bug!("obj_size_bound: unknown pointer bit size {}", bits)
@@ -178,6 +179,7 @@ pub fn obj_size_bound(&self) -> u64 {
 
     pub fn ptr_sized_integer(&self) -> Integer {
         match self.pointer_size.bits() {
+            16 => I16,
             32 => I32,
             64 => I64,
             bits => bug!("ptr_sized_integer: unknown pointer bit size {}", bits)
index 24f0671ce6184c2c03a396653ecb9a329df81421..4069aa6b955d4af9f951bc737500410edbda3791 100644 (file)
@@ -60,6 +60,7 @@
 pub use self::sty::{ClosureSubsts, TypeAndMut};
 pub use self::sty::{TraitRef, TypeVariants, PolyTraitRef};
 pub use self::sty::{BoundRegion, EarlyBoundRegion, FreeRegion, Region};
+pub use self::sty::Issue32330;
 pub use self::sty::{TyVid, IntVid, FloatVid, RegionVid, SkolemizedRegionVid};
 pub use self::sty::BoundRegion::*;
 pub use self::sty::FnOutput::*;
@@ -514,19 +515,20 @@ pub enum FragmentInfo {
         const HAS_SELF           = 1 << 1,
         const HAS_TY_INFER       = 1 << 2,
         const HAS_RE_INFER       = 1 << 3,
-        const HAS_RE_EARLY_BOUND = 1 << 4,
-        const HAS_FREE_REGIONS   = 1 << 5,
-        const HAS_TY_ERR         = 1 << 6,
-        const HAS_PROJECTION     = 1 << 7,
-        const HAS_TY_CLOSURE     = 1 << 8,
+        const HAS_RE_SKOL        = 1 << 4,
+        const HAS_RE_EARLY_BOUND = 1 << 5,
+        const HAS_FREE_REGIONS   = 1 << 6,
+        const HAS_TY_ERR         = 1 << 7,
+        const HAS_PROJECTION     = 1 << 8,
+        const HAS_TY_CLOSURE     = 1 << 9,
 
         // true if there are "names" of types and regions and so forth
         // that are local to a particular fn
-        const HAS_LOCAL_NAMES   = 1 << 9,
+        const HAS_LOCAL_NAMES    = 1 << 10,
 
         // Present if the type belongs in a local type context.
         // Only set for TyInfer other than Fresh.
-        const KEEP_IN_LOCAL_TCX = 1 << 10,
+        const KEEP_IN_LOCAL_TCX  = 1 << 11,
 
         const NEEDS_SUBST        = TypeFlags::HAS_PARAMS.bits |
                                    TypeFlags::HAS_SELF.bits |
@@ -739,7 +741,8 @@ pub fn to_early_bound_region(&self) -> ty::Region {
         })
     }
     pub fn to_bound_region(&self) -> ty::BoundRegion {
-        ty::BoundRegion::BrNamed(self.def_id, self.name)
+        // this is an early bound region, so unaffected by #32330
+        ty::BoundRegion::BrNamed(self.def_id, self.name, Issue32330::WontChange)
     }
 }
 
@@ -946,7 +949,28 @@ pub fn def_id(&self) -> DefId {
 
     /// Creates the dep-node for selecting/evaluating this trait reference.
     fn dep_node(&self) -> DepNode<DefId> {
-        DepNode::TraitSelect(self.def_id())
+        // Ideally, the dep-node would just have all the input types
+        // in it.  But they are limited to including def-ids. So as an
+        // approximation we include the def-ids for all nominal types
+        // found somewhere. This means that we will e.g. conflate the
+        // dep-nodes for `u32: SomeTrait` and `u64: SomeTrait`, but we
+        // would have distinct dep-nodes for `Vec<u32>: SomeTrait`,
+        // `Rc<u32>: SomeTrait`, and `(Vec<u32>, Rc<u32>): SomeTrait`.
+        // Note that it's always sound to conflate dep-nodes, it just
+        // leads to more recompilation.
+        let def_ids: Vec<_> =
+            self.input_types()
+                .iter()
+                .flat_map(|t| t.walk())
+                .filter_map(|t| match t.sty {
+                    ty::TyStruct(adt_def, _) |
+                    ty::TyEnum(adt_def, _) =>
+                        Some(adt_def.did),
+                    _ =>
+                        None
+                })
+                .collect();
+        DepNode::TraitSelect(self.def_id(), def_ids)
     }
 
     pub fn input_types(&self) -> &[Ty<'tcx>] {
@@ -992,7 +1016,7 @@ pub fn dep_node(&self) -> DepNode<DefId> {
 /// equality between arbitrary types. Processing an instance of Form
 /// #2 eventually yields one of these `ProjectionPredicate`
 /// instances to normalize the LHS.
-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash)]
 pub struct ProjectionPredicate<'tcx> {
     pub projection_ty: ProjectionTy<'tcx>,
     pub ty: Ty<'tcx>,
@@ -1768,9 +1792,8 @@ fn calculate_sized_constraint_inner(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                         stack: &mut Vec<AdtDefMaster<'tcx>>)
     {
 
-        let dep_node = DepNode::SizedConstraint(self.did);
-
-        if self.sized_constraint.get(dep_node).is_some() {
+        let dep_node = || DepNode::SizedConstraint(self.did);
+        if self.sized_constraint.get(dep_node()).is_some() {
             return;
         }
 
@@ -1780,7 +1803,7 @@ fn calculate_sized_constraint_inner(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
             //
             // Consider the type as Sized in the meanwhile to avoid
             // further errors.
-            self.sized_constraint.fulfill(dep_node, tcx.types.err);
+            self.sized_constraint.fulfill(dep_node(), tcx.types.err);
             return;
         }
 
@@ -1803,14 +1826,14 @@ fn calculate_sized_constraint_inner(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
             _ => tcx.mk_tup(tys)
         };
 
-        match self.sized_constraint.get(dep_node) {
+        match self.sized_constraint.get(dep_node()) {
             Some(old_ty) => {
                 debug!("calculate_sized_constraint: {:?} recurred", self);
                 assert_eq!(old_ty, tcx.types.err)
             }
             None => {
                 debug!("calculate_sized_constraint: {:?} => {:?}", self, ty);
-                self.sized_constraint.fulfill(dep_node, ty)
+                self.sized_constraint.fulfill(dep_node(), ty)
             }
         }
     }
@@ -2835,7 +2858,7 @@ pub fn construct_free_substs(self, generics: &Generics<'gcx>,
         for def in generics.regions.as_slice() {
             let region =
                 ReFree(FreeRegion { scope: free_id_outlive,
-                                    bound_region: BrNamed(def.def_id, def.name) });
+                                    bound_region: def.to_bound_region() });
             debug!("push_region_params {:?}", region);
             regions.push(def.space, region);
         }
index 77e980ff3196520c05600cf42bcf5245c56c7e6e..1e2920ca87ea6d712b3a2f64839160f1fcc7a542 100644 (file)
@@ -827,7 +827,7 @@ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F)
             def_id: self.def_id,
             space: self.space,
             index: self.index,
-            bounds: self.bounds.fold_with(folder)
+            bounds: self.bounds.fold_with(folder),
         }
     }
 
index 32434d40e61a9d4ece268d69dcc99162bb41f16b..2bb88e52f8887b06496e6b0100a3626322ca8179 100644 (file)
@@ -58,7 +58,7 @@ pub enum BoundRegion {
     ///
     /// The def-id is needed to distinguish free regions in
     /// the event of shadowing.
-    BrNamed(DefId, Name),
+    BrNamed(DefId, Name, Issue32330),
 
     /// Fresh bound identifiers created during GLB computations.
     BrFresh(u32),
@@ -68,6 +68,25 @@ pub enum BoundRegion {
     BrEnv
 }
 
+/// Records whether this late-bound region is unconstrained, and hence will
+/// become early-bound once #32330 is fixed.
+#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Ord, Hash,
+         RustcEncodable, RustcDecodable)]
+pub enum Issue32330 {
+    WontChange,
+
+    /// this region will change from late-bound to early-bound once
+    /// #32330 is fixed.
+    WillChange {
+        /// fn where the region is declared
+        fn_def_id: DefId,
+
+        /// name of region; duplicates the info in BrNamed but convenient
+        /// to have it here, and this code is only temporary
+        region_name: ast::Name,
+    }
+}
+
 // NB: If you change this, you'll probably want to change the corresponding
 // AST structure in libsyntax/ast.rs as well.
 #[derive(Clone, PartialEq, Eq, Hash, Debug)]
@@ -697,7 +716,7 @@ pub struct EarlyBoundRegion {
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
 pub struct TyVid {
-    pub index: u32
+    pub index: u32,
 }
 
 #[derive(Clone, Copy, PartialEq, Eq, Hash)]
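
With `BrNamed` now carrying an `Issue32330` value, consumers of a bound region can tell whether it is one of the lifetimes that will switch from late- to early-bound. A minimal sketch with simplified stand-in types of how such a marker could drive a future-compatibility note; the helper function and message below are assumptions for illustration, not the compiler's actual diagnostics.

```rust
#[allow(dead_code)]
#[derive(Debug)]
enum Issue32330 {
    WontChange,
    WillChange { fn_name: &'static str, region_name: &'static str },
}

#[derive(Debug)]
enum BoundRegion {
    Anon(u32),
    Named(&'static str, Issue32330), // DefId elided in this sketch
}

// Hypothetical helper: only regions flagged `WillChange` produce a note.
fn future_compat_note(br: &BoundRegion) -> Option<String> {
    match br {
        BoundRegion::Named(_, Issue32330::WillChange { fn_name, region_name }) => {
            Some(format!(
                "lifetime {} on `{}` will become early-bound once #32330 is fixed",
                region_name, fn_name
            ))
        }
        _ => None,
    }
}

fn main() {
    let named = BoundRegion::Named(
        "'a",
        Issue32330::WillChange { fn_name: "foo", region_name: "'a" },
    );
    assert!(future_compat_note(&BoundRegion::Anon(0)).is_none());
    println!("{}", future_compat_note(&named).unwrap());
}
```
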
index a4df02872166986c698ec10fae4477552ad147e1..416347919a83fea27006caeb2c7ea9882795dbd0 100644 (file)
@@ -62,6 +62,7 @@ fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Disr {
             SignedInt(ast::IntTy::I32)   => ConstInt::I32(0),
             SignedInt(ast::IntTy::I64)   => ConstInt::I64(0),
             SignedInt(ast::IntTy::Is) => match tcx.sess.target.int_type {
+                ast::IntTy::I16 => ConstInt::Isize(ConstIsize::Is16(0)),
                 ast::IntTy::I32 => ConstInt::Isize(ConstIsize::Is32(0)),
                 ast::IntTy::I64 => ConstInt::Isize(ConstIsize::Is64(0)),
                 _ => bug!(),
@@ -71,6 +72,7 @@ fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Disr {
             UnsignedInt(ast::UintTy::U32) => ConstInt::U32(0),
             UnsignedInt(ast::UintTy::U64) => ConstInt::U64(0),
             UnsignedInt(ast::UintTy::Us) => match tcx.sess.target.uint_type {
+                ast::UintTy::U16 => ConstInt::Usize(ConstUsize::Us16(0)),
                 ast::UintTy::U32 => ConstInt::Usize(ConstUsize::Us32(0)),
                 ast::UintTy::U64 => ConstInt::Usize(ConstUsize::Us64(0)),
                 _ => bug!(),
index 1a802064b61270d253ae8c9a669e1ba856deb2cf..a851e8354a9234995b64c2ebe9b43debe0cf89aa 100644 (file)
@@ -261,7 +261,7 @@ fn in_binder<'a, 'gcx, 'tcx, T, U>(f: &mut fmt::Formatter,
     let new_value = tcx.replace_late_bound_regions(&value, |br| {
         let _ = start_or_continue(f, "for<", ", ");
         ty::ReLateBound(ty::DebruijnIndex::new(1), match br {
-            ty::BrNamed(_, name) => {
+            ty::BrNamed(_, name, _) => {
                 let _ = write!(f, "{}", name);
                 br
             }
@@ -270,7 +270,9 @@ fn in_binder<'a, 'gcx, 'tcx, T, U>(f: &mut fmt::Formatter,
             ty::BrEnv => {
                 let name = token::intern("'r");
                 let _ = write!(f, "{}", name);
-                ty::BrNamed(tcx.map.local_def_id(CRATE_NODE_ID), name)
+                ty::BrNamed(tcx.map.local_def_id(CRATE_NODE_ID),
+                            name,
+                            ty::Issue32330::WontChange)
             }
         })
     }).0;
@@ -485,7 +487,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         }
 
         match *self {
-            BrNamed(_, name) => write!(f, "{}", name),
+            BrNamed(_, name, _) => write!(f, "{}", name),
             BrAnon(_) | BrFresh(_) | BrEnv => Ok(())
         }
     }
@@ -496,8 +498,9 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
             BrAnon(n) => write!(f, "BrAnon({:?})", n),
             BrFresh(n) => write!(f, "BrFresh({:?})", n),
-            BrNamed(did, name) => {
-                write!(f, "BrNamed({:?}:{:?}, {:?})", did.krate, did.index, name)
+            BrNamed(did, name, issue32330) => {
+                write!(f, "BrNamed({:?}:{:?}, {:?}, {:?})",
+                       did.krate, did.index, name, issue32330)
             }
             BrEnv => "BrEnv".fmt(f),
         }
index 8b1d7bed7c42d8fba4ee947cb640c090f31c12b5..f2abdf831a3b8fdbdae8c6033bbc17085e38b3cc 100644 (file)
@@ -384,18 +384,19 @@ struct X { x: (), }
 
 // This fails to compile because the match is irrefutable.
 while let Irrefutable(x) = irr {
-    ...
+    // ...
 }
+```
 
 Try this instead:
 
-```
+```no_run
 struct Irrefutable(i32);
 let irr = Irrefutable(0);
 
 loop {
     let Irrefutable(x) = irr;
-    ...
+    // ...
 }
 ```
 "##,
index b727b778fcd791495897af9f77d0829fd29a9e1b..5613b1211199ab0f475f4f61f3551d755942d70d 100644 (file)
@@ -583,6 +583,9 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                     (&LitKind::Int(n, Unsuffixed), Some(&ty::TyInt(IntTy::Is))) |
                     (&LitKind::Int(n, Signed(IntTy::Is)), _) => {
                         match tcx.sess.target.int_type {
+                            IntTy::I16 => if n == I16_OVERFLOW {
+                                return Ok(Integral(Isize(Is16(::std::i16::MIN))));
+                            },
                             IntTy::I32 => if n == I32_OVERFLOW {
                                 return Ok(Integral(Isize(Is32(::std::i32::MIN))));
                             },
index 64f03be3b5f07454ba0c50e7ef106554d96bb2a4..28a5887847252cb871f988768a0659de68c9fcda 100644 (file)
@@ -77,12 +77,14 @@ pub fn infer(self, other: Self) -> Result<(Self, Self), ConstMathErr> {
             (Infer(a @ 0...as_u64::I16MAX), I16(_)) => I16(a as i64 as i16),
             (Infer(a @ 0...as_u64::I32MAX), I32(_)) => I32(a as i64 as i32),
             (Infer(a @ 0...as_u64::I64MAX), I64(_)) => I64(a as i64),
+            (Infer(a @ 0...as_u64::I16MAX), Isize(Is16(_))) => Isize(Is16(a as i64 as i16)),
             (Infer(a @ 0...as_u64::I32MAX), Isize(Is32(_))) => Isize(Is32(a as i64 as i32)),
             (Infer(a @ 0...as_u64::I64MAX), Isize(Is64(_))) => Isize(Is64(a as i64)),
             (Infer(a @ 0...as_u64::U8MAX), U8(_)) => U8(a as u8),
             (Infer(a @ 0...as_u64::U16MAX), U16(_)) => U16(a as u16),
             (Infer(a @ 0...as_u64::U32MAX), U32(_)) => U32(a as u32),
             (Infer(a), U64(_)) => U64(a),
+            (Infer(a @ 0...as_u64::U16MAX), Usize(Us16(_))) => Usize(Us16(a as u16)),
             (Infer(a @ 0...as_u64::U32MAX), Usize(Us32(_))) => Usize(Us32(a as u32)),
             (Infer(a), Usize(Us64(_))) => Usize(Us64(a)),
 
@@ -92,6 +94,9 @@ pub fn infer(self, other: Self) -> Result<(Self, Self), ConstMathErr> {
             (InferSigned(a @ as_i64::I16MIN...as_i64::I16MAX), I16(_)) => I16(a as i16),
             (InferSigned(a @ as_i64::I32MIN...as_i64::I32MAX), I32(_)) => I32(a as i32),
             (InferSigned(a), I64(_)) => I64(a),
+            (InferSigned(a @ as_i64::I16MIN...as_i64::I16MAX), Isize(Is16(_))) => {
+                Isize(Is16(a as i16))
+            },
             (InferSigned(a @ as_i64::I32MIN...as_i64::I32MAX), Isize(Is32(_))) => {
                 Isize(Is32(a as i32))
             },
@@ -100,6 +105,7 @@ pub fn infer(self, other: Self) -> Result<(Self, Self), ConstMathErr> {
             (InferSigned(a @ 0...as_i64::U16MAX), U16(_)) => U16(a as u16),
             (InferSigned(a @ 0...as_i64::U32MAX), U32(_)) => U32(a as u32),
             (InferSigned(a @ 0...as_i64::I64MAX), U64(_)) => U64(a as u64),
+            (InferSigned(a @ 0...as_i64::U16MAX), Usize(Us16(_))) => Usize(Us16(a as u16)),
             (InferSigned(a @ 0...as_i64::U32MAX), Usize(Us32(_))) => Usize(Us32(a as u32)),
             (InferSigned(a @ 0...as_i64::I64MAX), Usize(Us64(_))) => Usize(Us64(a as u64)),
             (InferSigned(_), _) => return Err(ConstMathErr::NotInRange),
@@ -117,6 +123,7 @@ pub fn erase_type(self) -> Self {
             I16(i) if i < 0 => InferSigned(i as i64),
             I32(i) if i < 0 => InferSigned(i as i64),
             I64(i) if i < 0 => InferSigned(i as i64),
+            Isize(Is16(i)) if i < 0 => InferSigned(i as i64),
             Isize(Is32(i)) if i < 0 => InferSigned(i as i64),
             Isize(Is64(i)) if i < 0 => InferSigned(i as i64),
             InferSigned(i) => Infer(i as u64),
@@ -124,12 +131,14 @@ pub fn erase_type(self) -> Self {
             I16(i) => Infer(i as u64),
             I32(i) => Infer(i as u64),
             I64(i) => Infer(i as u64),
+            Isize(Is16(i)) => Infer(i as u64),
             Isize(Is32(i)) => Infer(i as u64),
             Isize(Is64(i)) => Infer(i as u64),
             U8(i) => Infer(i as u64),
             U16(i) => Infer(i as u64),
             U32(i) => Infer(i as u64),
             U64(i) => Infer(i as u64),
+            Usize(Us16(i)) => Infer(i as u64),
             Usize(Us32(i)) => Infer(i as u64),
             Usize(Us64(i)) => Infer(i),
         }
@@ -173,6 +182,7 @@ pub fn to_u32(&self) -> Option<u32> {
             | Isize(Is64(v))
             | I64(v) if v >= 0 && v <= ::std::u32::MAX as i64 => Some(v as u32),
             Isize(Is32(v)) if v >= 0 => Some(v as u32),
+            Isize(Is16(v)) if v >= 0 => Some(v as u32),
             U8(v) => Some(v as u32),
             U16(v) => Some(v as u32),
             U32(v) => Some(v),
@@ -180,6 +190,7 @@ pub fn to_u32(&self) -> Option<u32> {
             | Usize(Us64(v))
             | U64(v) if v <= ::std::u32::MAX as u64 => Some(v as u32),
             Usize(Us32(v)) => Some(v),
+            Usize(Us16(v)) => Some(v as u32),
             _ => None,
         }
     }
@@ -193,12 +204,14 @@ pub fn to_u64(&self) -> Option<u64> {
             I16(v) if v >= 0 => Some(v as u64),
             I32(v) if v >= 0 => Some(v as u64),
             I64(v) if v >= 0 => Some(v as u64),
+            Isize(Is16(v)) if v >= 0 => Some(v as u64),
             Isize(Is32(v)) if v >= 0 => Some(v as u64),
             Isize(Is64(v)) if v >= 0 => Some(v as u64),
             U8(v) => Some(v as u64),
             U16(v) => Some(v as u64),
             U32(v) => Some(v as u64),
             U64(v) => Some(v),
+            Usize(Us16(v)) => Some(v as u64),
             Usize(Us32(v)) => Some(v as u64),
             Usize(Us64(v)) => Some(v),
             _ => None,
@@ -211,6 +224,7 @@ pub fn is_negative(&self) -> bool {
             I16(v) => v < 0,
             I32(v) => v < 0,
             I64(v) => v < 0,
+            Isize(Is16(v)) => v < 0,
             Isize(Is32(v)) => v < 0,
             Isize(Is64(v)) => v < 0,
             InferSigned(v) => v < 0,
@@ -225,12 +239,14 @@ pub fn try_cmp(self, rhs: Self) -> Result<::std::cmp::Ordering, ConstMathErr> {
             (I16(a), I16(b)) => Ok(a.cmp(&b)),
             (I32(a), I32(b)) => Ok(a.cmp(&b)),
             (I64(a), I64(b)) => Ok(a.cmp(&b)),
+            (Isize(Is16(a)), Isize(Is16(b))) => Ok(a.cmp(&b)),
             (Isize(Is32(a)), Isize(Is32(b))) => Ok(a.cmp(&b)),
             (Isize(Is64(a)), Isize(Is64(b))) => Ok(a.cmp(&b)),
             (U8(a), U8(b)) => Ok(a.cmp(&b)),
             (U16(a), U16(b)) => Ok(a.cmp(&b)),
             (U32(a), U32(b)) => Ok(a.cmp(&b)),
             (U64(a), U64(b)) => Ok(a.cmp(&b)),
+            (Usize(Us16(a)), Usize(Us16(b))) => Ok(a.cmp(&b)),
             (Usize(Us32(a)), Usize(Us32(b))) => Ok(a.cmp(&b)),
             (Usize(Us64(a)), Usize(Us64(b))) => Ok(a.cmp(&b)),
             (Infer(a), Infer(b)) => Ok(a.cmp(&b)),
@@ -249,12 +265,14 @@ macro_rules! add1 {
             ConstInt::I16(i) => ConstInt::I16(add1!(i)),
             ConstInt::I32(i) => ConstInt::I32(add1!(i)),
             ConstInt::I64(i) => ConstInt::I64(add1!(i)),
+            ConstInt::Isize(ConstIsize::Is16(i)) => ConstInt::Isize(ConstIsize::Is16(add1!(i))),
             ConstInt::Isize(ConstIsize::Is32(i)) => ConstInt::Isize(ConstIsize::Is32(add1!(i))),
             ConstInt::Isize(ConstIsize::Is64(i)) => ConstInt::Isize(ConstIsize::Is64(add1!(i))),
             ConstInt::U8(i) => ConstInt::U8(add1!(i)),
             ConstInt::U16(i) => ConstInt::U16(add1!(i)),
             ConstInt::U32(i) => ConstInt::U32(add1!(i)),
             ConstInt::U64(i) => ConstInt::U64(add1!(i)),
+            ConstInt::Usize(ConstUsize::Us16(i)) => ConstInt::Usize(ConstUsize::Us16(add1!(i))),
             ConstInt::Usize(ConstUsize::Us32(i)) => ConstInt::Usize(ConstUsize::Us32(add1!(i))),
             ConstInt::Usize(ConstUsize::Us64(i)) => ConstInt::Usize(ConstUsize::Us64(add1!(i))),
             ConstInt::Infer(_) | ConstInt::InferSigned(_) => panic!("no type info for const int"),
@@ -301,12 +319,14 @@ fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error>
             I64(i) => write!(fmt, "{}i64", i),
             Isize(ConstIsize::Is64(i)) => write!(fmt, "{}isize", i),
             Isize(ConstIsize::Is32(i)) => write!(fmt, "{}isize", i),
+            Isize(ConstIsize::Is16(i)) => write!(fmt, "{}isize", i),
             U8(i) => write!(fmt, "{}u8", i),
             U16(i) => write!(fmt, "{}u16", i),
             U32(i) => write!(fmt, "{}u32", i),
             U64(i) => write!(fmt, "{}u64", i),
             Usize(ConstUsize::Us64(i)) => write!(fmt, "{}usize", i),
             Usize(ConstUsize::Us32(i)) => write!(fmt, "{}usize", i),
+            Usize(ConstUsize::Us16(i)) => write!(fmt, "{}usize", i),
         }
     }
 }
@@ -331,12 +351,14 @@ fn $func(self, rhs: Self) -> Result<Self, ConstMathErr> {
                     (I16(a), I16(b)) => a.$checked_func(b).map(I16),
                     (I32(a), I32(b)) => a.$checked_func(b).map(I32),
                     (I64(a), I64(b)) => a.$checked_func(b).map(I64),
+                    (Isize(Is16(a)), Isize(Is16(b))) => a.$checked_func(b).map(Is16).map(Isize),
                     (Isize(Is32(a)), Isize(Is32(b))) => a.$checked_func(b).map(Is32).map(Isize),
                     (Isize(Is64(a)), Isize(Is64(b))) => a.$checked_func(b).map(Is64).map(Isize),
                     (U8(a), U8(b)) => a.$checked_func(b).map(U8),
                     (U16(a), U16(b)) => a.$checked_func(b).map(U16),
                     (U32(a), U32(b)) => a.$checked_func(b).map(U32),
                     (U64(a), U64(b)) => a.$checked_func(b).map(U64),
+                    (Usize(Us16(a)), Usize(Us16(b))) => a.$checked_func(b).map(Us16).map(Usize),
                     (Usize(Us32(a)), Usize(Us32(b))) => a.$checked_func(b).map(Us32).map(Usize),
                     (Usize(Us64(a)), Usize(Us64(b))) => a.$checked_func(b).map(Us64).map(Usize),
                     (Infer(a), Infer(b)) => a.$checked_func(b).map(Infer),
@@ -358,12 +380,14 @@ fn $func(self, rhs: Self) -> Result<Self, ConstMathErr> {
                     (I16(a), I16(b)) => Ok(I16(a.$func(b))),
                     (I32(a), I32(b)) => Ok(I32(a.$func(b))),
                     (I64(a), I64(b)) => Ok(I64(a.$func(b))),
+                    (Isize(Is16(a)), Isize(Is16(b))) => Ok(Isize(Is16(a.$func(b)))),
                     (Isize(Is32(a)), Isize(Is32(b))) => Ok(Isize(Is32(a.$func(b)))),
                     (Isize(Is64(a)), Isize(Is64(b))) => Ok(Isize(Is64(a.$func(b)))),
                     (U8(a), U8(b)) => Ok(U8(a.$func(b))),
                     (U16(a), U16(b)) => Ok(U16(a.$func(b))),
                     (U32(a), U32(b)) => Ok(U32(a.$func(b))),
                     (U64(a), U64(b)) => Ok(U64(a.$func(b))),
+                    (Usize(Us16(a)), Usize(Us16(b))) => Ok(Usize(Us16(a.$func(b)))),
                     (Usize(Us32(a)), Usize(Us32(b))) => Ok(Usize(Us32(a.$func(b)))),
                     (Usize(Us64(a)), Usize(Us64(b))) => Ok(Usize(Us64(a.$func(b)))),
                     (Infer(a), Infer(b)) => Ok(Infer(a.$func(b))),
@@ -393,6 +417,7 @@ fn check_division(
         (I16(_), I16(0)) => Err(zerr),
         (I32(_), I32(0)) => Err(zerr),
         (I64(_), I64(0)) => Err(zerr),
+        (Isize(_), Isize(Is16(0))) => Err(zerr),
         (Isize(_), Isize(Is32(0))) => Err(zerr),
         (Isize(_), Isize(Is64(0))) => Err(zerr),
         (InferSigned(_), InferSigned(0)) => Err(zerr),
@@ -401,6 +426,7 @@ fn check_division(
         (U16(_), U16(0)) => Err(zerr),
         (U32(_), U32(0)) => Err(zerr),
         (U64(_), U64(0)) => Err(zerr),
+        (Usize(_), Usize(Us16(0))) => Err(zerr),
         (Usize(_), Usize(Us32(0))) => Err(zerr),
         (Usize(_), Usize(Us64(0))) => Err(zerr),
         (Infer(_), Infer(0)) => Err(zerr),
@@ -409,6 +435,7 @@ fn check_division(
         (I16(::std::i16::MIN), I16(-1)) => Err(Overflow(op)),
         (I32(::std::i32::MIN), I32(-1)) => Err(Overflow(op)),
         (I64(::std::i64::MIN), I64(-1)) => Err(Overflow(op)),
+        (Isize(Is16(::std::i16::MIN)), Isize(Is16(-1))) => Err(Overflow(op)),
         (Isize(Is32(::std::i32::MIN)), Isize(Is32(-1))) => Err(Overflow(op)),
         (Isize(Is64(::std::i64::MIN)), Isize(Is64(-1))) => Err(Overflow(op)),
         (InferSigned(::std::i64::MIN), InferSigned(-1)) => Err(Overflow(op)),
@@ -427,6 +454,7 @@ fn div(self, rhs: Self) -> Result<Self, ConstMathErr> {
             (I16(a), I16(b)) => Ok(I16(a/b)),
             (I32(a), I32(b)) => Ok(I32(a/b)),
             (I64(a), I64(b)) => Ok(I64(a/b)),
+            (Isize(Is16(a)), Isize(Is16(b))) => Ok(Isize(Is16(a/b))),
             (Isize(Is32(a)), Isize(Is32(b))) => Ok(Isize(Is32(a/b))),
             (Isize(Is64(a)), Isize(Is64(b))) => Ok(Isize(Is64(a/b))),
             (InferSigned(a), InferSigned(b)) => Ok(InferSigned(a/b)),
@@ -435,6 +463,7 @@ fn div(self, rhs: Self) -> Result<Self, ConstMathErr> {
             (U16(a), U16(b)) => Ok(U16(a/b)),
             (U32(a), U32(b)) => Ok(U32(a/b)),
             (U64(a), U64(b)) => Ok(U64(a/b)),
+            (Usize(Us16(a)), Usize(Us16(b))) => Ok(Usize(Us16(a/b))),
             (Usize(Us32(a)), Usize(Us32(b))) => Ok(Usize(Us32(a/b))),
             (Usize(Us64(a)), Usize(Us64(b))) => Ok(Usize(Us64(a/b))),
             (Infer(a), Infer(b)) => Ok(Infer(a/b)),
@@ -455,6 +484,7 @@ fn rem(self, rhs: Self) -> Result<Self, ConstMathErr> {
             (I16(a), I16(b)) => Ok(I16(a%b)),
             (I32(a), I32(b)) => Ok(I32(a%b)),
             (I64(a), I64(b)) => Ok(I64(a%b)),
+            (Isize(Is16(a)), Isize(Is16(b))) => Ok(Isize(Is16(a%b))),
             (Isize(Is32(a)), Isize(Is32(b))) => Ok(Isize(Is32(a%b))),
             (Isize(Is64(a)), Isize(Is64(b))) => Ok(Isize(Is64(a%b))),
             (InferSigned(a), InferSigned(b)) => Ok(InferSigned(a%b)),
@@ -463,6 +493,7 @@ fn rem(self, rhs: Self) -> Result<Self, ConstMathErr> {
             (U16(a), U16(b)) => Ok(U16(a%b)),
             (U32(a), U32(b)) => Ok(U32(a%b)),
             (U64(a), U64(b)) => Ok(U64(a%b)),
+            (Usize(Us16(a)), Usize(Us16(b))) => Ok(Usize(Us16(a%b))),
             (Usize(Us32(a)), Usize(Us32(b))) => Ok(Usize(Us32(a%b))),
             (Usize(Us64(a)), Usize(Us64(b))) => Ok(Usize(Us64(a%b))),
             (Infer(a), Infer(b)) => Ok(Infer(a%b)),
@@ -481,12 +512,14 @@ fn shl(self, rhs: Self) -> Result<Self, ConstMathErr> {
             I16(a) => Ok(I16(overflowing!(a.overflowing_shl(b), Op::Shl))),
             I32(a) => Ok(I32(overflowing!(a.overflowing_shl(b), Op::Shl))),
             I64(a) => Ok(I64(overflowing!(a.overflowing_shl(b), Op::Shl))),
+            Isize(Is16(a)) => Ok(Isize(Is16(overflowing!(a.overflowing_shl(b), Op::Shl)))),
             Isize(Is32(a)) => Ok(Isize(Is32(overflowing!(a.overflowing_shl(b), Op::Shl)))),
             Isize(Is64(a)) => Ok(Isize(Is64(overflowing!(a.overflowing_shl(b), Op::Shl)))),
             U8(a) => Ok(U8(overflowing!(a.overflowing_shl(b), Op::Shl))),
             U16(a) => Ok(U16(overflowing!(a.overflowing_shl(b), Op::Shl))),
             U32(a) => Ok(U32(overflowing!(a.overflowing_shl(b), Op::Shl))),
             U64(a) => Ok(U64(overflowing!(a.overflowing_shl(b), Op::Shl))),
+            Usize(Us16(a)) => Ok(Usize(Us16(overflowing!(a.overflowing_shl(b), Op::Shl)))),
             Usize(Us32(a)) => Ok(Usize(Us32(overflowing!(a.overflowing_shl(b), Op::Shl)))),
             Usize(Us64(a)) => Ok(Usize(Us64(overflowing!(a.overflowing_shl(b), Op::Shl)))),
             Infer(a) => Ok(Infer(overflowing!(a.overflowing_shl(b), Op::Shl))),
@@ -504,12 +537,14 @@ fn shr(self, rhs: Self) -> Result<Self, ConstMathErr> {
             I16(a) => Ok(I16(overflowing!(a.overflowing_shr(b), Op::Shr))),
             I32(a) => Ok(I32(overflowing!(a.overflowing_shr(b), Op::Shr))),
             I64(a) => Ok(I64(overflowing!(a.overflowing_shr(b), Op::Shr))),
+            Isize(Is16(a)) => Ok(Isize(Is16(overflowing!(a.overflowing_shr(b), Op::Shr)))),
             Isize(Is32(a)) => Ok(Isize(Is32(overflowing!(a.overflowing_shr(b), Op::Shr)))),
             Isize(Is64(a)) => Ok(Isize(Is64(overflowing!(a.overflowing_shr(b), Op::Shr)))),
             U8(a) => Ok(U8(overflowing!(a.overflowing_shr(b), Op::Shr))),
             U16(a) => Ok(U16(overflowing!(a.overflowing_shr(b), Op::Shr))),
             U32(a) => Ok(U32(overflowing!(a.overflowing_shr(b), Op::Shr))),
             U64(a) => Ok(U64(overflowing!(a.overflowing_shr(b), Op::Shr))),
+            Usize(Us16(a)) => Ok(Usize(Us16(overflowing!(a.overflowing_shr(b), Op::Shr)))),
             Usize(Us32(a)) => Ok(Usize(Us32(overflowing!(a.overflowing_shr(b), Op::Shr)))),
             Usize(Us64(a)) => Ok(Usize(Us64(overflowing!(a.overflowing_shr(b), Op::Shr)))),
             Infer(a) => Ok(Infer(overflowing!(a.overflowing_shr(b), Op::Shr))),
@@ -526,12 +561,14 @@ fn neg(self) -> Result<Self, ConstMathErr> {
             I16(a) => Ok(I16(overflowing!(a.overflowing_neg(), Op::Neg))),
             I32(a) => Ok(I32(overflowing!(a.overflowing_neg(), Op::Neg))),
             I64(a) => Ok(I64(overflowing!(a.overflowing_neg(), Op::Neg))),
+            Isize(Is16(a)) => Ok(Isize(Is16(overflowing!(a.overflowing_neg(), Op::Neg)))),
             Isize(Is32(a)) => Ok(Isize(Is32(overflowing!(a.overflowing_neg(), Op::Neg)))),
             Isize(Is64(a)) => Ok(Isize(Is64(overflowing!(a.overflowing_neg(), Op::Neg)))),
             U8(0) => Ok(U8(0)),
             U16(0) => Ok(U16(0)),
             U32(0) => Ok(U32(0)),
             U64(0) => Ok(U64(0)),
+            Usize(Us16(0)) => Ok(Usize(Us16(0))),
             Usize(Us32(0)) => Ok(Usize(Us32(0))),
             Usize(Us64(0)) => Ok(Usize(Us64(0))),
             U8(_) => Err(UnsignedNegation),
@@ -554,12 +591,14 @@ fn not(self) -> Result<Self, ConstMathErr> {
             I16(a) => Ok(I16(!a)),
             I32(a) => Ok(I32(!a)),
             I64(a) => Ok(I64(!a)),
+            Isize(Is16(a)) => Ok(Isize(Is16(!a))),
             Isize(Is32(a)) => Ok(Isize(Is32(!a))),
             Isize(Is64(a)) => Ok(Isize(Is64(!a))),
             U8(a) => Ok(U8(!a)),
             U16(a) => Ok(U16(!a)),
             U32(a) => Ok(U32(!a)),
             U64(a) => Ok(U64(!a)),
+            Usize(Us16(a)) => Ok(Usize(Us16(!a))),
             Usize(Us32(a)) => Ok(Usize(Us32(!a))),
             Usize(Us64(a)) => Ok(Usize(Us64(!a))),
             Infer(a) => Ok(Infer(!a)),
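
All of the arms added above follow one pattern: every `match` over the target-dependent `isize`/`usize` widths gains a 16-bit case alongside the existing 32- and 64-bit ones. A cut-down sketch of that shape with standalone types (not rustc's `ConstInt`):

```rust
// Simplified sketch of the shape shared by the hunks above.
#[derive(Debug, PartialEq)]
enum MiniIsize {
    Is16(i16),
    Is32(i32),
    Is64(i64),
}

fn checked_add(a: MiniIsize, b: MiniIsize) -> Option<MiniIsize> {
    use MiniIsize::*;
    match (a, b) {
        (Is16(a), Is16(b)) => a.checked_add(b).map(Is16),
        (Is32(a), Is32(b)) => a.checked_add(b).map(Is32),
        (Is64(a), Is64(b)) => a.checked_add(b).map(Is64),
        _ => None, // mismatched widths would be a type error upstream
    }
}

fn main() {
    assert_eq!(checked_add(MiniIsize::Is16(1), MiniIsize::Is16(2)),
               Some(MiniIsize::Is16(3)));
    assert_eq!(checked_add(MiniIsize::Is16(i16::MAX), MiniIsize::Is16(1)), None);
}
```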
index 082c6510f8bc961308387571b40c2e2c71ffc5ce..4d2db355eb0258ec9b328c57a5c85afa1a4311f3 100644 (file)
@@ -15,6 +15,7 @@
 /// Anything else is an error. This invariant is checked at several locations
 #[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable, Hash, Eq, PartialEq)]
 pub enum ConstIsize {
+    Is16(i16),
     Is32(i32),
     Is64(i64),
 }
@@ -23,6 +24,7 @@ pub enum ConstIsize {
 impl ConstIsize {
     pub fn as_i64(self, target_int_ty: ast::IntTy) -> i64 {
         match (self, target_int_ty) {
+            (Is16(i), ast::IntTy::I16) => i as i64,
             (Is32(i), ast::IntTy::I32) => i as i64,
             (Is64(i), ast::IntTy::I64) => i,
             _ => panic!("got invalid isize size for target"),
@@ -30,6 +32,8 @@ pub fn as_i64(self, target_int_ty: ast::IntTy) -> i64 {
     }
     pub fn new(i: i64, target_int_ty: ast::IntTy) -> Result<Self, ConstMathErr> {
         match target_int_ty {
+            ast::IntTy::I16 if i as i16 as i64 == i => Ok(Is16(i as i16)),
+            ast::IntTy::I16 => Err(LitOutOfRange(ast::IntTy::Is)),
             ast::IntTy::I32 if i as i32 as i64 == i => Ok(Is32(i as i32)),
             ast::IntTy::I32 => Err(LitOutOfRange(ast::IntTy::Is)),
             ast::IntTy::I64 => Ok(Is64(i)),
index e5a7086d43663c1ba93b889c5891ee0b8228bac1..2b224d0646616c0f0d6ac23db4e9c5173d3c446f 100644 (file)
@@ -15,6 +15,7 @@
 /// Anything else is an error. This invariant is checked at several locations
 #[derive(Copy, Clone, Debug, RustcEncodable, RustcDecodable, Hash, Eq, PartialEq)]
 pub enum ConstUsize {
+    Us16(u16),
     Us32(u32),
     Us64(u64),
 }
@@ -23,6 +24,7 @@ pub enum ConstUsize {
 impl ConstUsize {
     pub fn as_u64(self, target_uint_ty: ast::UintTy) -> u64 {
         match (self, target_uint_ty) {
+            (Us16(i), ast::UintTy::U16) => i as u64,
             (Us32(i), ast::UintTy::U32) => i as u64,
             (Us64(i), ast::UintTy::U64) => i,
             _ => panic!("got invalid usize size for target"),
@@ -30,6 +32,8 @@ pub fn as_u64(self, target_uint_ty: ast::UintTy) -> u64 {
     }
     pub fn new(i: u64, target_uint_ty: ast::UintTy) -> Result<Self, ConstMathErr> {
         match target_uint_ty {
+            ast::UintTy::U16 if i as u16 as u64 == i => Ok(Us16(i as u16)),
+            ast::UintTy::U16 => Err(ULitOutOfRange(ast::UintTy::Us)),
             ast::UintTy::U32 if i as u32 as u64 == i => Ok(Us32(i as u32)),
             ast::UintTy::U32 => Err(ULitOutOfRange(ast::UintTy::Us)),
             ast::UintTy::U64 => Ok(Us64(i)),
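
Both `ConstIsize::new` and `ConstUsize::new` above use the same lossless round-trip test to decide whether a value fits the target's 16-bit pointer width. A minimal standalone illustration of that check:

```rust
// The same idea as `i as i16 as i64 == i` / `i as u16 as u64 == i` above.
fn fits_i16(i: i64) -> bool {
    i as i16 as i64 == i
}

fn fits_u16(i: u64) -> bool {
    i as u16 as u64 == i
}

fn main() {
    assert!(fits_i16(-32_768) && !fits_i16(32_768));
    assert!(fits_u16(65_535) && !fits_u16(65_536));
}
```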
index 926ee85230a311e0498b8eaa806d05420ae76bbb..00f797d1b9022640241af909f0248fdb6eacce11 100644 (file)
@@ -42,6 +42,7 @@
 pub mod graph;
 pub mod ivar;
 pub mod obligation_forest;
+pub mod snapshot_map;
 pub mod snapshot_vec;
 pub mod transitive_relation;
 pub mod unify;
diff --git a/src/librustc_data_structures/snapshot_map/mod.rs b/src/librustc_data_structures/snapshot_map/mod.rs
new file mode 100644 (file)
index 0000000..b398901
--- /dev/null
@@ -0,0 +1,138 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use fnv::FnvHashMap;
+use std::hash::Hash;
+use std::ops;
+
+#[cfg(test)]
+mod test;
+
+pub struct SnapshotMap<K, V>
+    where K: Hash + Clone + Eq
+{
+    map: FnvHashMap<K, V>,
+    undo_log: Vec<UndoLog<K, V>>,
+}
+
+pub struct Snapshot {
+    len: usize
+}
+
+enum UndoLog<K, V> {
+    OpenSnapshot,
+    CommittedSnapshot,
+    Inserted(K),
+    Overwrite(K, V),
+}
+
+impl<K, V> SnapshotMap<K, V>
+    where K: Hash + Clone + Eq
+{
+    pub fn new() -> Self {
+        SnapshotMap {
+            map: FnvHashMap(),
+            undo_log: vec![]
+        }
+    }
+
+    pub fn insert(&mut self, key: K, value: V) -> bool {
+        match self.map.insert(key.clone(), value) {
+            None => {
+                if !self.undo_log.is_empty() {
+                    self.undo_log.push(UndoLog::Inserted(key));
+                }
+                true
+            }
+            Some(old_value) => {
+                if !self.undo_log.is_empty() {
+                    self.undo_log.push(UndoLog::Overwrite(key, old_value));
+                }
+                false
+            }
+        }
+    }
+
+    pub fn remove(&mut self, key: K) -> bool {
+        match self.map.remove(&key) {
+            Some(old_value) => {
+                if !self.undo_log.is_empty() {
+                    self.undo_log.push(UndoLog::Overwrite(key, old_value));
+                }
+                true
+            }
+            None => {
+                false
+            }
+        }
+    }
+
+    pub fn get(&self, key: &K) -> Option<&V> {
+        self.map.get(key)
+    }
+
+    pub fn snapshot(&mut self) -> Snapshot {
+        self.undo_log.push(UndoLog::OpenSnapshot);
+        let len = self.undo_log.len() - 1;
+        Snapshot { len: len }
+    }
+
+    fn assert_open_snapshot(&self, snapshot: &Snapshot) {
+        assert!(snapshot.len < self.undo_log.len());
+        assert!(match self.undo_log[snapshot.len] {
+            UndoLog::OpenSnapshot => true,
+            _ => false
+        });
+    }
+
+    pub fn commit(&mut self, snapshot: Snapshot) {
+        self.assert_open_snapshot(&snapshot);
+        if snapshot.len == 0 {
+            // The root snapshot.
+            self.undo_log.truncate(0);
+        } else {
+            self.undo_log[snapshot.len] = UndoLog::CommittedSnapshot;
+        }
+    }
+
+    pub fn rollback_to(&mut self, snapshot: Snapshot) {
+        self.assert_open_snapshot(&snapshot);
+        while self.undo_log.len() > snapshot.len + 1 {
+            match self.undo_log.pop().unwrap() {
+                UndoLog::OpenSnapshot => {
+                    panic!("cannot rollback an uncommitted snapshot");
+                }
+
+                UndoLog::CommittedSnapshot => { }
+
+                UndoLog::Inserted(key) => {
+                    self.map.remove(&key);
+                }
+
+                UndoLog::Overwrite(key, old_value) => {
+                    self.map.insert(key, old_value);
+                }
+            }
+        }
+
+        let v = self.undo_log.pop().unwrap();
+        assert!(match v { UndoLog::OpenSnapshot => true, _ => false });
+        assert!(self.undo_log.len() == snapshot.len);
+    }
+}
+
+impl<'k, K, V> ops::Index<&'k K> for SnapshotMap<K, V>
+    where K: Hash + Clone + Eq
+{
+    type Output = V;
+    fn index(&self, key: &'k K) -> &V {
+        &self.map[key]
+    }
+}
diff --git a/src/librustc_data_structures/snapshot_map/test.rs b/src/librustc_data_structures/snapshot_map/test.rs
new file mode 100644 (file)
index 0000000..4114082
--- /dev/null
@@ -0,0 +1,50 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::SnapshotMap;
+
+#[test]
+fn basic() {
+    let mut map = SnapshotMap::new();
+    map.insert(22, "twenty-two");
+    let snapshot = map.snapshot();
+    map.insert(22, "thirty-three");
+    assert_eq!(map[&22], "thirty-three");
+    map.insert(44, "forty-four");
+    assert_eq!(map[&44], "forty-four");
+    assert_eq!(map.get(&33), None);
+    map.rollback_to(snapshot);
+    assert_eq!(map[&22], "twenty-two");
+    assert_eq!(map.get(&33), None);
+    assert_eq!(map.get(&44), None);
+}
+
+#[test]
+#[should_panic]
+fn out_of_order() {
+    let mut map = SnapshotMap::new();
+    map.insert(22, "twenty-two");
+    let snapshot1 = map.snapshot();
+    let _snapshot2 = map.snapshot();
+    map.rollback_to(snapshot1);
+}
+
+#[test]
+fn nested_commit_then_rollback() {
+    let mut map = SnapshotMap::new();
+    map.insert(22, "twenty-two");
+    let snapshot1 = map.snapshot();
+    let snapshot2 = map.snapshot();
+    map.insert(22, "thirty-three");
+    map.commit(snapshot2);
+    assert_eq!(map[&22], "thirty-three");
+    map.rollback_to(snapshot1);
+    assert_eq!(map[&22], "twenty-two");
+}
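
The new module and its tests above are self-contained. As a rough standalone sketch of the same undo-log idea using only std's `HashMap` (the real `SnapshotMap` uses `FnvHashMap`, supports nested commits, and asserts on misuse rather than silently ignoring it):

```rust
use std::collections::HashMap;
use std::hash::Hash;

enum Undo<K, V> { Open, Inserted(K), Overwrite(K, V) }

struct MiniSnapshotMap<K: Hash + Eq + Clone, V> {
    map: HashMap<K, V>,
    undo: Vec<Undo<K, V>>,
}

impl<K: Hash + Eq + Clone, V> MiniSnapshotMap<K, V> {
    fn new() -> Self {
        MiniSnapshotMap { map: HashMap::new(), undo: Vec::new() }
    }

    // Mutations are only logged while a snapshot is open (`undo` non-empty),
    // mirroring the `!self.undo_log.is_empty()` checks above.
    fn insert(&mut self, k: K, v: V) {
        match self.map.insert(k.clone(), v) {
            None if !self.undo.is_empty() => self.undo.push(Undo::Inserted(k)),
            Some(old) if !self.undo.is_empty() => self.undo.push(Undo::Overwrite(k, old)),
            _ => {}
        }
    }

    fn get(&self, k: &K) -> Option<&V> {
        self.map.get(k)
    }

    // A snapshot is just the undo-log index of its `Open` marker.
    fn snapshot(&mut self) -> usize {
        self.undo.push(Undo::Open);
        self.undo.len() - 1
    }

    // Replay the log in reverse, down to and including the `Open` marker.
    fn rollback_to(&mut self, mark: usize) {
        while self.undo.len() > mark {
            match self.undo.pop().unwrap() {
                Undo::Open => {}
                Undo::Inserted(k) => { self.map.remove(&k); }
                Undo::Overwrite(k, old) => { self.map.insert(k, old); }
            }
        }
    }
}

fn main() {
    let mut m = MiniSnapshotMap::new();
    m.insert(22, "twenty-two");
    let mark = m.snapshot();
    m.insert(22, "thirty-three");
    assert_eq!(m.get(&22), Some(&"thirty-three"));
    m.rollback_to(mark);
    assert_eq!(m.get(&22), Some(&"twenty-two"));
}
```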
index 911626bd2c2cc8391d797ca9079b8e420061e73e..96a1bdf62dea7f5c9e081cc200da18cf14fb8f62 100644 (file)
@@ -94,15 +94,15 @@ macro_rules! controller_entry_point {
     // large chunks of memory alive and we want to free them as soon as
     // possible to keep the peak memory usage low
     let (outputs, trans) = {
-        let (outputs, expanded_crate, id) = {
-            let krate = match phase_1_parse_input(sess, cfg, input) {
-                Ok(krate) => krate,
-                Err(mut parse_error) => {
-                    parse_error.emit();
-                    return Err(1);
-                }
-            };
+        let krate = match phase_1_parse_input(sess, cfg, input) {
+            Ok(krate) => krate,
+            Err(mut parse_error) => {
+                parse_error.emit();
+                return Err(1);
+            }
+        };
 
+        let krate = {
             let mut compile_state = CompileState::state_after_parse(input,
                                                                     sess,
                                                                     outdir,
@@ -113,17 +113,15 @@ macro_rules! controller_entry_point {
                                     sess,
                                     compile_state,
                                     Ok(()));
-            let krate = compile_state.krate.unwrap();
 
-            let outputs = build_output_filenames(input, outdir, output, &krate.attrs, sess);
-            let id = link::find_crate_name(Some(sess), &krate.attrs, input);
-            let expanded_crate = phase_2_configure_and_expand(sess,
-                                                              &cstore,
-                                                              krate,
-                                                              &id,
-                                                              addl_plugins)?;
+            compile_state.krate.unwrap()
+        };
 
-            (outputs, expanded_crate, id)
+        let outputs = build_output_filenames(input, outdir, output, &krate.attrs, sess);
+        let id = link::find_crate_name(Some(sess), &krate.attrs, input);
+        let ExpansionResult { expanded_crate, defs, analysis, resolutions, mut hir_forest } = {
+            let make_glob_map = control.make_glob_map;
+            phase_2_configure_and_expand(sess, &cstore, krate, &id, addl_plugins, make_glob_map)?
         };
 
         controller_entry_point!(after_expand,
@@ -150,42 +148,12 @@ macro_rules! controller_entry_point {
                                                                      &id),
                                 Ok(()));
 
-        let expanded_crate = assign_node_ids(sess, expanded_crate);
-
-        // Collect defintions for def ids.
-        let mut defs = time(sess.time_passes(),
-                            "collecting defs",
-                            || hir_map::collect_definitions(&expanded_crate));
-
-        time(sess.time_passes(),
-             "external crate/lib resolution",
-             || read_local_crates(sess, &cstore, &defs, &expanded_crate, &id, &sess.dep_graph));
-
-        time(sess.time_passes(),
-             "early lint checks",
-             || lint::check_ast_crate(sess, &expanded_crate));
-
-        time(sess.time_passes(),
-             "AST validation",
-             || ast_validation::check_crate(sess, &expanded_crate));
-
-        let (analysis, resolutions, mut hir_forest) = {
-            lower_and_resolve(sess, &id, &mut defs, &expanded_crate,
-                              &sess.dep_graph, control.make_glob_map)
-        };
-
-        // Discard MTWT tables that aren't required past lowering to HIR.
-        if !keep_mtwt_tables(sess) {
-            syntax::ext::mtwt::clear_tables();
-        }
-
         let arenas = ty::CtxtArenas::new();
 
         // Construct the HIR map
-        let hir_forest = &mut hir_forest;
         let hir_map = time(sess.time_passes(),
                            "indexing hir",
-                           move || hir_map::map_crate(hir_forest, defs));
+                           || hir_map::map_crate(&mut hir_forest, defs));
 
         {
             let _ignore = hir_map.dep_graph.in_ignore();
@@ -577,19 +545,28 @@ fn count_nodes(krate: &ast::Crate) -> usize {
 // For continuing compilation after a parsed crate has been
 // modified
 
+pub struct ExpansionResult<'a> {
+    pub expanded_crate: ast::Crate,
+    pub defs: hir_map::Definitions,
+    pub analysis: ty::CrateAnalysis<'a>,
+    pub resolutions: Resolutions,
+    pub hir_forest: hir_map::Forest,
+}
+
 /// Run the "early phases" of the compiler: initial `cfg` processing,
 /// loading compiler plugins (including those from `addl_plugins`),
 /// syntax expansion, secondary `cfg` expansion, synthesis of a test
-/// harness if one is to be provided and injection of a dependency on the
-/// standard library and prelude.
+/// harness if one is to be provided, injection of a dependency on the
+/// standard library and prelude, and name resolution.
 ///
 /// Returns `None` if we're aborting after handling -W help.
-pub fn phase_2_configure_and_expand(sess: &Session,
-                                    cstore: &CStore,
-                                    mut krate: ast::Crate,
-                                    crate_name: &str,
-                                    addl_plugins: Option<Vec<String>>)
-                                    -> Result<ast::Crate, usize> {
+pub fn phase_2_configure_and_expand<'a>(sess: &Session,
+                                        cstore: &CStore,
+                                        mut krate: ast::Crate,
+                                        crate_name: &'a str,
+                                        addl_plugins: Option<Vec<String>>,
+                                        make_glob_map: resolve::MakeGlobMap)
+                                        -> Result<ExpansionResult<'a>, usize> {
     let time_passes = sess.time_passes();
 
     // strip before anything else because crate metadata may use #[cfg_attr]
@@ -747,10 +724,6 @@ pub fn phase_2_configure_and_expand(sess: &Session,
                  "prelude injection",
                  || syntax::std_inject::maybe_inject_prelude(&sess.parse_sess, krate));
 
-    time(time_passes,
-         "checking that all macro invocations are gone",
-         || syntax::ext::expand::check_for_macros(&sess.parse_sess, &krate));
-
     time(time_passes,
          "checking for inline asm in case the target doesn't support it",
          || no_asm::check_crate(sess, &krate));
@@ -771,7 +744,39 @@ pub fn phase_2_configure_and_expand(sess: &Session,
         println!("Post-expansion node count: {}", count_nodes(&krate));
     }
 
-    Ok(krate)
+    krate = assign_node_ids(sess, krate);
+
+    // Collect definitions for def ids.
+    let mut defs =
+        time(sess.time_passes(), "collecting defs", || hir_map::collect_definitions(&krate));
+
+    time(sess.time_passes(),
+         "external crate/lib resolution",
+         || read_local_crates(sess, &cstore, &defs, &krate, crate_name, &sess.dep_graph));
+
+    time(sess.time_passes(),
+         "early lint checks",
+         || lint::check_ast_crate(sess, &krate));
+
+    time(sess.time_passes(),
+         "AST validation",
+         || ast_validation::check_crate(sess, &krate));
+
+    let (analysis, resolutions, hir_forest) =
+        lower_and_resolve(sess, crate_name, &mut defs, &krate, &sess.dep_graph, make_glob_map);
+
+    // Discard MTWT tables that aren't required past lowering to HIR.
+    if !keep_mtwt_tables(sess) {
+        syntax::ext::mtwt::clear_tables();
+    }
+
+    Ok(ExpansionResult {
+        expanded_crate: krate,
+        defs: defs,
+        analysis: analysis,
+        resolutions: resolutions,
+        hir_forest: hir_forest
+    })
 }
 
 pub fn assign_node_ids(sess: &Session, krate: ast::Crate) -> ast::Crate {
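
The net effect of the driver change is that the old tuple-returning helpers are folded into `phase_2_configure_and_expand`, which now hands back a single `ExpansionResult` that callers destructure; the updated test harness in the next file shows the real call. A generic, hedged sketch of the struct-instead-of-tuple shape, with placeholder types and contents:

```rust
// Placeholder types only; the real ExpansionResult carries the expanded crate,
// definitions, analysis, resolutions and HIR forest, as in the hunk above.
struct ExpansionResult {
    expanded_crate: String,
    defs: Vec<u32>,
    hir_forest: Vec<String>,
}

fn phase_2(input: &str) -> Result<ExpansionResult, usize> {
    Ok(ExpansionResult {
        expanded_crate: input.to_string(),
        defs: vec![],
        hir_forest: vec![],
    })
}

fn main() {
    // Callers pull out only what they need, binding mutably where required.
    let ExpansionResult { defs, mut hir_forest, .. } = phase_2("crate").expect("phase 2 aborted");
    hir_forest.extend(defs.iter().map(|d| d.to_string()));
    println!("{} items", hir_forest.len());
}
```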
index 6423d94e6f6fb703baba0b6da4948431d122ea82..c147a534e5ea72632f9e2da866d6daa638e4ea29 100644 (file)
@@ -26,7 +26,6 @@
 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
 use rustc::infer::{self, InferOk, InferResult, TypeOrigin};
 use rustc_metadata::cstore::CStore;
-use rustc_metadata::creader::read_local_crates;
 use rustc::hir::map as hir_map;
 use rustc::session::{self, config};
 use std::rc::Rc;
@@ -116,19 +115,11 @@ fn test_env<F>(source_string: &str,
         input: source_string.to_string(),
     };
     let krate = driver::phase_1_parse_input(&sess, krate_config, &input).unwrap();
-    let krate = driver::phase_2_configure_and_expand(&sess, &cstore, krate, "test", None)
-                    .expect("phase 2 aborted");
-
-    let krate = driver::assign_node_ids(&sess, krate);
-    let mut defs = hir_map::collect_definitions(&krate);
-    read_local_crates(&sess, &cstore, &defs, &krate, "test_crate", &dep_graph);
+    let driver::ExpansionResult { defs, resolutions, mut hir_forest, .. } =
+        driver::phase_2_configure_and_expand(&sess, &cstore, krate, "test", None, MakeGlobMap::No)
+            .expect("phase 2 aborted");
     let _ignore = dep_graph.in_ignore();
 
-    let (_, resolutions, mut hir_forest) = {
-        driver::lower_and_resolve(&sess, "test-crate", &mut defs, &krate,
-                                  &sess.dep_graph, MakeGlobMap::No)
-    };
-
     let arenas = ty::CtxtArenas::new();
     let ast_map = hir_map::map_crate(&mut hir_forest, defs);
 
index 9dc50a6306406d8684afcab039c98331e75f4417..1c0274cdcca9a68bf19d5cf8c431b8e0a64fca4e 100644 (file)
@@ -195,7 +195,7 @@ fn check_paths<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             }
         };
 
-        for &(_, source_def_id, source_dep_node) in sources {
+        for &(_, source_def_id, ref source_dep_node) in sources {
             let dependents = query.transitive_successors(source_dep_node);
             for &(target_span, ref target_pass, _, ref target_dep_node) in targets {
                 if !dependents.contains(&target_dep_node) {
@@ -239,7 +239,7 @@ fn dump_graph(tcx: TyCtxt) {
     { // dump a .txt file with just the edges:
         let txt_path = format!("{}.txt", path);
         let mut file = File::create(&txt_path).unwrap();
-        for &(source, target) in &edges {
+        for &(ref source, ref target) in &edges {
             write!(file, "{:?} -> {:?}\n", source, target).unwrap();
         }
     }
@@ -252,34 +252,34 @@ fn dump_graph(tcx: TyCtxt) {
     }
 }
 
-pub struct GraphvizDepGraph(FnvHashSet<DepNode<DefId>>,
-                            Vec<(DepNode<DefId>, DepNode<DefId>)>);
+pub struct GraphvizDepGraph<'q>(FnvHashSet<&'q DepNode<DefId>>,
+                                Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)>);
 
-impl<'a, 'tcx> dot::GraphWalk<'a> for GraphvizDepGraph {
-    type Node = DepNode<DefId>;
-    type Edge = (DepNode<DefId>, DepNode<DefId>);
-    fn nodes(&self) -> dot::Nodes<DepNode<DefId>> {
+impl<'a, 'tcx, 'q> dot::GraphWalk<'a> for GraphvizDepGraph<'q> {
+    type Node = &'q DepNode<DefId>;
+    type Edge = (&'q DepNode<DefId>, &'q DepNode<DefId>);
+    fn nodes(&self) -> dot::Nodes<&'q DepNode<DefId>> {
         let nodes: Vec<_> = self.0.iter().cloned().collect();
         nodes.into_cow()
     }
-    fn edges(&self) -> dot::Edges<(DepNode<DefId>, DepNode<DefId>)> {
+    fn edges(&self) -> dot::Edges<(&'q DepNode<DefId>, &'q DepNode<DefId>)> {
         self.1[..].into_cow()
     }
-    fn source(&self, edge: &(DepNode<DefId>, DepNode<DefId>)) -> DepNode<DefId> {
+    fn source(&self, edge: &(&'q DepNode<DefId>, &'q DepNode<DefId>)) -> &'q DepNode<DefId> {
         edge.0
     }
-    fn target(&self, edge: &(DepNode<DefId>, DepNode<DefId>)) -> DepNode<DefId> {
+    fn target(&self, edge: &(&'q DepNode<DefId>, &'q DepNode<DefId>)) -> &'q DepNode<DefId> {
         edge.1
     }
 }
 
-impl<'a, 'tcx> dot::Labeller<'a> for GraphvizDepGraph {
-    type Node = DepNode<DefId>;
-    type Edge = (DepNode<DefId>, DepNode<DefId>);
+impl<'a, 'tcx, 'q> dot::Labeller<'a> for GraphvizDepGraph<'q> {
+    type Node = &'q DepNode<DefId>;
+    type Edge = (&'q DepNode<DefId>, &'q DepNode<DefId>);
     fn graph_id(&self) -> dot::Id {
         dot::Id::new("DependencyGraph").unwrap()
     }
-    fn node_id(&self, n: &DepNode<DefId>) -> dot::Id {
+    fn node_id(&self, n: &&'q DepNode<DefId>) -> dot::Id {
         let s: String =
             format!("{:?}", n).chars()
                               .map(|c| if c == '_' || c.is_alphanumeric() { c } else { '_' })
@@ -287,7 +287,7 @@ fn node_id(&self, n: &DepNode<DefId>) -> dot::Id {
         debug!("n={:?} s={:?}", n, s);
         dot::Id::new(s).unwrap()
     }
-    fn node_label(&self, n: &DepNode<DefId>) -> dot::LabelText {
+    fn node_label(&self, n: &&'q DepNode<DefId>) -> dot::LabelText {
         dot::LabelText::label(format!("{:?}", n))
     }
 }
@@ -295,8 +295,8 @@ fn node_label(&self, n: &DepNode<DefId>) -> dot::LabelText {
 // Given an optional filter like `"x,y,z"`, returns either `None` (no
 // filter) or the set of nodes whose labels contain all of those
 // substrings.
-fn node_set(query: &DepGraphQuery<DefId>, filter: &DepNodeFilter)
-            -> Option<FnvHashSet<DepNode<DefId>>>
+fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
+                -> Option<FnvHashSet<&'q DepNode<DefId>>>
 {
     debug!("node_set(filter={:?})", filter);
 
@@ -307,10 +307,10 @@ fn node_set(query: &DepGraphQuery<DefId>, filter: &DepNodeFilter)
     Some(query.nodes().into_iter().filter(|n| filter.test(n)).collect())
 }
 
-fn filter_nodes(query: &DepGraphQuery<DefId>,
-                sources: &Option<FnvHashSet<DepNode<DefId>>>,
-                targets: &Option<FnvHashSet<DepNode<DefId>>>)
-                -> FnvHashSet<DepNode<DefId>>
+fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
+                    sources: &Option<FnvHashSet<&'q DepNode<DefId>>>,
+                    targets: &Option<FnvHashSet<&'q DepNode<DefId>>>)
+                    -> FnvHashSet<&'q DepNode<DefId>>
 {
     if let &Some(ref sources) = sources {
         if let &Some(ref targets) = targets {
@@ -325,21 +325,21 @@ fn filter_nodes(query: &DepGraphQuery<DefId>,
     }
 }
 
-fn walk_nodes(query: &DepGraphQuery<DefId>,
-              starts: &FnvHashSet<DepNode<DefId>>,
-              direction: Direction)
-              -> FnvHashSet<DepNode<DefId>>
+fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
+                  starts: &FnvHashSet<&'q DepNode<DefId>>,
+                  direction: Direction)
+                  -> FnvHashSet<&'q DepNode<DefId>>
 {
     let mut set = FnvHashSet();
-    for start in starts {
+    for &start in starts {
         debug!("walk_nodes: start={:?} outgoing?={:?}", start, direction == OUTGOING);
-        if set.insert(*start) {
+        if set.insert(start) {
             let mut stack = vec![query.indices[start]];
             while let Some(index) = stack.pop() {
                 for (_, edge) in query.graph.adjacent_edges(index, direction) {
                     let neighbor_index = edge.source_or_target(direction);
                     let neighbor = query.graph.node_data(neighbor_index);
-                    if set.insert(*neighbor) {
+                    if set.insert(neighbor) {
                         stack.push(neighbor_index);
                     }
                 }
@@ -349,10 +349,10 @@ fn walk_nodes(query: &DepGraphQuery<DefId>,
     set
 }
 
-fn walk_between(query: &DepGraphQuery<DefId>,
-                sources: &FnvHashSet<DepNode<DefId>>,
-                targets: &FnvHashSet<DepNode<DefId>>)
-                -> FnvHashSet<DepNode<DefId>>
+fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,
+                    sources: &FnvHashSet<&'q DepNode<DefId>>,
+                    targets: &FnvHashSet<&'q DepNode<DefId>>)
+                    -> FnvHashSet<&'q DepNode<DefId>>
 {
     // This is a bit tricky. We want to include a node only if it is:
     // (a) reachable from a source and (b) will reach a target. And we
@@ -365,16 +365,16 @@ enum State { Undecided, Deciding, Included, Excluded }
     let mut node_states = vec![State::Undecided; query.graph.len_nodes()];
 
     for &target in targets {
-        node_states[query.indices[&target].0] = State::Included;
+        node_states[query.indices[target].0] = State::Included;
     }
 
-    for source in sources.iter().map(|n| query.indices[n]) {
+    for source in sources.iter().map(|&n| query.indices[n]) {
         recurse(query, &mut node_states, source);
     }
 
     return query.nodes()
                 .into_iter()
-                .filter(|n| {
+                .filter(|&n| {
                     let index = query.indices[n];
                     node_states[index.0] == State::Included
                 })
@@ -417,12 +417,12 @@ fn recurse(query: &DepGraphQuery<DefId>,
     }
 }
 
-fn filter_edges(query: &DepGraphQuery<DefId>,
-                nodes: &FnvHashSet<DepNode<DefId>>)
-                -> Vec<(DepNode<DefId>, DepNode<DefId>)>
+fn filter_edges<'q>(query: &'q DepGraphQuery<DefId>,
+                    nodes: &FnvHashSet<&'q DepNode<DefId>>)
+                    -> Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)>
 {
     query.edges()
          .into_iter()
-         .filter(|&(source, target)| nodes.contains(&source) && nodes.contains(&target))
+         .filter(|&(source, target)| nodes.contains(source) && nodes.contains(target))
          .collect()
 }
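
All of these signature changes follow one idea: the query owns the `DepNode`s, so the filtering and graphviz code can collect `&'q DepNode<DefId>` references instead of cloning nodes into owned sets. A small standalone sketch of that lifetime pattern with std types (not the rustc ones):

```rust
use std::collections::HashSet;

struct Query {
    nodes: Vec<String>,
}

// The set borrows from the query instead of cloning every node.
fn node_set<'q>(query: &'q Query, needle: &str) -> HashSet<&'q String> {
    query.nodes.iter().filter(|n| n.contains(needle)).collect()
}

fn main() {
    let query = Query {
        nodes: vec!["Hir(foo)".to_string(), "TypeckItemBody(bar)".to_string()],
    };
    let hir_nodes = node_set(&query, "Hir");
    assert_eq!(hir_nodes.len(), 1);
}
```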
index f9e90f393219dcc7d6462d16628a55d576e46004..2fd6973909a8ec59c33a146658b60913049be8a2 100644 (file)
@@ -57,7 +57,7 @@ pub fn def_id(&self, index: DefPathIndex) -> Option<DefId> {
         self.ids[index.index as usize]
     }
 
-    pub fn map(&self, node: DepNode<DefPathIndex>) -> Option<DepNode<DefId>> {
+    pub fn map(&self, node: &DepNode<DefPathIndex>) -> Option<DepNode<DefId>> {
         node.map_def(|&index| self.def_id(index))
     }
 }
@@ -91,7 +91,7 @@ pub fn add(&mut self, def_id: DefId) -> DefPathIndex {
                  .clone()
     }
 
-    pub fn map(&mut self, node: DepNode<DefId>) -> DepNode<DefPathIndex> {
+    pub fn map(&mut self, node: &DepNode<DefId>) -> DepNode<DefPathIndex> {
         node.map_def(|&def_id| Some(self.add(def_id))).unwrap()
     }
 
index b729f25b873d4f05ec8df2b45e7239c4cb02fa7f..99119dd184c8b3929045d5e75afa08134e3a776a 100644 (file)
@@ -39,8 +39,8 @@ pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
         }
     }
 
-    pub fn hash(&mut self, dep_node: DepNode<DefId>) -> Option<u64> {
-        match dep_node {
+    pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<u64> {
+        match *dep_node {
             // HIR nodes (which always come from our crate) are an input:
             DepNode::Hir(def_id) => {
                 assert!(def_id.is_local());
index e3fd290443c11bf38ceb002c0bb23d2400b0f748..0ac1018462ee7a7eb06a2e6cc2b75a01eacda4db 100644 (file)
@@ -114,15 +114,15 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let clean_nodes =
         serialized_dep_graph.nodes
                             .iter()
-                            .filter_map(|&node| retraced.map(node))
+                            .filter_map(|node| retraced.map(node))
                             .filter(|node| !dirty_nodes.contains(node))
-                            .map(|node| (node, node));
+                            .map(|node| (node.clone(), node));
 
     // Add nodes and edges that are not dirty into our main graph.
     let dep_graph = tcx.dep_graph.clone();
     for (source, target) in clean_edges.into_iter().chain(clean_nodes) {
-        let _task = dep_graph.in_task(target);
-        dep_graph.read(source);
+        let _task = dep_graph.in_task(target.clone());
+        dep_graph.read(source.clone());
 
         debug!("decode_dep_graph: clean edge: {:?} -> {:?}", source, target);
     }
@@ -140,7 +140,7 @@ fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     for hash in hashes {
         match hash.node.map_def(|&i| retraced.def_id(i)) {
             Some(dep_node) => {
-                let current_hash = hcx.hash(dep_node).unwrap();
+                let current_hash = hcx.hash(&dep_node).unwrap();
                 debug!("initial_dirty_nodes: hash of {:?} is {:?}, was {:?}",
                        dep_node, current_hash, hash.hash);
                 if current_hash != hash.hash {
@@ -171,7 +171,7 @@ fn compute_clean_edges(serialized_edges: &[(SerializedEdge)],
     // target) if neither node has been removed. If the source has
     // been removed, add target to the list of dirty nodes.
     let mut clean_edges = Vec::with_capacity(serialized_edges.len());
-    for &(serialized_source, serialized_target) in serialized_edges {
+    for &(ref serialized_source, ref serialized_target) in serialized_edges {
         if let Some(target) = retraced.map(serialized_target) {
             if let Some(source) = retraced.map(serialized_source) {
                 clean_edges.push((source, target))
index 7deb1ca36dbded0451760ab5e15ac870434bbcc7..99f4d4f3072989b6b74a4c561bd9f32f086a0871 100644 (file)
@@ -99,7 +99,7 @@ pub fn encode_dep_graph<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
         query.nodes()
              .into_iter()
              .filter_map(|dep_node| {
-                 hcx.hash(dep_node)
+                 hcx.hash(&dep_node)
                     .map(|hash| {
                         let node = builder.map(dep_node);
                         SerializedHash { node: node, hash: hash }
@@ -147,7 +147,7 @@ pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
         let meta_data_def_ids =
             query.nodes()
                  .into_iter()
-                 .filter_map(|dep_node| match dep_node {
+                 .filter_map(|dep_node| match *dep_node {
                      DepNode::MetaData(def_id) if def_id.is_local() => Some(def_id),
                      _ => None,
                  });
@@ -165,8 +165,8 @@ pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>,
                 let dep_node = DepNode::MetaData(def_id);
                 let mut state = SipHasher::new();
                 debug!("save: computing metadata hash for {:?}", dep_node);
-                for node in query.transitive_predecessors(dep_node) {
-                    if let Some(hash) = hcx.hash(node) {
+                for node in query.transitive_predecessors(&dep_node) {
+                    if let Some(hash) = hcx.hash(&node) {
                         debug!("save: predecessor {:?} has hash {}", node, hash);
                         state.write_u64(hash.to_le());
                     } else {
index d1153fe2d06037e6a9cbd17e17afa869011be365..1131e409aa05048d7a9770408d8f9d07831c88c6 100644 (file)
@@ -1662,31 +1662,12 @@ fn doc_generics<'a, 'tcx>(base_doc: rbml::Doc,
     }
 
     let mut regions = subst::VecPerParamSpace::empty();
-    for rp_doc in reader::tagged_docs(doc, tag_region_param_def) {
-        let ident_str_doc = reader::get_doc(rp_doc,
-                                            tag_region_param_def_ident);
-        let name = item_name(&token::get_ident_interner(), ident_str_doc);
-        let def_id_doc = reader::get_doc(rp_doc,
-                                         tag_region_param_def_def_id);
-        let def_id = translated_def_id(cdata, def_id_doc);
-
-        let doc = reader::get_doc(rp_doc, tag_region_param_def_space);
-        let space = subst::ParamSpace::from_uint(reader::doc_as_u64(doc) as usize);
-
-        let doc = reader::get_doc(rp_doc, tag_region_param_def_index);
-        let index = reader::doc_as_u64(doc) as u32;
-
-        let bounds = reader::tagged_docs(rp_doc, tag_items_data_region).map(|p| {
+    for p in reader::tagged_docs(doc, tag_region_param_def) {
+        let bd =
             TyDecoder::with_doc(tcx, cdata.cnum, p,
                                 &mut |did| translate_def_id(cdata, did))
-            .parse_region()
-        }).collect();
-
-        regions.push(space, ty::RegionParameterDef { name: name,
-                                                     def_id: def_id,
-                                                     space: space,
-                                                     index: index,
-                                                     bounds: bounds });
+            .parse_region_param_def();
+        regions.push(bd.space, bd);
     }
 
     ty::Generics { types: types, regions: regions }
index e0c35a6fba84f3d253792f874918c57e65dbb27f..b14be158197e8ab379e45bcbf679dfe59e1d6536 100644 (file)
@@ -203,15 +203,6 @@ fn encode_type<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>,
     rbml_w.end_tag();
 }
 
-fn encode_region(ecx: &EncodeContext,
-                 rbml_w: &mut Encoder,
-                 r: ty::Region) {
-    rbml_w.start_tag(tag_items_data_region);
-    tyencode::enc_region(rbml_w.writer, &ecx.ty_str_ctxt(), r);
-    rbml_w.mark_stable_position();
-    rbml_w.end_tag();
-}
-
 fn encode_disr_val(_: &EncodeContext,
                    rbml_w: &mut Encoder,
                    disr_val: ty::Disr) {
@@ -535,24 +526,8 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder,
     // Region parameters
     for param in &generics.regions {
         rbml_w.start_tag(tag_region_param_def);
-
-        rbml_w.start_tag(tag_region_param_def_ident);
-        encode_name(rbml_w, param.name);
-        rbml_w.end_tag();
-
-        rbml_w.wr_tagged_u64(tag_region_param_def_def_id,
-                             def_to_u64(param.def_id));
-
-        rbml_w.wr_tagged_u64(tag_region_param_def_space,
-                             param.space.to_uint() as u64);
-
-        rbml_w.wr_tagged_u64(tag_region_param_def_index,
-                             param.index as u64);
-
-        for &bound_region in &param.bounds {
-            encode_region(ecx, rbml_w, bound_region);
-        }
-
+        tyencode::enc_region_param_def(rbml_w.writer, &ecx.ty_str_ctxt(), param);
+        rbml_w.mark_stable_position();
         rbml_w.end_tag();
     }
 
index c94af9c5b3ae318d64332c35cc05650a275d3d45..797af8964a14da196088b5a6acc0f8eff4cea786 100644 (file)
@@ -158,8 +158,21 @@ fn parse_bound_region(&mut self) -> ty::BoundRegion {
             }
             '[' => {
                 let def = self.parse_def();
-                let name = token::intern(&self.parse_str(']'));
-                ty::BrNamed(def, name)
+                let name = token::intern(&self.parse_str('|'));
+                let issue32330 = match self.next() {
+                    'n' => {
+                        assert_eq!(self.next(), ']');
+                        ty::Issue32330::WontChange
+                    }
+                    'y' => {
+                        ty::Issue32330::WillChange {
+                            fn_def_id: self.parse_def(),
+                            region_name: token::intern(&self.parse_str(']')),
+                        }
+                    }
+                    c => panic!("expected n or y not {}", c)
+                };
+                ty::BrNamed(def, name, issue32330)
             }
             'f' => {
                 let id = self.parse_u32();
@@ -623,7 +636,7 @@ pub fn parse_region_param_def(&mut self) -> ty::RegionParameterDef {
             def_id: def_id,
             space: space,
             index: index,
-            bounds: bounds
+            bounds: bounds,
         }
     }
 
index 343c452f89165aa3a12085cd3ff7e768cec7ca6e..87a2e50bb25e9b4e446778b56faef139f89844cd 100644 (file)
@@ -308,10 +308,17 @@ fn enc_bound_region(w: &mut Cursor<Vec<u8>>, cx: &ctxt, br: ty::BoundRegion) {
         ty::BrAnon(idx) => {
             write!(w, "a{}|", idx);
         }
-        ty::BrNamed(d, name) => {
-            write!(w, "[{}|{}]",
-                     (cx.ds)(cx.tcx, d),
-                     name);
+        ty::BrNamed(d, name, issue32330) => {
+            write!(w, "[{}|{}|",
+                   (cx.ds)(cx.tcx, d),
+                   name);
+
+            match issue32330 {
+                ty::Issue32330::WontChange =>
+                    write!(w, "n]"),
+                ty::Issue32330::WillChange { fn_def_id, region_name } =>
+                    write!(w, "y{}|{}]", (cx.ds)(cx.tcx, fn_def_id), region_name),
+            };
         }
         ty::BrFresh(id) => {
             write!(w, "f{}|", id);
index e53584a3f8b11ee42b733572caaa30f1b1da8631..79656ea21f755bacbbf0debd5ba1d225962c56ad 100644 (file)
@@ -162,21 +162,53 @@ pub fn perform_test(&mut self,
             }
 
             TestKind::SwitchInt { switch_ty, ref options, indices: _ } => {
-                let otherwise = self.cfg.start_new_block();
-                let targets: Vec<_> =
-                    options.iter()
-                           .map(|_| self.cfg.start_new_block())
-                           .chain(Some(otherwise))
-                           .collect();
+                let (targets, term) = match switch_ty.sty {
+                    // If we're matching on boolean we can
+                    // use the If TerminatorKind instead
+                    ty::TyBool => {
+                        assert!(options.len() > 0 && options.len() <= 2);
+
+                        let (true_bb, else_bb) =
+                            (self.cfg.start_new_block(),
+                             self.cfg.start_new_block());
+
+                        let targets = match &options[0] {
+                            &ConstVal::Bool(true) => vec![true_bb, else_bb],
+                            &ConstVal::Bool(false) => vec![else_bb, true_bb],
+                            v => span_bug!(test.span, "expected boolean value but got {:?}", v)
+                        };
+
+                        (targets,
+                         TerminatorKind::If {
+                             cond: Operand::Consume(lvalue.clone()),
+                             targets: (true_bb, else_bb)
+                         })
+
+                    }
+                    _ => {
+                        // The switch may be inexhaustive so we
+                        // add a catch all block
+                        let otherwise = self.cfg.start_new_block();
+                        let targets: Vec<_> =
+                            options.iter()
+                                   .map(|_| self.cfg.start_new_block())
+                                   .chain(Some(otherwise))
+                                   .collect();
+
+                        (targets.clone(),
+                         TerminatorKind::SwitchInt {
+                             discr: lvalue.clone(),
+                             switch_ty: switch_ty,
+                             values: options.clone(),
+                             targets: targets
+                         })
+                    }
+                };
+
                 self.cfg.terminate(block,
                                    scope_id,
                                    test.span,
-                                   TerminatorKind::SwitchInt {
-                                       discr: lvalue.clone(),
-                                       switch_ty: switch_ty,
-                                       values: options.clone(),
-                                       targets: targets.clone(),
-                                   });
+                                   term);
                 targets
             }
 
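Illustrative sketch (not from the patch): the new arm special-cases boolean discriminants, which only ever need a then/else pair, while other integer switches stay potentially inexhaustive and keep a catch-all target. The same distinction in plain Rust, with made-up names:

```rust
// Plain-Rust analogue of the MIR change above (not compiler code): a bool
// test has exactly two targets, an integer switch needs an "otherwise" arm.
fn classify_bool(b: bool) -> &'static str {
    if b { "true_bb" } else { "else_bb" }
}

fn classify_int(x: u32) -> &'static str {
    match x {
        0 => "target_0",
        1 => "target_1",
        _ => "otherwise", // catch-all block: the switch may be inexhaustive
    }
}

fn main() {
    assert_eq!(classify_bool(false), "else_bb");
    assert_eq!(classify_int(7), "otherwise");
}
```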
index 36053d3c4ffa78da45e937b69d55541f6b335a2a..4941f867f0657e4d44f629a3d767d28f5641254b 100644 (file)
@@ -153,7 +153,8 @@ enum ResolutionError<'a> {
         message: &'a str,
         context: UnresolvedNameContext<'a>,
         is_static_method: bool,
-        is_field: bool
+        is_field: bool,
+        def: Def,
     },
     /// error E0426: use of undeclared label
     UndeclaredLabel(&'a str),
@@ -413,7 +414,7 @@ fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>,
                              argument is missing?")
         }
         ResolutionError::UnresolvedName { path, message: msg, context, is_static_method,
-                                          is_field } => {
+                                          is_field, def } => {
             let mut err = struct_span_err!(resolver.session,
                                            span,
                                            E0425,
@@ -430,19 +431,20 @@ fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>,
                 UnresolvedNameContext::PathIsMod(parent) => {
                     err.help(&match parent.map(|parent| &parent.node) {
                         Some(&ExprKind::Field(_, ident)) => {
-                            format!("To reference an item from the `{module}` module, \
+                            format!("to reference an item from the `{module}` module, \
                                      use `{module}::{ident}`",
                                     module = path,
                                     ident = ident.node)
                         }
                         Some(&ExprKind::MethodCall(ident, _, _)) => {
-                            format!("To call a function from the `{module}` module, \
+                            format!("to call a function from the `{module}` module, \
                                      use `{module}::{ident}(..)`",
                                     module = path,
                                     ident = ident.node)
                         }
                         _ => {
-                            format!("Module `{module}` cannot be used as an expression",
+                            format!("{def} `{module}` cannot be used as an expression",
+                                    def = def.kind_name(),
                                     module = path)
                         }
                     });
@@ -1113,7 +1115,8 @@ fn resolve_generated_global_path(&mut self, path: &hir::Path, is_value: bool) ->
                         message: "",
                         context: UnresolvedNameContext::Other,
                         is_static_method: false,
-                        is_field: false
+                        is_field: false,
+                        def: Def::Err,
                     };
                 resolve_error(self, path.span, error);
                 Def::Err
@@ -3063,6 +3066,7 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
                                 };
 
                                 let mut context =  UnresolvedNameContext::Other;
+                                let mut def = Def::Err;
                                 if !msg.is_empty() {
                                     msg = format!(". Did you mean {}?", msg);
                                 } else {
@@ -3075,7 +3079,10 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
                                     match self.resolve_module_path(&name_path[..],
                                                                    UseLexicalScope,
                                                                    expr.span) {
-                                        Success(_) => {
+                                        Success(e) => {
+                                            if let Some(def_type) = e.def {
+                                                def = def_type;
+                                            }
                                             context = UnresolvedNameContext::PathIsMod(parent);
                                         },
                                         _ => {},
@@ -3090,6 +3097,7 @@ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
                                                   context: context,
                                                   is_static_method: method_scope && is_static,
                                                   is_field: is_field,
+                                                  def: def,
                                               });
                             }
                         }
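Illustrative sketch (not from the patch): the new `def` field lets the E0425 help text name the kind of item the path resolved to via `def.kind_name()`, rather than hardcoding "Module". A minimal, hypothetical compile-fail snippet that exercises this path by using a module where a value is expected:

```rust
// Hypothetical compile-fail example: `foo` resolves to a module, not a
// value, so resolution reports E0425 and the help text can name the item
// kind that was actually found.
mod foo {
    pub fn bar() {}
}

fn main() {
    let _x = foo; //~ ERROR E0425
}
```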
index c1685e6a749046b8c8f7ee42ee737b4b648cbdf9..1b13a662d369cc465c02b135d18daa1d05dfcc67 100644 (file)
@@ -1061,7 +1061,7 @@ pub fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
     let trait_ref = tcx.erase_regions(&trait_ref);
 
     scx.trait_cache().memoize(trait_ref, || {
-        debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}",
+        debug!("trans::fulfill_obligation(trait_ref={:?}, def_id={:?})",
                trait_ref, trait_ref.def_id());
 
         // Do the initial selection for the obligation. This yields the
@@ -1096,11 +1096,14 @@ pub fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
                 }
             };
 
+            debug!("fulfill_obligation: selection={:?}", selection);
+
             // Currently, we use a fulfillment context to completely resolve
             // all nested obligations. This is because they can inform the
             // inference of the impl's type parameters.
             let mut fulfill_cx = traits::FulfillmentContext::new();
             let vtable = selection.map(|predicate| {
+                debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate);
                 fulfill_cx.register_predicate_obligation(&infcx, predicate);
             });
             let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);
index 36a593a546b9c045172c9fa94894e99d228c9aa8..d75516ff648372a6d8f7c64ca73f4bfeb010711b 100644 (file)
@@ -2155,11 +2155,13 @@ fn to_intrinsic_name(&self, tcx: TyCtxt, ty: Ty) -> &'static str {
 
         let new_sty = match ty.sty {
             TyInt(Is) => match &tcx.sess.target.target.target_pointer_width[..] {
+                "16" => TyInt(I16),
                 "32" => TyInt(I32),
                 "64" => TyInt(I64),
                 _ => bug!("unsupported target word size")
             },
             TyUint(Us) => match &tcx.sess.target.target.target_pointer_width[..] {
+                "16" => TyUint(U16),
                 "32" => TyUint(U32),
                 "64" => TyUint(U64),
                 _ => bug!("unsupported target word size")
index 640ac25a5e31cc666556f6982c71dd20073a178a..54c825fa5faceb73de37c113333640b154f50cc0 100644 (file)
@@ -1577,6 +1577,7 @@ fn int_type_width_signed<'tcx>(sty: &ty::TypeVariants<'tcx>, ccx: &CrateContext)
         TyInt(t) => Some((match t {
             ast::IntTy::Is => {
                 match &ccx.tcx().sess.target.target.target_pointer_width[..] {
+                    "16" => 16,
                     "32" => 32,
                     "64" => 64,
                     tws => bug!("Unsupported target word size for isize: {}", tws),
@@ -1590,6 +1591,7 @@ fn int_type_width_signed<'tcx>(sty: &ty::TypeVariants<'tcx>, ccx: &CrateContext)
         TyUint(t) => Some((match t {
             ast::UintTy::Us => {
                 match &ccx.tcx().sess.target.target.target_pointer_width[..] {
+                    "16" => 16,
                     "32" => 32,
                     "64" => 64,
                     tws => bug!("Unsupported target word size for usize: {}", tws),
index 35a60cd5422b40ac0dcc5be1a1aa6876c0bbbb89..001cd197e60b8a8df178068723062dc82b51c0df 100644 (file)
@@ -122,6 +122,7 @@ pub fn i8p(ccx: &CrateContext) -> Type {
 
     pub fn int(ccx: &CrateContext) -> Type {
         match &ccx.tcx().sess.target.target.target_pointer_width[..] {
+            "16" => Type::i16(ccx),
             "32" => Type::i32(ccx),
             "64" => Type::i64(ccx),
             tws => bug!("Unsupported target word size for int: {}", tws),
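Illustrative sketch (not from the patch): the new `"16"` arms extend the pointer-width mapping to 16-bit targets (MSP430 is the usual example, an assumption here). The invariant they rely on is that `isize`/`usize` always have the target's pointer width:

```rust
use std::mem::size_of;

fn main() {
    // usize/isize track the target pointer width, which is what the new
    // "16" arms above map onto i16/u16 for 16-bit targets.
    assert_eq!(size_of::<usize>(), size_of::<*const u8>());
    assert_eq!(size_of::<isize>(), size_of::<usize>());
    println!("target pointer width: {} bits", 8 * size_of::<usize>());
}
```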
index fc1abb56d5abc61631dc42a8f7decb9d8b4136fa..472d6ac67f4f19eca852f2d461e33a52a35143c1 100644 (file)
@@ -170,7 +170,7 @@ struct ConvertedBinding<'tcx> {
 
 pub fn ast_region_to_region(tcx: TyCtxt, lifetime: &hir::Lifetime)
                             -> ty::Region {
-    let r = match tcx.named_region_map.get(&lifetime.id) {
+    let r = match tcx.named_region_map.defs.get(&lifetime.id) {
         None => {
             // should have been recorded by the `resolve_lifetime` pass
             span_bug!(lifetime.span, "unresolved lifetime");
@@ -181,7 +181,20 @@ pub fn ast_region_to_region(tcx: TyCtxt, lifetime: &hir::Lifetime)
         }
 
         Some(&rl::DefLateBoundRegion(debruijn, id)) => {
-            ty::ReLateBound(debruijn, ty::BrNamed(tcx.map.local_def_id(id), lifetime.name))
+            // If this region is declared on a function, it will have
+            // an entry in `late_bound`, but if it comes from
+            // `for<'a>` in some type or something, it won't
+            // necessarily have one. In that case though, we won't be
+            // changed from late to early bound, so we can just
+            // substitute `Issue32330::WontChange`.
+            let issue_32330 = tcx.named_region_map
+                                 .late_bound
+                                 .get(&id)
+                                 .cloned()
+                                 .unwrap_or(ty::Issue32330::WontChange);
+            ty::ReLateBound(debruijn, ty::BrNamed(tcx.map.local_def_id(id),
+                                                  lifetime.name,
+                                                  issue_32330))
         }
 
         Some(&rl::DefEarlyBoundRegion(space, index, _)) => {
@@ -193,11 +206,21 @@ pub fn ast_region_to_region(tcx: TyCtxt, lifetime: &hir::Lifetime)
         }
 
         Some(&rl::DefFreeRegion(scope, id)) => {
+            // As in DefLateBoundRegion above, could be missing for some late-bound
+            // regions, but also for early-bound regions.
+            let issue_32330 = tcx.named_region_map
+                                 .late_bound
+                                 .get(&id)
+                                 .cloned()
+                                 .unwrap_or(ty::Issue32330::WontChange);
             ty::ReFree(ty::FreeRegion {
                     scope: scope.to_code_extent(&tcx.region_maps),
                     bound_region: ty::BrNamed(tcx.map.local_def_id(id),
-                                              lifetime.name)
-                })
+                                              lifetime.name,
+                                              issue_32330)
+            })
+
+                // Not late-bound here, so this region won't change.
         }
     };
 
@@ -911,7 +934,7 @@ fn ast_type_binding_to_poly_projection_predicate(
         debug!("late_bound_in_ty = {:?}", late_bound_in_ty);
         for br in late_bound_in_ty.difference(&late_bound_in_trait_ref) {
             let br_name = match *br {
-                ty::BrNamed(_, name) => name,
+                ty::BrNamed(_, name, _) => name,
                 _ => {
                     span_bug!(
                         binding.span,
@@ -1675,7 +1698,7 @@ pub fn ast_ty_to_ty(&self, rscope: &RegionScope, ast_ty: &hir::Ty) -> Ty<'tcx> {
                 let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output);
                 for br in late_bound_in_ret.difference(&late_bound_in_args) {
                     let br_name = match *br {
-                        ty::BrNamed(_, name) => name,
+                        ty::BrNamed(_, name, _) => name,
                         _ => {
                             span_bug!(
                                 bf.decl.output.span(),
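Illustrative sketch (not from the patch): the `Issue32330` plumbing records whether a named lifetime that is late bound today might become early bound under the planned rules. A surface-level reminder of the distinction being tracked, in ordinary code rather than compiler internals:

```rust
// `'a` here is late bound (nothing in a where clause constrains it), so the
// function coerces to a higher-ranked fn pointer. Lifetimes that do appear
// in bounds become early bound, which is the change issue #32330 warns about.
fn first<'a>(xs: &'a [u8]) -> &'a u8 { &xs[0] }

fn main() {
    let f: for<'a> fn(&'a [u8]) -> &'a u8 = first;
    assert_eq!(*f(&[7, 8]), 7);
}
```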
index 92027a56ec1b0ac814a1d141c420f0b7680fcbf8..e65f3f0ff413ff898fce31edf536569994498b11 100644 (file)
@@ -64,7 +64,6 @@
 use hir::def_id::DefId;
 use constrained_type_params as ctp;
 use middle::lang_items::SizedTraitLangItem;
-use middle::resolve_lifetime;
 use middle::const_val::ConstVal;
 use rustc_const_eval::EvalHint::UncheckedExprHint;
 use rustc_const_eval::{eval_const_expr_partial, ConstEvalErr};
@@ -1745,14 +1744,16 @@ fn add_unsized_bound<'tcx>(astconv: &AstConv<'tcx, 'tcx>,
 /// the lifetimes that are declared. For fns or methods, we have to
 /// screen out those that do not appear in any where-clauses etc using
 /// `resolve_lifetime::early_bound_lifetimes`.
-fn early_bound_lifetimes_from_generics(space: ParamSpace,
-                                       ast_generics: &hir::Generics)
-                                       -> Vec<hir::LifetimeDef>
+fn early_bound_lifetimes_from_generics<'a, 'tcx, 'hir>(
+    ccx: &CrateCtxt<'a, 'tcx>,
+    ast_generics: &'hir hir::Generics)
+    -> Vec<&'hir hir::LifetimeDef>
 {
-    match space {
-        SelfSpace | TypeSpace => ast_generics.lifetimes.to_vec(),
-        FnSpace => resolve_lifetime::early_bound_lifetimes(ast_generics),
-    }
+    ast_generics
+        .lifetimes
+        .iter()
+        .filter(|l| !ccx.tcx.named_region_map.late_bound.contains_key(&l.lifetime.id))
+        .collect()
 }
 
 fn ty_generic_predicates<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
@@ -1781,7 +1782,7 @@ fn ty_generic_predicates<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
     // Collect the region predicates that were declared inline as
     // well. In the case of parameters declared on a fn or method, we
     // have to be careful to only iterate over early-bound regions.
-    let early_lifetimes = early_bound_lifetimes_from_generics(space, ast_generics);
+    let early_lifetimes = early_bound_lifetimes_from_generics(ccx, ast_generics);
     for (index, param) in early_lifetimes.iter().enumerate() {
         let index = index as u32;
         let region =
@@ -1864,7 +1865,7 @@ fn ty_generics<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
     let tcx = ccx.tcx;
     let mut result = base_generics.clone();
 
-    let early_lifetimes = early_bound_lifetimes_from_generics(space, ast_generics);
+    let early_lifetimes = early_bound_lifetimes_from_generics(ccx, ast_generics);
     for (i, l) in early_lifetimes.iter().enumerate() {
         let bounds = l.bounds.iter()
                              .map(|l| ast_region_to_region(tcx, l))
index d06030637afd8cc3b23a318c057219f279d9a048..e9e52a0121a3695467d6518adc732571e313f486 100644 (file)
@@ -2040,6 +2040,7 @@ impl Foo for Bar {
     // the trait
     fn foo(&self) {}
 }
+```
 "##,
 
 E0186: r##"
index a532f9744f49813346a6c7a686540a1b1fc4f729..612007da0e97acf33992d1ffd36e59fd8904af2a 100644 (file)
@@ -144,7 +144,7 @@ fn inferred_index(&self, param_id: ast::NodeId) -> InferredIndex {
     fn find_binding_for_lifetime(&self, param_id: ast::NodeId) -> ast::NodeId {
         let tcx = self.terms_cx.tcx;
         assert!(is_lifetime(&tcx.map, param_id));
-        match tcx.named_region_map.get(&param_id) {
+        match tcx.named_region_map.defs.get(&param_id) {
             Some(&rl::DefEarlyBoundRegion(_, _, lifetime_decl_id))
                 => lifetime_decl_id,
             Some(_) => bug!("should not encounter non early-bound cases"),
index bf503141ff642c09e5cdbb36207759788e578f29..0f3c62aca2a8595b51eba8be4f38b854ba3cbcce 100644 (file)
@@ -819,7 +819,7 @@ impl Clean<Option<Lifetime>> for ty::Region {
     fn clean(&self, cx: &DocContext) -> Option<Lifetime> {
         match *self {
             ty::ReStatic => Some(Lifetime::statik()),
-            ty::ReLateBound(_, ty::BrNamed(_, name)) => Some(Lifetime(name.to_string())),
+            ty::ReLateBound(_, ty::BrNamed(_, name, _)) => Some(Lifetime(name.to_string())),
             ty::ReEarlyBound(ref data) => Some(Lifetime(data.name.clean(cx))),
 
             ty::ReLateBound(..) |
index 61985d390801bd132e9fae4423e201343a38487a..73e0a529a45c6a60135c04d8786b44331693d7e0 100644 (file)
@@ -21,7 +21,6 @@
 use rustc_trans::back::link;
 use rustc_resolve as resolve;
 use rustc_metadata::cstore::CStore;
-use rustc_metadata::creader::read_local_crates;
 
 use syntax::{ast, codemap, errors};
 use syntax::errors::emitter::ColorConfig;
@@ -146,21 +145,12 @@ pub fn run_core(search_paths: SearchPaths,
 
     let krate = panictry!(driver::phase_1_parse_input(&sess, cfg, &input));
 
-    let name = link::find_crate_name(Some(&sess), &krate.attrs,
-                                     &input);
+    let name = link::find_crate_name(Some(&sess), &krate.attrs, &input);
 
-    let krate = driver::phase_2_configure_and_expand(&sess, &cstore, krate, &name, None)
-                    .expect("phase_2_configure_and_expand aborted in rustdoc!");
-
-    let krate = driver::assign_node_ids(&sess, krate);
-
-    let mut defs = hir_map::collect_definitions(&krate);
-    read_local_crates(&sess, &cstore, &defs, &krate, &name, &dep_graph);
-
-    // Lower ast -> hir and resolve.
-    let (analysis, resolutions, mut hir_forest) = {
-        driver::lower_and_resolve(&sess, &name, &mut defs, &krate,
-                                  &sess.dep_graph, resolve::MakeGlobMap::No)
+    let driver::ExpansionResult { defs, analysis, resolutions, mut hir_forest, .. } = {
+        let make_glob_map = resolve::MakeGlobMap::No;
+        driver::phase_2_configure_and_expand(&sess, &cstore, krate, &name, None, make_glob_map)
+            .expect("phase_2_configure_and_expand aborted in rustdoc!")
     };
 
     let arenas = ty::CtxtArenas::new();
index 53201a9580ec0057d3222bddefafce66d1465076..f0ca89097f701f6f98f6ab114f44e6413c3f23ef 100644 (file)
 use rustc::session::{self, config};
 use rustc::session::config::{get_unstable_features_setting, OutputType};
 use rustc::session::search_paths::{SearchPaths, PathKind};
-use rustc::hir::lowering::{lower_crate, DummyResolver};
 use rustc_back::dynamic_lib::DynamicLibrary;
 use rustc_back::tempdir::TempDir;
 use rustc_driver::{driver, Compilation};
+use rustc_driver::driver::phase_2_configure_and_expand;
 use rustc_metadata::cstore::CStore;
+use rustc_resolve::MakeGlobMap;
 use syntax::codemap::CodeMap;
 use syntax::errors;
 use syntax::errors::emitter::ColorConfig;
@@ -93,21 +94,16 @@ pub fn run(input: &str,
     let mut cfg = config::build_configuration(&sess);
     cfg.extend(config::parse_cfgspecs(cfgs.clone()));
     let krate = panictry!(driver::phase_1_parse_input(&sess, cfg, &input));
-    let krate = driver::phase_2_configure_and_expand(&sess, &cstore, krate,
-                                                     "rustdoc-test", None)
-        .expect("phase_2_configure_and_expand aborted in rustdoc!");
-    let krate = driver::assign_node_ids(&sess, krate);
-    let dep_graph = DepGraph::new(false);
-    let defs = hir_map::collect_definitions(&krate);
-
-    let mut dummy_resolver = DummyResolver;
-    let krate = lower_crate(&sess, &krate, &sess, &mut dummy_resolver);
-
-    let opts = scrape_test_config(&krate);
+    let driver::ExpansionResult { defs, mut hir_forest, .. } = {
+        let make_glob_map = MakeGlobMap::No;
+        phase_2_configure_and_expand(&sess, &cstore, krate, "rustdoc-test", None, make_glob_map)
+            .expect("phase_2_configure_and_expand aborted in rustdoc!")
+    };
 
+    let dep_graph = DepGraph::new(false);
+    let opts = scrape_test_config(hir_forest.krate());
     let _ignore = dep_graph.in_ignore();
-    let mut forest = hir_map::Forest::new(krate, &dep_graph);
-    let map = hir_map::map_crate(&mut forest, defs);
+    let map = hir_map::map_crate(&mut hir_forest, defs);
 
     let ctx = core::DocContext {
         map: &map,
index 734f774043d6dd4bdcc194424adb9b42049a1b46..0180c3118a586f41c813e65b5ddfa207e3b945f6 100644 (file)
@@ -32,6 +32,8 @@
 /// it was opened with. Files also implement `Seek` to alter the logical cursor
 /// that the file contains internally.
 ///
+/// Files are automatically closed when they go out of scope.
+///
 /// # Examples
 ///
 /// ```no_run
@@ -1341,8 +1343,9 @@ pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> io::Result<()> {
 ///     if dir.is_dir() {
 ///         for entry in try!(fs::read_dir(dir)) {
 ///             let entry = try!(entry);
-///             if try!(entry.file_type()).is_dir() {
-///                 try!(visit_dirs(&entry.path(), cb));
+///             let path = entry.path();
+///             if path.is_dir() {
+///                 try!(visit_dirs(&path, cb));
 ///             } else {
 ///                 cb(&entry);
 ///             }
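Illustrative sketch (not from the patch): a self-contained version of the corrected doc example, written with `?` and `dyn` instead of the era's `try!` so it compiles as-is:

```rust
use std::fs::{self, DirEntry};
use std::io;
use std::path::Path;

// Walk `dir` recursively, recursing when the entry's *path* is a directory
// (the detail the doc fix above changes) and calling `cb` on everything else.
fn visit_dirs(dir: &Path, cb: &dyn Fn(&DirEntry)) -> io::Result<()> {
    if dir.is_dir() {
        for entry in fs::read_dir(dir)? {
            let entry = entry?;
            let path = entry.path();
            if path.is_dir() {
                visit_dirs(&path, cb)?;
            } else {
                cb(&entry);
            }
        }
    }
    Ok(())
}

fn main() {
    visit_dirs(Path::new("."), &|entry| println!("{}", entry.path().display()))
        .expect("directory walk failed");
}
```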
index 016130e99989e3630c00c986495a731459349aed..d8cadf09cb2a4c3a3c8ae9ea78b6ab0059627b04 100644 (file)
 #[stable(feature = "panic_hooks", since = "1.10.0")]
 pub use panicking::{take_hook, set_hook, PanicInfo, Location};
 
-///
-#[rustc_deprecated(since = "1.9.0", reason = "renamed to set_hook")]
-#[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")]
-pub fn set_handler<F>(handler: F) where F: Fn(&PanicInfo) + 'static + Sync + Send {
-    set_hook(Box::new(handler))
-}
-
-///
-#[rustc_deprecated(since = "1.9.0", reason = "renamed to take_hook")]
-#[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")]
-pub fn take_handler() -> Box<Fn(&PanicInfo) + 'static + Sync + Send> {
-    take_hook()
-}
-
-/// A marker trait which represents "unwind safe" types in Rust.
+/// A marker trait which represents "panic safe" types in Rust.
 ///
 /// This trait is implemented by default for many types and behaves similarly in
 /// terms of inference of implementation to the `Send` and `Sync` traits. The
@@ -117,14 +103,6 @@ pub fn take_handler() -> Box<Fn(&PanicInfo) + 'static + Sync + Send> {
                             across an unwind boundary"]
 pub trait UnwindSafe {}
 
-/// Deprecated, renamed to UnwindSafe
-#[unstable(feature = "recover", reason = "awaiting feedback", issue = "27719")]
-#[rustc_deprecated(reason = "renamed to `UnwindSafe`", since = "1.9.0")]
-pub trait RecoverSafe {}
-#[unstable(feature = "recover", reason = "awaiting feedback", issue = "27719")]
-#[allow(deprecated)]
-impl<T: UnwindSafe> RecoverSafe for T {}
-
 /// A marker trait representing types where a shared reference is considered
 /// unwind safe.
 ///
@@ -202,11 +180,6 @@ pub struct AssertUnwindSafe<T>(
     pub T
 );
 
-/// Deprecated, renamed to `AssertUnwindSafe`
-#[unstable(feature = "recover", issue = "27719")]
-#[rustc_deprecated(reason = "renamed to `AssertUnwindSafe`", since = "1.9.0")]
-pub struct AssertRecoverSafe<T>(pub T);
-
 // Implementations of the `UnwindSafe` trait:
 //
 // * By default everything is unwind safe
@@ -234,9 +207,6 @@ impl<T: ?Sized> UnwindSafe for Mutex<T> {}
 impl<T: ?Sized> UnwindSafe for RwLock<T> {}
 #[stable(feature = "catch_unwind", since = "1.9.0")]
 impl<T> UnwindSafe for AssertUnwindSafe<T> {}
-#[unstable(feature = "recover", issue = "27719")]
-#[allow(deprecated)]
-impl<T> UnwindSafe for AssertRecoverSafe<T> {}
 
 // not covered via the Shared impl above b/c the inner contents use
 // Cell/AtomicUsize, but the usage here is unwind safe so we can lift the
@@ -256,9 +226,6 @@ impl RefUnwindSafe for .. {}
 impl<T: ?Sized> !RefUnwindSafe for UnsafeCell<T> {}
 #[stable(feature = "catch_unwind", since = "1.9.0")]
 impl<T> RefUnwindSafe for AssertUnwindSafe<T> {}
-#[unstable(feature = "recover", issue = "27719")]
-#[allow(deprecated)]
-impl<T> RefUnwindSafe for AssertRecoverSafe<T> {}
 
 #[stable(feature = "catch_unwind", since = "1.9.0")]
 impl<T> Deref for AssertUnwindSafe<T> {
@@ -285,53 +252,6 @@ extern "rust-call" fn call_once(self, _args: ()) -> R {
     }
 }
 
-#[allow(deprecated)]
-impl<T> AssertRecoverSafe<T> {
-    /// Creates a new `AssertRecoverSafe` wrapper around the provided type.
-    #[unstable(feature = "recover", reason = "awaiting feedback", issue = "27719")]
-    #[rustc_deprecated(reason = "the type's field is now public, construct it directly",
-                       since = "1.9.0")]
-    pub fn new(t: T) -> AssertRecoverSafe<T> {
-        AssertRecoverSafe(t)
-    }
-
-    /// Consumes the `AssertRecoverSafe`, returning the wrapped value.
-    #[unstable(feature = "recover", reason = "awaiting feedback", issue = "27719")]
-    #[rustc_deprecated(reason = "the type's field is now public, access it directly",
-                       since = "1.9.0")]
-    pub fn into_inner(self) -> T {
-        self.0
-    }
-}
-
-#[unstable(feature = "recover", issue = "27719")]
-#[allow(deprecated)]
-impl<T> Deref for AssertRecoverSafe<T> {
-    type Target = T;
-
-    fn deref(&self) -> &T {
-        &self.0
-    }
-}
-
-#[unstable(feature = "recover", issue = "27719")]
-#[allow(deprecated)]
-impl<T> DerefMut for AssertRecoverSafe<T> {
-    fn deref_mut(&mut self) -> &mut T {
-        &mut self.0
-    }
-}
-
-#[unstable(feature = "recover", issue = "27719")]
-#[allow(deprecated)]
-impl<R, F: FnOnce() -> R> FnOnce<()> for AssertRecoverSafe<F> {
-    type Output = R;
-
-    extern "rust-call" fn call_once(self, _args: ()) -> R {
-        (self.0)()
-    }
-}
-
 /// Invokes a closure, capturing the cause of an unwinding panic if one occurs.
 ///
 /// This function will return `Ok` with the closure's result if the closure
@@ -388,13 +308,6 @@ pub fn catch_unwind<F: FnOnce() -> R + UnwindSafe, R>(f: F) -> Result<R> {
     }
 }
 
-/// Deprecated, renamed to `catch_unwind`
-#[unstable(feature = "recover", reason = "awaiting feedback", issue = "27719")]
-#[rustc_deprecated(reason = "renamed to `catch_unwind`", since = "1.9.0")]
-pub fn recover<F: FnOnce() -> R + UnwindSafe, R>(f: F) -> Result<R> {
-    catch_unwind(f)
-}
-
 /// Triggers a panic without invoking the panic hook.
 ///
 /// This is designed to be used in conjunction with `catch_unwind` to, for
@@ -424,10 +337,3 @@ pub fn recover<F: FnOnce() -> R + UnwindSafe, R>(f: F) -> Result<R> {
 pub fn resume_unwind(payload: Box<Any + Send>) -> ! {
     panicking::rust_panic(payload)
 }
-
-/// Deprecated, use resume_unwind instead
-#[unstable(feature = "panic_propagate", reason = "awaiting feedback", issue = "30752")]
-#[rustc_deprecated(reason = "renamed to `resume_unwind`", since = "1.9.0")]
-pub fn propagate(payload: Box<Any + Send>) -> ! {
-    resume_unwind(payload)
-}
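Illustrative sketch (not from the patch): with the `recover`-era names gone, `catch_unwind`, `resume_unwind`, and `AssertUnwindSafe` are the surviving API. A small usage example:

```rust
use std::panic::{self, AssertUnwindSafe};

fn main() {
    let mut attempts = 0;
    // A captured &mut is not UnwindSafe on its own; AssertUnwindSafe keeps
    // exactly this role now that AssertRecoverSafe is gone.
    let result = panic::catch_unwind(AssertUnwindSafe(|| {
        attempts += 1;
        panic!("boom");
    }));
    assert!(result.is_err());
    assert_eq!(attempts, 1);
    // To rethrow without re-running the hook: panic::resume_unwind(payload).
}
```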
index e083605a2acd5bf1ed38d6b1d7b29023b38cd296..11af768c5b9b0a0abc8c186a4fae546b00040370 100644 (file)
@@ -28,7 +28,7 @@
 /// ```
 ///
 /// [`assert!`]: macro.assert!.html
-/// [`if` conditionals]: ../book/if.html
+/// [`if`]: ../book/if.html
 /// [`BitAnd`]: ops/trait.BitAnd.html
 /// [`BitOr`]: ops/trait.BitOr.html
 /// [`Not`]: ops/trait.Not.html
index 1b6f6c3e875c98f12267fd362054b326d32046b3..3ce9bcc79f24ad8dfdedceb315286d0deba49188 100644 (file)
@@ -195,7 +195,7 @@ fn from_inner(pipe: AnonPipe) -> ChildStderr {
 ///                      .arg("-c")
 ///                      .arg("echo hello")
 ///                      .output()
-///                      .expect("failed to execute proces");
+///                      .expect("failed to execute process");
 ///
 /// let hello = output.stdout;
 /// ```
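Illustrative sketch (not from the patch): the doc example with the typo fixed, as a runnable program; it assumes a Unix-like shell (`sh`) on PATH:

```rust
use std::process::Command;

fn main() {
    let output = Command::new("sh")
        .arg("-c")
        .arg("echo hello")
        .output()
        .expect("failed to execute process");
    // stdout is raw bytes; the doc example binds it as `hello`.
    assert_eq!(String::from_utf8_lossy(&output.stdout).trim(), "hello");
}
```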
index 7217eaa1325d63f140a14fed89293c152e93ae7c..5a7c0fe4816c981334849c6fd2d5a91602fe08f2 100644 (file)
@@ -48,7 +48,7 @@ fn lang_start(main: *const u8, argc: isize, argv: *const *const u8) -> isize {
         // created. Note that this isn't necessary in general for new threads,
         // but we just do this to name the main thread and to give it correct
         // info about the stack bounds.
-        let thread: Thread = NewThread::new(Some("<main>".to_owned()));
+        let thread: Thread = NewThread::new(Some("main".to_owned()));
         thread_info::set(main_guard, thread);
 
         // Store our args if necessary in a squirreled away location
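Illustrative sketch (not from the patch): the visible effect of the rename shows up in `Thread::name` and in panic messages:

```rust
use std::thread;

fn main() {
    // The main thread is now simply named "main", without angle brackets.
    assert_eq!(thread::current().name(), Some("main"));

    // Spawned threads still have no name unless one is set via Builder.
    thread::spawn(|| assert_eq!(thread::current().name(), None))
        .join()
        .unwrap();
}
```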
index 15e69628c7a5d20df96a0aab5e24d3f79b4e6d79..c75a5c09146a495abe9e626efea78e605f9d8b12 100644 (file)
@@ -204,10 +204,14 @@ impl<T> Mutex<T> {
     /// Creates a new mutex in an unlocked state ready for use.
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn new(t: T) -> Mutex<T> {
-        Mutex {
+        let mut m = Mutex {
             inner: box StaticMutex::new(),
             data: UnsafeCell::new(t),
+        };
+        unsafe {
+            m.inner.lock.init();
         }
+        m
     }
 }
 
index e1e764bd255cbd7302cf26b2eb4368330042f060..03d3483902dcf87b899aefcbe1f2a66ea07d2a30 100644 (file)
@@ -468,42 +468,6 @@ unsafe fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell<T>)
             }
         })
     }
-
-    /// Transform this guard to hold a sub-borrow of the original data.
-    ///
-    /// Applies the supplied closure to the data, returning a new lock
-    /// guard referencing the borrow returned by the closure.
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # #![feature(guard_map)]
-    /// # use std::sync::{RwLockReadGuard, RwLock};
-    /// let x = RwLock::new(vec![1, 2]);
-    ///
-    /// let y = RwLockReadGuard::map(x.read().unwrap(), |v| &v[0]);
-    /// assert_eq!(*y, 1);
-    /// ```
-    #[unstable(feature = "guard_map",
-               reason = "recently added, needs RFC for stabilization,
-                         questionable interaction with Condvar",
-               issue = "27746")]
-    #[rustc_deprecated(since = "1.8.0",
-                       reason = "unsound on Mutex because of Condvar and \
-                                 RwLock may also with to be used with Condvar \
-                                 one day")]
-    pub fn map<U: ?Sized, F>(this: Self, cb: F) -> RwLockReadGuard<'rwlock, U>
-        where F: FnOnce(&T) -> &U
-    {
-        let new = RwLockReadGuard {
-            __lock: this.__lock,
-            __data: cb(this.__data)
-        };
-
-        mem::forget(this);
-
-        new
-    }
 }
 
 #[allow(deprecated)]
@@ -518,57 +482,6 @@ unsafe fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell<T>)
             }
         })
     }
-
-    /// Transform this guard to hold a sub-borrow of the original data.
-    ///
-    /// Applies the supplied closure to the data, returning a new lock
-    /// guard referencing the borrow returned by the closure.
-    ///
-    /// # Examples
-    ///
-    /// ```rust
-    /// # #![feature(guard_map)]
-    /// # use std::sync::{RwLockWriteGuard, RwLock};
-    /// let x = RwLock::new(vec![1, 2]);
-    ///
-    /// {
-    ///     let mut y = RwLockWriteGuard::map(x.write().unwrap(), |v| &mut v[0]);
-    ///     assert_eq!(*y, 1);
-    ///
-    ///     *y = 10;
-    /// }
-    ///
-    /// assert_eq!(&**x.read().unwrap(), &[10, 2]);
-    /// ```
-    #[unstable(feature = "guard_map",
-               reason = "recently added, needs RFC for stabilization,
-                         questionable interaction with Condvar",
-               issue = "27746")]
-    #[rustc_deprecated(since = "1.8.0",
-                       reason = "unsound on Mutex because of Condvar and \
-                                 RwLock may also with to be used with Condvar \
-                                 one day")]
-    pub fn map<U: ?Sized, F>(this: Self, cb: F) -> RwLockWriteGuard<'rwlock, U>
-        where F: FnOnce(&mut T) -> &mut U
-    {
-        // Compute the new data while still owning the original lock
-        // in order to correctly poison if the callback panics.
-        let data = unsafe { ptr::read(&this.__data) };
-        let new_data = cb(data);
-
-        // We don't want to unlock the lock by running the destructor of the
-        // original lock, so just read the fields we need and forget it.
-        let (poison, lock) = unsafe {
-            (ptr::read(&this.__poison), ptr::read(&this.__lock))
-        };
-        mem::forget(this);
-
-        RwLockWriteGuard {
-            __lock: lock,
-            __data: new_data,
-            __poison: poison
-        }
-    }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -619,7 +532,7 @@ mod tests {
     use rand::{self, Rng};
     use sync::mpsc::channel;
     use thread;
-    use sync::{Arc, RwLock, StaticRwLock, TryLockError, RwLockWriteGuard};
+    use sync::{Arc, RwLock, StaticRwLock, TryLockError};
     use sync::atomic::{AtomicUsize, Ordering};
 
     #[derive(Eq, PartialEq, Debug)]
@@ -867,20 +780,4 @@ fn test_get_mut_poison() {
             Ok(x) => panic!("get_mut of poisoned RwLock is Ok: {:?}", x),
         }
     }
-
-    #[test]
-    fn test_rwlock_write_map_poison() {
-        let rwlock = Arc::new(RwLock::new(vec![1, 2]));
-        let rwlock2 = rwlock.clone();
-
-        thread::spawn(move || {
-            let _ = RwLockWriteGuard::map::<usize, _>(rwlock2.write().unwrap(), |_| panic!());
-        }).join().unwrap_err();
-
-        match rwlock.read() {
-            Ok(r) => panic!("Read lock on poisioned RwLock is Ok: {:?}", &*r),
-            Err(_) => {}
-        };
-    }
 }
-
index 5a6dfe7fb1a1507739b6b47447490ec60d426b92..7a2183c522f5b92154b198e55aea3cdc4455a5f4 100644 (file)
@@ -27,6 +27,12 @@ impl Mutex {
     /// first used with any of the functions below.
     pub const fn new() -> Mutex { Mutex(imp::Mutex::new()) }
 
+    /// Prepare the mutex for use.
+    ///
+    /// This should be called once the mutex is at a stable memory address.
+    #[inline]
+    pub unsafe fn init(&mut self) { self.0.init() }
+
     /// Locks the mutex blocking the current thread until it is available.
     ///
     /// Behavior is undefined if the mutex has been moved between this and any
index 1df511a8818c414849262f14df1926e7e1818741..b5d0357633875829c1164e6f8e1685456c77d0f3 100644 (file)
@@ -42,20 +42,28 @@ pub fn dumb_print(args: fmt::Arguments) {
 // implemented as an illegal instruction.
 #[cfg(unix)]
 unsafe fn abort_internal() -> ! {
-    use libc;
-    libc::abort()
+    ::libc::abort()
 }
 
-// On Windows, we want to avoid using libc, and there isn't a direct
-// equivalent of libc::abort.  The __failfast intrinsic may be a reasonable
-// substitute, but desireability of using it over the abort instrinsic is
-// debateable; see https://github.com/rust-lang/rust/pull/31519 for details.
-#[cfg(not(unix))]
+// On Windows, use the processor-specific __fastfail mechanism.  In Windows 8
+// and later, this will terminate the process immediately without running any
+// in-process exception handlers.  In earlier versions of Windows, this
+// sequence of instructions will be treated as an access violation,
+// terminating the process but without necessarily bypassing all exception
+// handlers.
+//
+// https://msdn.microsoft.com/en-us/library/dn774154.aspx
+#[cfg(all(windows, any(target_arch = "x86", target_arch = "x86_64")))]
 unsafe fn abort_internal() -> ! {
-    use intrinsics;
-    intrinsics::abort()
+    asm!("int $$0x29" :: "{ecx}"(7) ::: volatile); // 7 is FAST_FAIL_FATAL_APP_EXIT
+    ::intrinsics::unreachable();
 }
 
+// Other platforms should use the appropriate platform-specific mechanism for
+// aborting the process.  If no platform-specific mechanism is available,
+// ::intrinsics::abort() may be used instead.  The above implementations cover
+// all targets currently supported by libstd.
+
 pub fn abort(args: fmt::Arguments) -> ! {
     dumb_print(format_args!("fatal runtime error: {}\n", args));
     unsafe { abort_internal(); }
index b0fed2f4694bfcd4163fc53509e1ab9e0de9d225..430ec5f94a6f85da5a0444ddfa0e73b4f52d9f33 100644 (file)
@@ -34,21 +34,6 @@ pub trait CommandExt {
     #[stable(feature = "rust1", since = "1.0.0")]
     fn gid(&mut self, id: u32) -> &mut process::Command;
 
-    /// Create a new session (cf. `setsid(2)`) for the child process. This means
-    /// that the child is the leader of a new process group. The parent process
-    /// remains the child reaper of the new process.
-    ///
-    /// This is not enough to create a daemon process. The *init* process should
-    /// be the child reaper of a daemon. This can be achieved if the parent
-    /// process exit. Moreover, a daemon should not have a controlling terminal.
-    /// To achieve this, a session leader (the child) must spawn another process
-    /// (the daemon) in the same session.
-    #[unstable(feature = "process_session_leader", reason = "recently added",
-               issue = "27811")]
-    #[rustc_deprecated(reason = "use `before_exec` instead",
-                       since = "1.9.0")]
-    fn session_leader(&mut self, on: bool) -> &mut process::Command;
-
     /// Schedules a closure to be run just before the `exec` function is
     /// invoked.
     ///
@@ -112,11 +97,6 @@ fn gid(&mut self, id: u32) -> &mut process::Command {
         self
     }
 
-    fn session_leader(&mut self, on: bool) -> &mut process::Command {
-        self.as_inner_mut().session_leader(on);
-        self
-    }
-
     fn before_exec<F>(&mut self, f: F) -> &mut process::Command
         where F: FnMut() -> io::Result<()> + Send + Sync + 'static
     {
index 4e4abcfbeee4ddb9256451e8f6bdb54e9a0f3d4f..52cf3f97c5c83ba49e12fb37a746710d87840160 100644 (file)
@@ -30,6 +30,39 @@ pub const fn new() -> Mutex {
         Mutex { inner: UnsafeCell::new(libc::PTHREAD_MUTEX_INITIALIZER) }
     }
     #[inline]
+    pub unsafe fn init(&mut self) {
+        // Issue #33770
+        //
+        // A pthread mutex initialized with PTHREAD_MUTEX_INITIALIZER will have
+        // a type of PTHREAD_MUTEX_DEFAULT, which has undefined behavior if you
+        // try to re-lock it from the same thread when you already hold a lock.
+        //
+        // In practice, glibc takes advantage of this undefined behavior to
+        // implement hardware lock elision, which uses hardware transactional
+        // memory to avoid acquiring the lock. While a transaction is in
+        // progress, the lock appears to be unlocked. This isn't a problem for
+        // other threads since the transactional memory will abort if a conflict
+        // is detected, however no abort is generated if re-locking from the
+        // same thread.
+        //
+        // Since locking the same mutex twice will result in two aliasing &mut
+        // references, we instead create the mutex with type
+        // PTHREAD_MUTEX_NORMAL which is guaranteed to deadlock if we try to
+        // re-lock it from the same thread, thus avoiding undefined behavior.
+        //
+        // We can't do anything for StaticMutex, but that type is deprecated
+        // anyway.
+        let mut attr: libc::pthread_mutexattr_t = mem::uninitialized();
+        let r = libc::pthread_mutexattr_init(&mut attr);
+        debug_assert_eq!(r, 0);
+        let r = libc::pthread_mutexattr_settype(&mut attr, libc::PTHREAD_MUTEX_NORMAL);
+        debug_assert_eq!(r, 0);
+        let r = libc::pthread_mutex_init(self.inner.get(), &attr);
+        debug_assert_eq!(r, 0);
+        let r = libc::pthread_mutexattr_destroy(&mut attr);
+        debug_assert_eq!(r, 0);
+    }
+    #[inline]
     pub unsafe fn lock(&self) {
         let r = libc::pthread_mutex_lock(self.inner.get());
         debug_assert_eq!(r, 0);
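Illustrative sketch (not from the patch): the observable consequence of forcing PTHREAD_MUTEX_NORMAL is that a second lock attempt from the owning thread can never silently succeed. Using `try_lock` keeps the example from actually deadlocking:

```rust
use std::sync::Mutex;

fn main() {
    let m = Mutex::new(0u32);
    let guard = m.lock().unwrap();
    // A NORMAL mutex deadlocks on same-thread re-lock instead of handing out
    // a second aliasing &mut; try_lock reports "would block" safely here.
    assert!(m.try_lock().is_err());
    drop(guard);
    *m.lock().unwrap() += 1;
    assert_eq!(*m.lock().unwrap(), 1);
}
```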
index d57191675426455a6856527dad9aab8e5e9e4dce..98cfdcdf11041510049283485fac0d5a8a22b337 100644 (file)
@@ -55,7 +55,6 @@ pub struct Command {
     cwd: Option<CString>,
     uid: Option<uid_t>,
     gid: Option<gid_t>,
-    session_leader: bool,
     saw_nul: bool,
     closures: Vec<Box<FnMut() -> io::Result<()> + Send + Sync>>,
     stdin: Option<Stdio>,
@@ -105,7 +104,6 @@ pub fn new(program: &OsStr) -> Command {
             cwd: None,
             uid: None,
             gid: None,
-            session_leader: false,
             saw_nul: saw_nul,
             closures: Vec::new(),
             stdin: None,
@@ -197,9 +195,6 @@ pub fn uid(&mut self, id: uid_t) {
     pub fn gid(&mut self, id: gid_t) {
         self.gid = Some(id);
     }
-    pub fn session_leader(&mut self, session_leader: bool) {
-        self.session_leader = session_leader;
-    }
 
     pub fn before_exec(&mut self,
                        f: Box<FnMut() -> io::Result<()> + Send + Sync>) {
@@ -367,12 +362,6 @@ macro_rules! t {
 
             t!(cvt(libc::setuid(u as uid_t)));
         }
-        if self.session_leader {
-            // Don't check the error of setsid because it fails if we're the
-            // process leader already. We just forked so it shouldn't return
-            // error, but ignore it anyway.
-            let _ = libc::setsid();
-        }
         if let Some(ref cwd) = self.cwd {
             t!(cvt(libc::chdir(cwd.as_ptr())));
         }
index 44bd5d895f2e4373af64e753349c799b193be556..fbd4e1d120817ebc21a0b5e4e08d2f8593ff02d2 100644 (file)
 
 use libc;
 use cell::UnsafeCell;
+use sync::atomic::{AtomicUsize, Ordering};
 
-pub struct RWLock { inner: UnsafeCell<libc::pthread_rwlock_t> }
+pub struct RWLock {
+    inner: UnsafeCell<libc::pthread_rwlock_t>,
+    write_locked: UnsafeCell<bool>,
+    num_readers: AtomicUsize,
+}
 
 unsafe impl Send for RWLock {}
 unsafe impl Sync for RWLock {}
 
 impl RWLock {
     pub const fn new() -> RWLock {
-        RWLock { inner: UnsafeCell::new(libc::PTHREAD_RWLOCK_INITIALIZER) }
+        RWLock {
+            inner: UnsafeCell::new(libc::PTHREAD_RWLOCK_INITIALIZER),
+            write_locked: UnsafeCell::new(false),
+            num_readers: AtomicUsize::new(0),
+        }
     }
     #[inline]
     pub unsafe fn read(&self) {
@@ -35,37 +44,86 @@ pub unsafe fn read(&self) {
         //
         // We roughly maintain the deadlocking behavior by panicking to ensure
         // that this lock acquisition does not succeed.
-        if r == libc::EDEADLK {
+        //
+        // We also check whether this lock is already write locked. This
+        // is only possible if it was write locked by the current thread and
+        // the implementation allows recursive locking. The POSIX standard
+        // doesn't require recursively locking a rwlock to deadlock, but we can't
+        // allow that because it could lead to aliasing issues.
+        if r == libc::EDEADLK || *self.write_locked.get() {
+            if r == 0 {
+                self.raw_unlock();
+            }
             panic!("rwlock read lock would result in deadlock");
         } else {
             debug_assert_eq!(r, 0);
+            self.num_readers.fetch_add(1, Ordering::Relaxed);
         }
     }
     #[inline]
     pub unsafe fn try_read(&self) -> bool {
-        libc::pthread_rwlock_tryrdlock(self.inner.get()) == 0
+        let r = libc::pthread_rwlock_tryrdlock(self.inner.get());
+        if r == 0 {
+            if *self.write_locked.get() {
+                self.raw_unlock();
+                false
+            } else {
+                self.num_readers.fetch_add(1, Ordering::Relaxed);
+                true
+            }
+        } else {
+            false
+        }
     }
     #[inline]
     pub unsafe fn write(&self) {
         let r = libc::pthread_rwlock_wrlock(self.inner.get());
-        // see comments above for why we check for EDEADLK
-        if r == libc::EDEADLK {
+        // See comments above for why we check for EDEADLK and write_locked. We
+        // also need to check that num_readers is 0.
+        if r == libc::EDEADLK || *self.write_locked.get() ||
+           self.num_readers.load(Ordering::Relaxed) != 0 {
+            if r == 0 {
+                self.raw_unlock();
+            }
             panic!("rwlock write lock would result in deadlock");
         } else {
             debug_assert_eq!(r, 0);
         }
+        *self.write_locked.get() = true;
     }
     #[inline]
     pub unsafe fn try_write(&self) -> bool {
-        libc::pthread_rwlock_trywrlock(self.inner.get()) == 0
+        let r = libc::pthread_rwlock_trywrlock(self.inner.get());
+        if r == 0 {
+            if *self.write_locked.get() || self.num_readers.load(Ordering::Relaxed) != 0 {
+                self.raw_unlock();
+                false
+            } else {
+                *self.write_locked.get() = true;
+                true
+            }
+        } else {
+            false
+        }
     }
     #[inline]
-    pub unsafe fn read_unlock(&self) {
+    unsafe fn raw_unlock(&self) {
         let r = libc::pthread_rwlock_unlock(self.inner.get());
         debug_assert_eq!(r, 0);
     }
     #[inline]
-    pub unsafe fn write_unlock(&self) { self.read_unlock() }
+    pub unsafe fn read_unlock(&self) {
+        debug_assert!(!*self.write_locked.get());
+        self.num_readers.fetch_sub(1, Ordering::Relaxed);
+        self.raw_unlock();
+    }
+    #[inline]
+    pub unsafe fn write_unlock(&self) {
+        debug_assert_eq!(self.num_readers.load(Ordering::Relaxed), 0);
+        debug_assert!(*self.write_locked.get());
+        *self.write_locked.get() = false;
+        self.raw_unlock();
+    }
     #[inline]
     pub unsafe fn destroy(&self) {
         let r = libc::pthread_rwlock_destroy(self.inner.get());
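Illustrative sketch (not from the patch): the `write_locked`/`num_readers` bookkeeping exists to preserve the aliasing guarantees visible through `std::sync::RwLock`:

```rust
use std::sync::RwLock;

fn main() {
    let lock = RwLock::new(vec![1, 2, 3]);

    let r = lock.read().unwrap();
    // While a read guard is alive on this thread, a write lock must not be
    // obtainable; otherwise a &mut Vec would alias the shared borrow.
    assert!(lock.try_write().is_err());
    drop(r);

    lock.write().unwrap().push(4);
    assert_eq!(lock.read().unwrap().len(), 4);
}
```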
index b770156582d3bd6ff87ba053d1fd7af9833bc235..8762b34e3da484df7fbf49854f4ac57f754a8a1d 100644 (file)
@@ -64,6 +64,8 @@ pub const fn new() -> Mutex {
             held: UnsafeCell::new(false),
         }
     }
+    #[inline]
+    pub unsafe fn init(&mut self) {}
     pub unsafe fn lock(&self) {
         match kind() {
             Kind::SRWLock => c::AcquireSRWLockExclusive(raw(self)),
index dc26370590cf825297e0e9d5901e79d1191ed144..c8783a60c4117d23ee12211de55de900f111ec1e 100644 (file)
 ////////////////////////////////////////////////////////////////////////////////
 
 #[macro_use] mod local;
-#[macro_use] mod scoped_tls;
 
 #[stable(feature = "rust1", since = "1.0.0")]
 pub use self::local::{LocalKey, LocalKeyState};
 
-#[unstable(feature = "scoped_tls",
-           reason = "scoped TLS has yet to have wide enough use to fully \
-                     consider stabilizing its interface",
-           issue = "27715")]
-#[allow(deprecated)]
-pub use self::scoped_tls::ScopedKey;
-
 #[unstable(feature = "libstd_thread_internals", issue = "0")]
 #[cfg(target_thread_local)]
 #[doc(hidden)] pub use self::local::elf::Key as __ElfLocalKeyInner;
 #[unstable(feature = "libstd_thread_internals", issue = "0")]
 #[doc(hidden)] pub use self::local::os::Key as __OsLocalKeyInner;
-#[unstable(feature = "libstd_thread_internals", issue = "0")]
-#[doc(hidden)] pub use self::scoped_tls::__KeyInner as __ScopedKeyInner;
 
 ////////////////////////////////////////////////////////////////////////////////
 // Builder
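Illustrative sketch (not from the patch): with `ScopedKey` removed, `thread_local!`/`LocalKey` is the in-tree mechanism for per-thread state; the out-of-tree `scoped_tls` crate offers the scoped flavour for code that still wants it.

```rust
use std::cell::Cell;

// LocalKey-based replacement for simple scoped_thread_local! uses.
thread_local!(static COUNTER: Cell<u32> = Cell::new(0));

fn main() {
    COUNTER.with(|c| c.set(c.get() + 1));
    COUNTER.with(|c| assert_eq!(c.get(), 1));
}
```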
diff --git a/src/libstd/thread/scoped_tls.rs b/src/libstd/thread/scoped_tls.rs
deleted file mode 100644 (file)
index dea58d0..0000000
+++ /dev/null
@@ -1,298 +0,0 @@
-// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Scoped thread-local storage
-//!
-//! This module provides the ability to generate *scoped* thread-local
-//! variables. In this sense, scoped indicates that thread local storage
-//! actually stores a reference to a value, and this reference is only placed
-//! in storage for a scoped amount of time.
-//!
-//! There are no restrictions on what types can be placed into a scoped
-//! variable, but all scoped variables are initialized to the equivalent of
-//! null. Scoped thread local storage is useful when a value is present for a known
-//! period of time and it is not required to relinquish ownership of the
-//! contents.
-//!
-//! # Examples
-//!
-//! ```
-//! #![feature(scoped_tls)]
-//!
-//! scoped_thread_local!(static FOO: u32);
-//!
-//! // Initially each scoped slot is empty.
-//! assert!(!FOO.is_set());
-//!
-//! // When inserting a value, the value is only in place for the duration
-//! // of the closure specified.
-//! FOO.set(&1, || {
-//!     FOO.with(|slot| {
-//!         assert_eq!(*slot, 1);
-//!     });
-//! });
-//! ```
-
-#![unstable(feature = "thread_local_internals", issue = "0")]
-#![allow(deprecated)]
-
-#[doc(hidden)]
-pub use self::imp::KeyInner as __KeyInner;
-
-/// Type representing a thread local storage key corresponding to a reference
-/// to the type parameter `T`.
-///
-/// Keys are statically allocated and can contain a reference to an instance of
-/// type `T` scoped to a particular lifetime. Keys provides two methods, `set`
-/// and `with`, both of which currently use closures to control the scope of
-/// their contents.
-#[unstable(feature = "scoped_tls",
-           reason = "scoped TLS has yet to have wide enough use to fully consider \
-                     stabilizing its interface",
-           issue = "27715")]
-#[rustc_deprecated(since = "1.8.0",
-                   reason = "hasn't proven itself over LocalKey")]
-pub struct ScopedKey<T:'static> { inner: fn() -> &'static imp::KeyInner<T> }
-
-/// Declare a new scoped thread local storage key.
-///
-/// This macro declares a `static` item on which methods are used to get and
-/// set the value stored within.
-///
-/// See [ScopedKey documentation](thread/struct.ScopedKey.html) for more
-/// information.
-#[unstable(feature = "thread_local_internals",
-           reason = "should not be necessary",
-           issue = "0")]
-#[rustc_deprecated(since = "1.8.0",
-                   reason = "hasn't proven itself over LocalKey")]
-#[macro_export]
-#[allow_internal_unstable]
-macro_rules! scoped_thread_local {
-    (static $name:ident: $t:ty) => (
-        static $name: $crate::thread::ScopedKey<$t> =
-            __scoped_thread_local_inner!($t);
-    );
-    (pub static $name:ident: $t:ty) => (
-        pub static $name: $crate::thread::ScopedKey<$t> =
-            __scoped_thread_local_inner!($t);
-    );
-}
-
-#[doc(hidden)]
-#[unstable(feature = "thread_local_internals",
-           reason = "should not be necessary",
-           issue = "0")]
-#[rustc_deprecated(since = "1.8.0",
-                   reason = "hasn't proven itself over LocalKey")]
-#[macro_export]
-#[allow_internal_unstable]
-macro_rules! __scoped_thread_local_inner {
-    ($t:ty) => {{
-        #[cfg_attr(target_thread_local, thread_local)]
-        static _KEY: $crate::thread::__ScopedKeyInner<$t> =
-            $crate::thread::__ScopedKeyInner::new();
-        fn _getit() -> &'static $crate::thread::__ScopedKeyInner<$t> { &_KEY }
-        $crate::thread::ScopedKey::new(_getit)
-    }}
-}
-
-#[unstable(feature = "scoped_tls",
-           reason = "scoped TLS has yet to have wide enough use to fully consider \
-                     stabilizing its interface",
-           issue = "27715")]
-#[rustc_deprecated(since = "1.8.0",
-                   reason = "hasn't proven itself over LocalKey")]
-impl<T> ScopedKey<T> {
-    #[doc(hidden)]
-    pub const fn new(inner: fn() -> &'static imp::KeyInner<T>) -> ScopedKey<T> {
-        ScopedKey { inner: inner }
-    }
-
-    /// Inserts a value into this scoped thread local storage slot for a
-    /// duration of a closure.
-    ///
-    /// While `cb` is running, the value `t` will be returned by `get` unless
-    /// this function is called recursively inside of `cb`.
-    ///
-    /// Upon return, this function will restore the previous value, if any
-    /// was available.
-    ///
-    /// # Examples
-    ///
-    /// ```
-    /// #![feature(scoped_tls)]
-    ///
-    /// scoped_thread_local!(static FOO: u32);
-    ///
-    /// FOO.set(&100, || {
-    ///     let val = FOO.with(|v| *v);
-    ///     assert_eq!(val, 100);
-    ///
-    ///     // set can be called recursively
-    ///     FOO.set(&101, || {
-    ///         // ...
-    ///     });
-    ///
-    ///     // Recursive calls restore the previous value.
-    ///     let val = FOO.with(|v| *v);
-    ///     assert_eq!(val, 100);
-    /// });
-    /// ```
-    pub fn set<R, F>(&'static self, t: &T, cb: F) -> R where
-        F: FnOnce() -> R,
-    {
-        struct Reset<'a, T: 'a> {
-            key: &'a imp::KeyInner<T>,
-            val: *mut T,
-        }
-                impl<'a, T> Drop for Reset<'a, T> {
-            fn drop(&mut self) {
-                unsafe { self.key.set(self.val) }
-            }
-        }
-
-        let inner = (self.inner)();
-        let prev = unsafe {
-            let prev = inner.get();
-            inner.set(t as *const T as *mut T);
-            prev
-        };
-
-        let _reset = Reset { key: inner, val: prev };
-        cb()
-    }
-
-    /// Gets a value out of this scoped variable.
-    ///
-    /// This function takes a closure which receives the value of this
-    /// variable.
-    ///
-    /// # Panics
-    ///
-    /// This function will panic if `set` has not previously been called.
-    ///
-    /// # Examples
-    ///
-    /// ```no_run
-    /// #![feature(scoped_tls)]
-    ///
-    /// scoped_thread_local!(static FOO: u32);
-    ///
-    /// FOO.with(|slot| {
-    ///     // work with `slot`
-    /// });
-    /// ```
-    pub fn with<R, F>(&'static self, cb: F) -> R where
-        F: FnOnce(&T) -> R
-    {
-        unsafe {
-            let ptr = (self.inner)().get();
-            assert!(!ptr.is_null(), "cannot access a scoped thread local \
-                                     variable without calling `set` first");
-            cb(&*ptr)
-        }
-    }
-
-    /// Test whether this TLS key has been `set` for the current thread.
-    pub fn is_set(&'static self) -> bool {
-        unsafe { !(self.inner)().get().is_null() }
-    }
-}
-
-#[cfg(target_thread_local)]
-#[doc(hidden)]
-mod imp {
-    use cell::Cell;
-    use ptr;
-
-    pub struct KeyInner<T> { inner: Cell<*mut T> }
-
-    unsafe impl<T> ::marker::Sync for KeyInner<T> { }
-
-    impl<T> KeyInner<T> {
-        pub const fn new() -> KeyInner<T> {
-            KeyInner { inner: Cell::new(ptr::null_mut()) }
-        }
-        pub unsafe fn set(&self, ptr: *mut T) { self.inner.set(ptr); }
-        pub unsafe fn get(&self) -> *mut T { self.inner.get() }
-    }
-}
-
-#[cfg(not(target_thread_local))]
-#[doc(hidden)]
-mod imp {
-    use cell::Cell;
-    use marker;
-    use sys_common::thread_local::StaticKey as OsStaticKey;
-
-    pub struct KeyInner<T> {
-        pub inner: OsStaticKey,
-        pub marker: marker::PhantomData<Cell<T>>,
-    }
-
-    unsafe impl<T> marker::Sync for KeyInner<T> { }
-
-    impl<T> KeyInner<T> {
-        pub const fn new() -> KeyInner<T> {
-            KeyInner {
-                inner: OsStaticKey::new(None),
-                marker: marker::PhantomData
-            }
-        }
-        pub unsafe fn set(&self, ptr: *mut T) { self.inner.set(ptr as *mut _) }
-        pub unsafe fn get(&self) -> *mut T { self.inner.get() as *mut _ }
-    }
-}
-
-
-#[cfg(test)]
-mod tests {
-    use cell::Cell;
-
-    scoped_thread_local!(static FOO: u32);
-
-    #[test]
-    fn smoke() {
-        scoped_thread_local!(static BAR: u32);
-
-        assert!(!BAR.is_set());
-        BAR.set(&1, || {
-            assert!(BAR.is_set());
-            BAR.with(|slot| {
-                assert_eq!(*slot, 1);
-            });
-        });
-        assert!(!BAR.is_set());
-    }
-
-    #[test]
-    fn cell_allowed() {
-        scoped_thread_local!(static BAR: Cell<u32>);
-
-        BAR.set(&Cell::new(1), || {
-            BAR.with(|slot| {
-                assert_eq!(slot.get(), 1);
-            });
-        });
-    }
-
-    #[test]
-    fn scope_item_allowed() {
-        assert!(!FOO.is_set());
-        FOO.set(&1, || {
-            assert!(FOO.is_set());
-            FOO.with(|slot| {
-                assert_eq!(*slot, 1);
-            });
-        });
-        assert!(!FOO.is_set());
-    }
-}
index 80963a9b735a5e32294d0753d953d8247f27d0e8..0e1508a1c4c28d130baf1813096a7c5ac2960cc8 100644 (file)
@@ -143,13 +143,6 @@ pub fn duration_since(&self, earlier: Instant) -> Duration {
         self.0.sub_instant(&earlier.0)
     }
 
-    /// Deprecated, renamed to `duration_since`
-    #[unstable(feature = "time2_old", issue = "29866")]
-    #[rustc_deprecated(since = "1.8.0", reason = "renamed to duration_since")]
-    pub fn duration_from_earlier(&self, earlier: Instant) -> Duration {
-        self.0.sub_instant(&earlier.0)
-    }
-
     /// Returns the amount of time elapsed since this instant was created.
     ///
     /// # Panics
@@ -235,14 +228,6 @@ pub fn duration_since(&self, earlier: SystemTime)
         self.0.sub_time(&earlier.0).map_err(SystemTimeError)
     }
 
-    /// Deprecated, renamed to `duration_since`
-    #[unstable(feature = "time2_old", issue = "29866")]
-    #[rustc_deprecated(since = "1.8.0", reason = "renamed to duration_since")]
-    pub fn duration_from_earlier(&self, earlier: SystemTime)
-                                 -> Result<Duration, SystemTimeError> {
-        self.0.sub_time(&earlier.0).map_err(SystemTimeError)
-    }
-
     /// Returns the amount of time elapsed since this system time was created.
     ///
     /// This function may fail as the underlying system clock is susceptible to
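
The two hunks above drop the deprecated `duration_from_earlier` methods in favor of `duration_since`. A minimal standalone sketch of the surviving API, assuming the stable `std::time` interface (illustrative, not taken from this diff):

    use std::thread::sleep;
    use std::time::{Duration, Instant, SystemTime};

    fn main() {
        let start = Instant::now();
        sleep(Duration::from_millis(10));

        // `duration_since` replaces the removed `duration_from_earlier`.
        let elapsed = Instant::now().duration_since(start);
        assert!(elapsed >= Duration::from_millis(10));

        // The SystemTime variant is fallible, since the clock may go backwards.
        let since_epoch = SystemTime::now()
            .duration_since(SystemTime::UNIX_EPOCH)
            .expect("system clock is set before the Unix epoch");
        println!("{:?} since the epoch", since_epoch);
    }
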
index c3202dbdbb49e149d589a247ec415210b15326b7..d3f5a573218fdd8f11fa4398e4263f52103f254f 100644 (file)
@@ -25,7 +25,6 @@
 use fold;
 use fold::*;
 use util::move_map::MoveMap;
-use parse;
 use parse::token::{fresh_mark, fresh_name, intern, keywords};
 use ptr::P;
 use util::small_vector::SmallVector;
@@ -1212,24 +1211,6 @@ fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec<TokenTree> {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
 
-/// Check that there are no macro invocations left in the AST:
-pub fn check_for_macros(sess: &parse::ParseSess, krate: &ast::Crate) {
-    visit::walk_crate(&mut MacroExterminator{sess:sess}, krate);
-}
-
-/// A visitor that ensures that no macro invocations remain in an AST.
-struct MacroExterminator<'a>{
-    sess: &'a parse::ParseSess
-}
-
-impl<'a, 'v> Visitor<'v> for MacroExterminator<'a> {
-    fn visit_mac(&mut self, mac: &ast::Mac) {
-        self.sess.span_diagnostic.span_bug(mac.span,
-                                           "macro exterminator: expected AST \
-                                           with no macro invocations");
-    }
-}
-
 
 #[cfg(test)]
 mod tests {
index ee9a197ce56ccb84cc39167d1c2be5974543d5ff..871b0d4b1c023da205c3eb404c152ad87f7ddf57 100644 (file)
@@ -422,7 +422,7 @@ pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt,
     base::MacEager::expr(expanded)
 }
 
-pub fn expand_quote_item<'cx>(cx: &mut ExtCtxt,
+pub fn expand_quote_item<'cx>(cx: &'cx mut ExtCtxt,
                               sp: Span,
                               tts: &[TokenTree])
                               -> Box<base::MacResult+'cx> {
index 3d5f32eadb3c4dec8d28d950a62e1ea7f194258f..09c23682cd73f79afb8f46a045c6ae42314eaf34 100644 (file)
@@ -17,7 +17,7 @@
 use syntax::parse::token::str_to_ident;
 use syntax::ptr::P;
 
-pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree])
+pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree])
                               -> Box<base::MacResult+'cx> {
     if !cx.ecfg.enable_concat_idents() {
         feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
index 4638c60dedfa581fd5fa7c6420d8f32274c9ca0b..a3736a0a1907cbc8bf619708738815a5fd789c80 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 4638c60dedfa581fd5fa7c6420d8f32274c9ca0b
+Subproject commit a3736a0a1907cbc8bf619708738815a5fd789c80
diff --git a/src/test/compile-fail/E0162.rs b/src/test/compile-fail/E0162.rs
new file mode 100644 (file)
index 0000000..e13b0af
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Irrefutable(i32);
+
+fn main() {
+    let irr = Irrefutable(0);
+    if let Irrefutable(x) = irr { //~ ERROR E0162
+        println!("{}", x);
+    }
+}
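
E0162 fires because `Irrefutable(x)` always matches, so the `if let` adds nothing. A sketch of the accepted form, reusing the test's own struct (illustrative, not part of the diff):

    struct Irrefutable(i32);

    fn main() {
        let irr = Irrefutable(0);
        // An irrefutable pattern binds directly with `let`; no `if let` needed.
        let Irrefutable(x) = irr;
        println!("{}", x);
    }
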
diff --git a/src/test/compile-fail/E0163.rs b/src/test/compile-fail/E0163.rs
new file mode 100644 (file)
index 0000000..5cb6f4d
--- /dev/null
@@ -0,0 +1,20 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+enum Foo { B(u32) }
+
+fn bar(foo: Foo) -> u32 {
+    match foo {
+        Foo::B { i } => i, //~ ERROR E0163
+    }
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0164.rs b/src/test/compile-fail/E0164.rs
new file mode 100644 (file)
index 0000000..491b2e9
--- /dev/null
@@ -0,0 +1,20 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+enum Foo { B { i: u32 } }
+
+fn bar(foo: Foo) -> u32 {
+    match foo {
+        Foo::B(i) => i, //~ ERROR E0164
+    }
+}
+
+fn main() {
+}
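
The E0163 and E0164 tests above each use the pattern syntax of the other variant kind. A minimal sketch of the matching forms; the enum names here are illustrative, not from the diff:

    enum Tuple { B(u32) }
    enum Struct { B { i: u32 } }

    // Tuple variants are matched with parentheses, struct variants with braces.
    fn get_tuple(x: Tuple) -> u32 {
        match x {
            Tuple::B(i) => i,
        }
    }

    fn get_struct(x: Struct) -> u32 {
        match x {
            Struct::B { i } => i,
        }
    }

    fn main() {
        assert_eq!(get_tuple(Tuple::B(1)), 1);
        assert_eq!(get_struct(Struct::B { i: 2 }), 2);
    }
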
diff --git a/src/test/compile-fail/E0165.rs b/src/test/compile-fail/E0165.rs
new file mode 100644 (file)
index 0000000..cca714b
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Irrefutable(i32);
+
+fn main() {
+    let irr = Irrefutable(0);
+    while let Irrefutable(x) = irr { //~ ERROR E0165
+        // ...
+    }
+}
diff --git a/src/test/compile-fail/E0166.rs b/src/test/compile-fail/E0166.rs
new file mode 100644 (file)
index 0000000..9fa4124
--- /dev/null
@@ -0,0 +1,14 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn foo() -> ! { return; } //~ ERROR E0166
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0172.rs b/src/test/compile-fail/E0172.rs
new file mode 100644 (file)
index 0000000..7011bf0
--- /dev/null
@@ -0,0 +1,14 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn foo(bar: i32+std::fmt::Display) {} //~ ERROR E0172
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0178.rs b/src/test/compile-fail/E0178.rs
new file mode 100644 (file)
index 0000000..f34f383
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo {}
+
+struct Bar<'a> {
+    w: &'a Foo + Copy, //~ ERROR E0178
+    x: &'a Foo + 'a, //~ ERROR E0178
+    y: &'a mut Foo + 'a, //~ ERROR E0178
+    z: fn() -> Foo + 'a, //~ ERROR E0178
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0184.rs b/src/test/compile-fail/E0184.rs
new file mode 100644 (file)
index 0000000..5d72d00
--- /dev/null
@@ -0,0 +1,20 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[derive(Copy)] //~ ERROR E0184
+struct Foo;
+
+impl Drop for Foo {
+    fn drop(&mut self) {
+    }
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0185.rs b/src/test/compile-fail/E0185.rs
new file mode 100644 (file)
index 0000000..0e33687
--- /dev/null
@@ -0,0 +1,22 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo {
+    fn foo();
+}
+
+struct Bar;
+
+impl Foo for Bar {
+    fn foo(&self) {} //~ ERROR E0185
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0186.rs b/src/test/compile-fail/E0186.rs
new file mode 100644 (file)
index 0000000..aa0a38b
--- /dev/null
@@ -0,0 +1,22 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo {
+    fn foo(&self);
+}
+
+struct Bar;
+
+impl Foo for Bar {
+    fn foo() {} //~ ERROR E0186
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0191.rs b/src/test/compile-fail/E0191.rs
new file mode 100644 (file)
index 0000000..489ebb0
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Trait {
+    type Bar;
+}
+
+type Foo = Trait; //~ ERROR E0191
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0192.rs b/src/test/compile-fail/E0192.rs
new file mode 100644 (file)
index 0000000..92f5876
--- /dev/null
@@ -0,0 +1,22 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(optin_builtin_traits)]
+
+trait Trait {
+    type Bar;
+}
+
+struct Foo;
+
+impl !Trait for Foo { } //~ ERROR E0192
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0194.rs b/src/test/compile-fail/E0194.rs
new file mode 100644 (file)
index 0000000..96b3062
--- /dev/null
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo<T> {
+    fn do_something(&self) -> T;
+    fn do_something_else<T: Clone>(&self, bar: T); //~ ERROR E0194
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0195.rs b/src/test/compile-fail/E0195.rs
new file mode 100644 (file)
index 0000000..0630dfe
--- /dev/null
@@ -0,0 +1,23 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Trait {
+    fn bar<'a,'b:'a>(x: &'a str, y: &'b str);
+}
+
+struct Foo;
+
+impl Trait for Foo {
+    fn bar<'a,'b>(x: &'a str, y: &'b str) { //~ ERROR E0195
+    }
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0197.rs b/src/test/compile-fail/E0197.rs
new file mode 100644 (file)
index 0000000..f25fa9b
--- /dev/null
@@ -0,0 +1,16 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Foo;
+
+unsafe impl Foo { } //~ ERROR E0197
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0199.rs b/src/test/compile-fail/E0199.rs
new file mode 100644 (file)
index 0000000..8bd3ffd
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(optin_builtin_traits)]
+
+struct Foo;
+
+unsafe impl !Clone for Foo { } //~ ERROR E0199
+
+fn main() {
+}
diff --git a/src/test/compile-fail/E0200.rs b/src/test/compile-fail/E0200.rs
new file mode 100644 (file)
index 0000000..6bfea0e
--- /dev/null
@@ -0,0 +1,18 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Foo;
+
+unsafe trait Bar { }
+
+impl Bar for Foo { } //~ ERROR E0200
+
+fn main() {
+}
diff --git a/src/test/compile-fail/associated-types/cache/chrono-scan.rs b/src/test/compile-fail/associated-types/cache/chrono-scan.rs
new file mode 100644 (file)
index 0000000..a753527
--- /dev/null
@@ -0,0 +1,39 @@
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_attrs)]
+#![allow(warnings)]
+
+pub type ParseResult<T> = Result<T, ()>;
+
+pub enum Item<'a> {     Literal(&'a str),
+ }
+
+pub fn colon_or_space(s: &str) -> ParseResult<&str> {
+    unimplemented!()
+}
+
+pub fn timezone_offset_zulu<F>(s: &str, colon: F) -> ParseResult<(&str, i32)>
+        where F: FnMut(&str) -> ParseResult<&str> {
+    unimplemented!()
+}
+
+pub fn parse<'a, I>(mut s: &str, items: I) -> ParseResult<()>
+        where I: Iterator<Item=Item<'a>> {
+    macro_rules! try_consume {
+        ($e:expr) => ({ let (s_, v) = try!($e); s = s_; v })
+    }
+    let offset = try_consume!(timezone_offset_zulu(s.trim_left(), colon_or_space));
+    let offset = try_consume!(timezone_offset_zulu(s.trim_left(), colon_or_space));
+    Ok(())
+}
+
+#[rustc_error]
+fn main() { } //~ ERROR compilation successful
diff --git a/src/test/compile-fail/associated-types/cache/elision.rs b/src/test/compile-fail/associated-types/cache/elision.rs
new file mode 100644 (file)
index 0000000..d111732
--- /dev/null
@@ -0,0 +1,34 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_attrs)]
+#![allow(warnings)]
+
+// Check that you are allowed to implement a method using lifetime
+// elision even when the trait spells the lifetime out explicitly (a bug
+// in this area cropped up during bootstrapping, so this is a regression test).
+
+pub struct SplitWhitespace<'a> {
+    x: &'a u8
+}
+
+pub trait UnicodeStr {
+    fn split_whitespace<'a>(&'a self) -> SplitWhitespace<'a>;
+}
+
+impl UnicodeStr for str {
+    #[inline]
+    fn split_whitespace(&self) -> SplitWhitespace {
+        unimplemented!()
+    }
+}
+
+#[rustc_error]
+fn main() { } //~ ERROR compilation successful
diff --git a/src/test/compile-fail/associated-types/cache/project-fn-ret-contravariant.rs b/src/test/compile-fail/associated-types/cache/project-fn-ret-contravariant.rs
new file mode 100644 (file)
index 0000000..c5557ce
--- /dev/null
@@ -0,0 +1,65 @@
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(unboxed_closures)]
+#![feature(rustc_attrs)]
+
+// Test for projection cache. We should be able to project distinct
+// lifetimes from `foo` as we reinstantiate it multiple times, but not
+// if we do it just once. In this variant, the region `'a` is used in
+// a contravariant position, which affects the results.
+
+// revisions: ok oneuse transmute krisskross
+
+#![allow(dead_code, unused_variables)]
+
+fn foo<'a>() -> &'a u32 { loop { } }
+
+fn bar<T>(t: T, x: T::Output) -> T::Output
+    where T: FnOnce<()>
+{
+    t()
+}
+
+#[cfg(ok)] // two instantiations: OK
+fn baz<'a,'b>(x: &'a u32, y: &'b u32) -> (&'a u32, &'b u32) {
+    let a = bar(foo, x);
+    let b = bar(foo, y);
+    (a, b)
+}
+
+#[cfg(oneuse)] // one instantiation: OK (surprisingly)
+fn baz<'a,'b>(x: &'a u32, y: &'b u32) -> (&'a u32, &'b u32) {
+    let f /* : fn() -> &'static u32 */ = foo; // <-- inferred type annotated
+    let a = bar(f, x); // this is considered ok because fn args are contravariant...
+    let b = bar(f, y); // ...and hence we infer T to distinct values in each call.
+    (a, b)
+}
+
+// FIXME(#32330)
+//#[cfg(transmute)] // one instantiation: BAD
+//fn baz<'a,'b>(x: &'a u32) -> &'static u32 {
+//    bar(foo, x) //[transmute] ERROR E0495
+//}
+
+// FIXME(#32330)
+//#[cfg(krisskross)] // two instantiations, mixing and matching: BAD
+//fn transmute<'a,'b>(x: &'a u32, y: &'b u32) -> (&'a u32, &'b u32) {
+//    let a = bar(foo, y); //[krisskross] ERROR E0495
+//    let b = bar(foo, x); //[krisskross] ERROR E0495
+//    (a, b)
+//}
+
+#[rustc_error]
+fn main() { }
+//[ok]~^ ERROR compilation successful
+//[oneuse]~^^ ERROR compilation successful
+//[transmute]~^^^ ERROR compilation successful
+//[krisskross]~^^^^ ERROR compilation successful
diff --git a/src/test/compile-fail/associated-types/cache/project-fn-ret-invariant.rs b/src/test/compile-fail/associated-types/cache/project-fn-ret-invariant.rs
new file mode 100644 (file)
index 0000000..a15422e
--- /dev/null
@@ -0,0 +1,76 @@
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(unboxed_closures)]
+#![feature(rustc_attrs)]
+
+// Test for projection cache. We should be able to project distinct
+// lifetimes from `foo` as we reinstantiate it multiple times, but not
+// if we do it just once. In this variant, the region `'a` is used in
+// an invariant position, which affects the results.
+
+// revisions: ok oneuse transmute krisskross
+
+#![allow(dead_code, unused_variables)]
+
+use std::marker::PhantomData;
+
+struct Type<'a> {
+    // Invariant
+    data: PhantomData<fn(&'a u32) -> &'a u32>
+}
+
+fn foo<'a>() -> Type<'a> { loop { } }
+
+fn bar<T>(t: T, x: T::Output) -> T::Output
+    where T: FnOnce<()>
+{
+    t()
+}
+
+#[cfg(ok)] // two instantiations: OK
+fn baz<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) {
+    let a = bar(foo, x);
+    let b = bar(foo, y);
+    (a, b)
+}
+
+// FIXME(#32330)
+//#[cfg(oneuse)] // one instantiation: BAD
+//fn baz<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) {
+//    let f = foo; // <-- No consistent type can be inferred for `f` here.
+//    let a = bar(f, x); //[oneuse] ERROR E0495
+//    let b = bar(f, y);
+//    (a, b)
+//}
+
+// FIXME(#32330)
+//#[cfg(transmute)] // one instantiation: BAD
+//fn baz<'a,'b>(x: Type<'a>) -> Type<'static> {
+//    // Cannot instantiate `foo` with any lifetime other than `'a`,
+//    // since it is provided as input.
+//
+//    bar(foo, x) //[transmute] ERROR E0495
+//}
+
+// FIXME(#32330)
+//#[cfg(krisskross)] // two instantiations, mixing and matching: BAD
+//fn transmute<'a,'b>(x: Type<'a>, y: Type<'b>) -> (Type<'a>, Type<'b>) {
+//    let a = bar(foo, y); //[krisskross] ERROR E0495
+//    let b = bar(foo, x); //[krisskross] ERROR E0495
+//    (a, b)
+//}
+
+#[rustc_error]
+fn main() { }
+//[ok]~^ ERROR compilation successful
+//[oneuse]~^^ ERROR compilation successful
+//[transmute]~^^^ ERROR compilation successful
+//[krisskross]~^^^^ ERROR compilation successful
diff --git a/src/test/compile-fail/associated-types/cache/wasm-issue-32330.rs b/src/test/compile-fail/associated-types/cache/wasm-issue-32330.rs
new file mode 100644 (file)
index 0000000..01db477
--- /dev/null
@@ -0,0 +1,49 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test was derived from the wasm and parsell crates. They
+// stop compiling when #32330 is fixed.
+
+#![allow(dead_code, unused_variables)]
+#![deny(hr_lifetime_in_assoc_type)]
+#![feature(unboxed_closures)]
+
+use std::str::Chars;
+
+pub trait HasOutput<Ch, Str> {
+    type Output;
+}
+
+#[derive(Clone, PartialEq, Eq, Hash, Ord, PartialOrd, Debug)]
+pub enum Token<'a> {
+    Begin(&'a str)
+}
+
+fn mk_unexpected_char_err<'a>() -> Option<&'a i32> {
+    unimplemented!()
+}
+
+fn foo<'a>(data: &mut Chars<'a>) {
+    bar(mk_unexpected_char_err)
+    //~^ ERROR lifetime parameter `'a` declared on fn `mk_unexpected_char_err`
+    //~| WARNING hard error in a future release
+}
+
+fn bar<F>(t: F)
+    // No type can satisfy this requirement, since `'a` does not
+    // appear in any of the input types:
+    where F: for<'a> Fn() -> Option<&'a i32>
+    //~^ ERROR associated type `Output` references lifetime `'a`, which does not
+    //~| WARNING hard error in a future release
+{
+}
+
+fn main() {
+}
diff --git a/src/test/compile-fail/hr-subtype.rs b/src/test/compile-fail/hr-subtype.rs
new file mode 100644 (file)
index 0000000..95e469e
--- /dev/null
@@ -0,0 +1,119 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Targeted tests for the higher-ranked subtyping code.
+
+#![feature(rustc_attrs)]
+#![allow(dead_code)]
+
+// revisions: bound_a_vs_bound_a
+// revisions: bound_a_vs_bound_b
+// revisions: bound_inv_a_vs_bound_inv_b
+// revisions: bound_co_a_vs_bound_co_b
+// revisions: bound_a_vs_free_x
+// revisions: free_x_vs_free_x
+// revisions: free_x_vs_free_y
+// revisions: free_inv_x_vs_free_inv_y
+// revisions: bound_a_b_vs_bound_a
+// revisions: bound_co_a_b_vs_bound_co_a
+// revisions: bound_contra_a_contra_b_ret_co_a
+// revisions: bound_co_a_co_b_ret_contra_a
+// revisions: bound_inv_a_b_vs_bound_inv_a
+// revisions: bound_a_b_ret_a_vs_bound_a_ret_a
+
+fn gimme<T>(_: Option<T>) { }
+
+struct Inv<'a> { x: *mut &'a u32 }
+
+struct Co<'a> { x: fn(&'a u32) }
+
+struct Contra<'a> { x: &'a u32 }
+
+macro_rules! check {
+    ($rev:ident: ($t1:ty, $t2:ty)) => {
+        #[cfg($rev)]
+        fn subtype<'x,'y:'x,'z:'y>() {
+            gimme::<$t2>(None::<$t1>);
+            //[free_inv_x_vs_free_inv_y]~^ ERROR mismatched types
+        }
+
+        #[cfg($rev)]
+        fn supertype<'x,'y:'x,'z:'y>() {
+            gimme::<$t1>(None::<$t2>);
+            //[bound_a_vs_free_x]~^ ERROR mismatched types
+            //[free_x_vs_free_y]~^^ ERROR mismatched types
+            //[bound_inv_a_b_vs_bound_inv_a]~^^^ ERROR mismatched types
+            //[bound_a_b_ret_a_vs_bound_a_ret_a]~^^^^ ERROR mismatched types
+            //[free_inv_x_vs_free_inv_y]~^^^^^ ERROR mismatched types
+            //[bound_a_b_vs_bound_a]~^^^^^^ ERROR mismatched types
+            //[bound_co_a_b_vs_bound_co_a]~^^^^^^^ ERROR mismatched types
+            //[bound_contra_a_contra_b_ret_co_a]~^^^^^^^^ ERROR mismatched types
+            //[bound_co_a_co_b_ret_contra_a]~^^^^^^^^^ ERROR mismatched types
+        }
+    }
+}
+
+// If both have bound regions, they are equivalent, regardless of
+// variant.
+check! { bound_a_vs_bound_a: (for<'a> fn(&'a u32),
+                              for<'a> fn(&'a u32)) }
+check! { bound_a_vs_bound_b: (for<'a> fn(&'a u32),
+                              for<'b> fn(&'b u32)) }
+check! { bound_inv_a_vs_bound_inv_b: (for<'a> fn(Inv<'a>),
+                                      for<'b> fn(Inv<'b>)) }
+check! { bound_co_a_vs_bound_co_b: (for<'a> fn(Co<'a>),
+                                    for<'b> fn(Co<'b>)) }
+
+// Bound is a subtype of free.
+check! { bound_a_vs_free_x: (for<'a> fn(&'a u32),
+                             fn(&'x u32)) }
+
+// Two free regions are relatable if subtyping holds.
+check! { free_x_vs_free_x: (fn(&'x u32),
+                            fn(&'x u32)) }
+check! { free_x_vs_free_y: (fn(&'x u32),
+                            fn(&'y u32)) }
+check! { free_inv_x_vs_free_inv_y: (fn(Inv<'x>),
+                                    fn(Inv<'y>)) }
+
+// Somewhat surprisingly, a fn taking two distinct bound lifetimes and
+// a fn taking one bound lifetime can be interchangeable, but only if
+// we are co- or contra-variant with respect to both lifetimes.
+//
+// The reason is:
+// - if we are covariant, then 'a and 'b can be set to the call-site
+//   intersection;
+// - if we are contravariant, then 'a can be inferred to 'static.
+//
+// FIXME(#32330) this is true, but we do not currently implement the
+// full semantics.
+check! { bound_a_b_vs_bound_a: (for<'a,'b> fn(&'a u32, &'b u32),
+                                for<'a>    fn(&'a u32, &'a u32)) }
+check! { bound_co_a_b_vs_bound_co_a: (for<'a,'b> fn(Co<'a>, Co<'b>),
+                                      for<'a>    fn(Co<'a>, Co<'a>)) }
+check! { bound_contra_a_contra_b_ret_co_a: (for<'a,'b> fn(Contra<'a>, Contra<'b>) -> Co<'a>,
+                                            for<'a>    fn(Contra<'a>, Contra<'a>) -> Co<'a>) }
+check! { bound_co_a_co_b_ret_contra_a: (for<'a,'b> fn(Co<'a>, Co<'b>) -> Contra<'a>,
+                                        for<'a>    fn(Co<'a>, Co<'a>) -> Contra<'a>) }
+
+// If we make those lifetimes invariant, then the two types are not interchangeable.
+check! { bound_inv_a_b_vs_bound_inv_a: (for<'a,'b> fn(Inv<'a>, Inv<'b>),
+                                        for<'a>    fn(Inv<'a>, Inv<'a>)) }
+check! { bound_a_b_ret_a_vs_bound_a_ret_a: (for<'a,'b> fn(&'a u32, &'b u32) -> &'a u32,
+                                            for<'a>    fn(&'a u32, &'a u32) -> &'a u32) }
+
+#[rustc_error]
+fn main() {
+//[bound_a_vs_bound_a]~^ ERROR compilation successful
+//[bound_a_vs_bound_b]~^^ ERROR compilation successful
+//[bound_inv_a_vs_bound_inv_b]~^^^ ERROR compilation successful
+//[bound_co_a_vs_bound_co_b]~^^^^ ERROR compilation successful
+//[free_x_vs_free_x]~^^^^^ ERROR compilation successful
+}
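
One concrete instance of the "bound is a subtype of free" case exercised above, as a standalone sketch; the helper names are illustrative, not from the diff:

    // A higher-ranked fn pointer works wherever a fn pointer over one
    // specific lifetime is expected, but not the other way around.
    fn print_it(v: &u32) {
        println!("{}", v);
    }

    fn takes_specific<'x>(f: fn(&'x u32), x: &'x u32) {
        f(x);
    }

    fn main() {
        let f: for<'a> fn(&'a u32) = print_it;
        let n = 7;
        takes_specific(f, &n); // for<'a> fn(&'a u32) coerces to fn(&'x u32)
    }
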
index 46b7b933d8796fd38ababd396a2a3a833f30827f..ceac7e968f65c1bceaa43574b75712d3ccd8e7ec 100644 (file)
@@ -28,7 +28,7 @@ fn new(buf: &'a mut [u8]) -> Foo<'a> {
 
 impl<'a> NoLifetime for Foo<'a> {
     fn get<'p, T : Test<'a>>(&self) -> T {
-//~^ ERROR lifetime parameters or bounds on method `get` do not match the trait declaration
+//~^ ERROR E0195
         return *self as T;
     }
 }
index da6d081a7acb57e654e2fdadd9862246db842b2e..c6ce0c4c95b8bcbf97a3e0024e391ac4b9e34ccf 100644 (file)
@@ -8,12 +8,15 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use std::raw::Slice;
+struct Slice<T> {
+    data: *const T,
+    len: usize,
+}
 
 fn main() {
     let Slice { data: data, len: len } = "foo";
     //~^ ERROR mismatched types
     //~| expected type `&str`
-    //~| found type `std::raw::Slice<_>`
-    //~| expected &-ptr, found struct `std::raw::Slice`
+    //~| found type `Slice<_>`
+    //~| expected &-ptr, found struct `Slice`
 }
index df272a71cee4f70af8daf73c6cb9475e79c76cd6..3060bbea43c3bf8ea0b2520ef1e8ba60136382ba 100644 (file)
@@ -8,15 +8,18 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use std::raw::Slice;
+struct Slice<T> {
+    data: *const T,
+    len: usize,
+}
 
 fn main() {
     match () {
         Slice { data: data, len: len } => (),
         //~^ ERROR mismatched types
         //~| expected type `()`
-        //~| found type `std::raw::Slice<_>`
-        //~| expected (), found struct `std::raw::Slice`
+        //~| found type `Slice<_>`
+        //~| expected (), found struct `Slice`
         _ => unreachable!()
     }
 }
index 3f96a9c342283d0678db124b5d7bfe0e4ca200bc..dac1625159748d81fcb72e92062ee1b505b9107f 100644 (file)
@@ -39,7 +39,6 @@ fn subscribe(&mut self, t : Box<Subscriber<Input=<Self as Publisher>::Output> +
         // Not obvious, but there is an implicit lifetime here -------^
         //~^^ ERROR cannot infer
         //~|  ERROR cannot infer
-        //~|  ERROR cannot infer
         //
         // The fact that `Publisher` is using an implicit lifetime is
         // what was causing the debruijn accounting to be off, so
index 5e816bcfa61e44493aa733db8782595ea55a13c6..d7ec1ed67397fe939d1585dc32b60f535a8958f2 100644 (file)
@@ -88,6 +88,6 @@ pub fn grow_older(other:usize) {
 fn main() {
     self += 1;
     //~^ ERROR: unresolved name `self`
-    //~| HELP: Module
+    //~| HELP: module `self`
     // it's a bug if this suggests a missing `self` as we're not in a method
 }
diff --git a/src/test/compile-fail/issue-33876.rs b/src/test/compile-fail/issue-33876.rs
new file mode 100644 (file)
index 0000000..d958907
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(reflect_marker)]
+
+use std::marker::Reflect;
+use std::any::Any;
+
+struct Foo;
+
+trait Bar {}
+
+impl Bar for Foo {}
+
+fn main() {
+    let any: &Any = &Bar; //~ ERROR E0425
+                          //~| HELP trait `Bar`
+    if any.is::<u32>() { println!("u32"); }
+}
index e0ea1ed74340783001fd75971980b68550f2e7bd..6b22d434804ffecafc2b56e71c8140e1ae6e5428 100644 (file)
@@ -14,7 +14,7 @@
 
 struct Bar<'x, 'y, 'z> { bar: &'y i32, baz: i32, marker: PhantomData<(&'x(),&'y(),&'z())> }
 fn bar1<'a>(x: &Bar) -> (&'a i32, &'a i32, &'a i32) {
-    //~^ HELP: consider using an explicit lifetime parameter as shown: fn bar1<'a>(x: &'a Bar) -> (&'a i32, &'a i32, &'a i32)
+    //~^ HELP consider using an explicit lifetime parameter as shown: fn bar1<'b, 'c, 'a>(x: &'a Bar<'b, 'a, 'c>) -> (&'a i32, &'a i32, &'a i32)
     (x.bar, &x.baz, &x.baz)
     //~^ ERROR E0312
     //~| ERROR cannot infer
index 73d89beb2202f84369e048588a346945b6cd2179..e34a3c4569d0a4c451d053ff13837bd0e9c7abad 100644 (file)
@@ -49,7 +49,7 @@ struct Baz<'x> {
 
 impl<'a> Baz<'a> {
     fn baz2<'b>(&self, x: &isize) -> (&'b isize, &'b isize) {
-        //~^ HELP consider using an explicit lifetime parameter as shown: fn baz2<'b>(&self, x: &'b isize) -> (&'a isize, &'a isize)
+        //~^ HELP consider using an explicit lifetime parameter as shown: fn baz2<'b>(&self, x: &'a isize) -> (&'a isize, &'a isize)
         (self.bar, x) //~ ERROR E0312
         //~^ ERROR E0312
     }
index 58c0791b84ec5b25c05e30dc063c1a6a444e9b4c..7107211fc914b47479b2569515fee614191cca08 100644 (file)
@@ -9,13 +9,12 @@
 // except according to those terms.
 
 #![allow(dead_code)]
-#![feature(recover)]
 
-use std::panic::RecoverSafe;
+use std::panic::UnwindSafe;
 use std::rc::Rc;
 use std::cell::RefCell;
 
-fn assert<T: RecoverSafe + ?Sized>() {}
+fn assert<T: UnwindSafe + ?Sized>() {}
 
 fn main() {
     assert::<Rc<RefCell<i32>>>();
index 481ffb802812a02f30718384c32fa9b4f6690d57..76c34e4dc0b448613ddaef1d2041ad8bc15f18dd 100644 (file)
@@ -9,13 +9,12 @@
 // except according to those terms.
 
 #![allow(dead_code)]
-#![feature(recover)]
 
-use std::panic::RecoverSafe;
+use std::panic::UnwindSafe;
 use std::sync::Arc;
 use std::cell::RefCell;
 
-fn assert<T: RecoverSafe + ?Sized>() {}
+fn assert<T: UnwindSafe + ?Sized>() {}
 
 fn main() {
     assert::<Arc<RefCell<i32>>>();
index 47302d3af78b2909d8159a4d3fb5dab267267e23..177a43e2a7f71650a74955496b37f75cfc1b508f 100644 (file)
@@ -9,12 +9,11 @@
 // except according to those terms.
 
 #![allow(dead_code)]
-#![feature(recover)]
 
-use std::panic::RecoverSafe;
+use std::panic::UnwindSafe;
 use std::cell::RefCell;
 
-fn assert<T: RecoverSafe + ?Sized>() {}
+fn assert<T: UnwindSafe + ?Sized>() {}
 
 fn main() {
     assert::<&RefCell<i32>>();
index 0301c8dd935c7fe71876b226c9d18041028c5959..627a0fe78cf06ae9bb8cb6c56fdbd637e4eb27e6 100644 (file)
@@ -9,12 +9,11 @@
 // except according to those terms.
 
 #![allow(dead_code)]
-#![feature(recover)]
 
-use std::panic::RecoverSafe;
+use std::panic::UnwindSafe;
 use std::cell::UnsafeCell;
 
-fn assert<T: RecoverSafe + ?Sized>() {}
+fn assert<T: UnwindSafe + ?Sized>() {}
 
 fn main() {
     assert::<*const UnsafeCell<i32>>(); //~ ERROR E0277
index fe13b0a75c9eb18dd7f7de6ad99b7e42c74e7543..f03e1d545a8083786f324118f27ebc25eb9f9a94 100644 (file)
@@ -9,12 +9,11 @@
 // except according to those terms.
 
 #![allow(dead_code)]
-#![feature(recover)]
 
-use std::panic::RecoverSafe;
+use std::panic::UnwindSafe;
 use std::cell::RefCell;
 
-fn assert<T: RecoverSafe + ?Sized>() {}
+fn assert<T: UnwindSafe + ?Sized>() {}
 
 fn main() {
     assert::<*mut RefCell<i32>>();
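
The hunks above track the rename of `RecoverSafe` to `UnwindSafe` (and, elsewhere, of `recover` to `catch_unwind`). A minimal sketch of the renamed API, assuming today's stable `std::panic` (illustrative, not part of the diff):

    use std::panic::{self, AssertUnwindSafe};

    fn main() {
        let mut counter = 0;
        // `catch_unwind` requires its closure to be `UnwindSafe`; the
        // `AssertUnwindSafe` wrapper opts the captured `&mut counter` back in.
        let result = panic::catch_unwind(AssertUnwindSafe(|| {
            counter += 1;
            panic!("boom");
        }));
        assert!(result.is_err());
        assert_eq!(counter, 1);
    }
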
index 924044647d84a08473c547d8da2366d2bb8b34b6..b70ec59420db1f7cf0347ccd993194285521b1e7 100644 (file)
@@ -19,7 +19,7 @@
 fn make_object1<A:SomeTrait>(v: A) -> Box<SomeTrait+'static> {
     box v as Box<SomeTrait+'static>
         //~^ ERROR the parameter type `A` may not live long enough
-        //~^^ ERROR the parameter type `A` may not live long enough
+        //~| ERROR the parameter type `A` may not live long enough
 }
 
 fn make_object2<'a,A:SomeTrait+'a>(v: A) -> Box<SomeTrait+'a> {
@@ -29,7 +29,7 @@ fn make_object2<'a,A:SomeTrait+'a>(v: A) -> Box<SomeTrait+'a> {
 fn make_object3<'a,'b,A:SomeTrait+'a>(v: A) -> Box<SomeTrait+'b> {
     box v as Box<SomeTrait+'b>
         //~^ ERROR the parameter type `A` may not live long enough
-        //~^^ ERROR the parameter type `A` may not live long enough
+        //~| ERROR the parameter type `A` may not live long enough
 }
 
 fn main() { }
index f6a0c86de6626ec01f16d39d37ad13d375acdb07..eaf9a750570dbdb8966c100fe112068a9f02827c 100644 (file)
@@ -28,11 +28,7 @@ fn get(&self) -> &'a isize {
 impl<'a> Box<'a> {
     fn or<'b,G:GetRef<'b>>(&self, g2: G) -> &'a isize {
         g2.get()
-        //~^ ERROR mismatched types
-        //~| expected type `&'a isize`
-        //~| found type `&'b isize`
-        //~| lifetime mismatch
-
+        //~^ ERROR E0312
     }
 }
 
index 1fc3b4b3c6a620279653f39d68b0143be1961817..90a3395004776a2310e0164af183973d322ab323 100644 (file)
@@ -27,7 +27,7 @@ fn get(&self) -> &'a T {
 
 fn get<'a,'b,G:GetRef<'a, isize>>(g1: G, b: &'b isize) -> &'b isize {
     g1.get()
-    //~^ ERROR mismatched types
+    //~^ ERROR E0312
 }
 
 fn main() {
index ced0afcebd97908554e2aecb1210e59c31cdcfd5..87b5efbfadd865c5e1afa30690f1106f65b460b0 100644 (file)
@@ -10,7 +10,7 @@
 
 
 struct Invariant<'a> {
-    f: Box<for<'b> FnOnce() -> &'b mut &'a isize + 'static>,
+    f: Box<FnOnce() -> *mut &'a isize + 'static>,
 }
 
 fn to_same_lifetime<'r>(b_isize: Invariant<'r>) {
index 412c90fd214c17ae5c3a585e0cd3d22fc032d4d9..4a816ea75727669eef145b565c648e82b4a8d9b7 100644 (file)
@@ -26,51 +26,51 @@ pub fn g() -> i32 { 4 }
 fn h1() -> i32 {
     a.I
         //~^ ERROR E0425
-        //~| HELP To reference an item from the `a` module, use `a::I`
+        //~| HELP to reference an item from the `a` module, use `a::I`
 }
 
 fn h2() -> i32 {
     a.g()
         //~^ ERROR E0425
-        //~| HELP To call a function from the `a` module, use `a::g(..)`
+        //~| HELP to call a function from the `a` module, use `a::g(..)`
 }
 
 fn h3() -> i32 {
     a.b.J
         //~^ ERROR E0425
-        //~| HELP To reference an item from the `a` module, use `a::b`
+        //~| HELP to reference an item from the `a` module, use `a::b`
 }
 
 fn h4() -> i32 {
     a::b.J
         //~^ ERROR E0425
-        //~| HELP To reference an item from the `a::b` module, use `a::b::J`
+        //~| HELP to reference an item from the `a::b` module, use `a::b::J`
 }
 
 fn h5() {
     a.b.f();
         //~^ ERROR E0425
-        //~| HELP To reference an item from the `a` module, use `a::b`
+        //~| HELP to reference an item from the `a` module, use `a::b`
     let v = Vec::new();
     v.push(a::b);
         //~^ ERROR E0425
-        //~| HELP Module `a::b` cannot be used as an expression
+        //~| HELP module `a::b` cannot be used as an expression
 }
 
 fn h6() -> i32 {
     a::b.f()
         //~^ ERROR E0425
-        //~| HELP To call a function from the `a::b` module, use `a::b::f(..)`
+        //~| HELP to call a function from the `a::b` module, use `a::b::f(..)`
 }
 
 fn h7() {
     a::b
         //~^ ERROR E0425
-        //~| HELP Module `a::b` cannot be used as an expression
+        //~| HELP module `a::b` cannot be used as an expression
 }
 
 fn h8() -> i32 {
     a::b()
         //~^ ERROR E0425
-        //~| HELP Module `a::b` cannot be used as an expression
+        //~| HELP module `a::b` cannot be used as an expression
 }
diff --git a/src/test/incremental/struct_add_field.rs b/src/test/incremental/struct_add_field.rs
new file mode 100644 (file)
index 0000000..cc8ef8a
--- /dev/null
@@ -0,0 +1,48 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test incremental compilation tracking where we add a struct field
+// in between revisions (hashing should be stable).
+
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+pub struct X {
+    pub x: u32,
+
+    #[cfg(rpass2)]
+    pub x2: u32,
+}
+
+pub struct EmbedX {
+    x: X
+}
+
+pub struct Y {
+    pub y: char
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_X(x: X) -> u32 {
+    x.x as u32
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_EmbedX(embed: EmbedX) -> u32 {
+    embed.x.x as u32
+}
+
+#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_Y() {
+    let x: Y = Y { y: 'c' };
+}
+
+pub fn main() { }
diff --git a/src/test/incremental/struct_change_field_name.rs b/src/test/incremental/struct_change_field_name.rs
new file mode 100644 (file)
index 0000000..fe29ad6
--- /dev/null
@@ -0,0 +1,55 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test incremental compilation tracking where we change field names
+// in between revisions (hashing should be stable).
+
+// revisions:rpass1 cfail2
+
+#![feature(rustc_attrs)]
+
+#[cfg(rpass1)]
+pub struct X {
+    pub x: u32
+}
+
+#[cfg(cfail2)]
+pub struct X {
+    pub y: u32
+}
+
+pub struct EmbedX {
+    x: X
+}
+
+pub struct Y {
+    pub y: char
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="cfail2")]
+pub fn use_X() -> u32 {
+    let x: X = X { x: 22 };
+    //[cfail2]~^ ERROR structure `X` has no field named `x`
+    x.x as u32
+    //[cfail2]~^ ERROR attempted access of field `x`
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="cfail2")]
+pub fn use_EmbedX(embed: EmbedX) -> u32 {
+    embed.x.x as u32
+    //[cfail2]~^ ERROR attempted access of field `x`
+}
+
+#[rustc_clean(label="TypeckItemBody", cfg="cfail2")]
+pub fn use_Y() {
+    let x: Y = Y { y: 'c' };
+}
+
+pub fn main() { }
diff --git a/src/test/incremental/struct_change_field_type.rs b/src/test/incremental/struct_change_field_type.rs
new file mode 100644 (file)
index 0000000..1a50d51
--- /dev/null
@@ -0,0 +1,53 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test incremental compilation tracking where we change the type of a
+// struct field in between revisions (hashing should be stable).
+
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+#[cfg(rpass1)]
+pub struct X {
+    pub x: u32
+}
+
+#[cfg(rpass2)]
+pub struct X {
+    pub x: i32
+}
+
+pub struct EmbedX {
+    x: X
+}
+
+pub struct Y {
+    pub y: char
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_X() -> u32 {
+    let x: X = X { x: 22 };
+    x.x as u32
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_EmbedX(x: EmbedX) -> u32 {
+    let x: X = X { x: 22 };
+    x.x as u32
+}
+
+#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_Y() {
+    let x: Y = Y { y: 'c' };
+}
+
+pub fn main() { }
diff --git a/src/test/incremental/struct_change_field_type_cross_crate/auxiliary/a.rs b/src/test/incremental/struct_change_field_type_cross_crate/auxiliary/a.rs
new file mode 100644 (file)
index 0000000..2ddcaf1
--- /dev/null
@@ -0,0 +1,29 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type="rlib"]
+
+#[cfg(rpass1)]
+pub struct X {
+    pub x: u32
+}
+
+#[cfg(rpass2)]
+pub struct X {
+    pub x: i32
+}
+
+pub struct EmbedX {
+    pub x: X
+}
+
+pub struct Y {
+    pub y: char
+}
diff --git a/src/test/incremental/struct_change_field_type_cross_crate/b.rs b/src/test/incremental/struct_change_field_type_cross_crate/b.rs
new file mode 100644 (file)
index 0000000..7a4900d
--- /dev/null
@@ -0,0 +1,36 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:a.rs
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+extern crate a;
+
+use a::*;
+
+#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_X() -> u32 {
+    let x: X = X { x: 22 };
+    x.x as u32
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_EmbedX(embed: EmbedX) -> u32 {
+    embed.x.x as u32
+}
+
+#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_Y() {
+    let x: Y = Y { y: 'c' };
+}
+
+pub fn main() { }
diff --git a/src/test/incremental/struct_change_nothing.rs b/src/test/incremental/struct_change_nothing.rs
new file mode 100644 (file)
index 0000000..8095e1e
--- /dev/null
@@ -0,0 +1,53 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test incremental compilation tracking where we change nothing
+// in between revisions (hashing should be stable).
+
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+#[cfg(rpass1)]
+pub struct X {
+    pub x: u32
+}
+
+#[cfg(rpass2)]
+pub struct X {
+    pub x: u32
+}
+
+pub struct EmbedX {
+    x: X
+}
+
+pub struct Y {
+    pub y: char
+}
+
+#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_X() -> u32 {
+    let x: X = X { x: 22 };
+    x.x as u32
+}
+
+#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_EmbedX(x: EmbedX) -> u32 {
+    let x: X = X { x: 22 };
+    x.x as u32
+}
+
+#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_Y() {
+    let x: Y = Y { y: 'c' };
+}
+
+pub fn main() { }
diff --git a/src/test/incremental/struct_remove_field.rs b/src/test/incremental/struct_remove_field.rs
new file mode 100644 (file)
index 0000000..ae63994
--- /dev/null
@@ -0,0 +1,52 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test incremental compilation tracking where we remove a struct field
+// in between revisions (hashing should be stable).
+
+// revisions:rpass1 rpass2
+
+#![feature(rustc_attrs)]
+
+#[cfg(rpass1)]
+pub struct X {
+    pub x: u32,
+    pub x2: u32,
+}
+
+#[cfg(rpass2)]
+pub struct X {
+    pub x: u32,
+}
+
+pub struct EmbedX {
+    x: X
+}
+
+pub struct Y {
+    pub y: char
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_X(x: X) -> u32 {
+    x.x as u32
+}
+
+#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_EmbedX(embed: EmbedX) -> u32 {
+    embed.x.x as u32
+}
+
+#[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+pub fn use_Y() {
+    let x: Y = Y { y: 'c' };
+}
+
+pub fn main() { }
index 446d97e5bc0548bb995d3153842148bdf54c27dd..e1dba1317703d6c448b969d1327a3884b8cc113c 100644 (file)
@@ -16,4 +16,8 @@
 #[cfg(rpass2)]
 pub type X = i32;
 
+// this version doesn't actually change anything:
+#[cfg(rpass3)]
+pub type X = i32;
+
 pub type Y = char;
index b4e9b7601010a9e69c5b0f1d5ff0b41f278f768c..c5421fcbf5cb2310362327ad5ffe0f9e1e8b470e 100644 (file)
@@ -9,19 +9,21 @@
 // except according to those terms.
 
 // aux-build:a.rs
-// revisions:rpass1 rpass2
+// revisions:rpass1 rpass2 rpass3
 
 #![feature(rustc_attrs)]
 
 extern crate a;
 
 #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
+#[rustc_clean(label="TypeckItemBody", cfg="rpass3")]
 pub fn use_X() -> u32 {
     let x: a::X = 22;
     x as u32
 }
 
 #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
+#[rustc_clean(label="TypeckItemBody", cfg="rpass3")]
 pub fn use_Y() {
     let x: a::Y = 'c';
 }
index b8ff1be71bf2b0e231161fd90680656c0f412d9c..87f77681fa02dd3b2a36b3191aab3a76b8cdee0f 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// error-pattern:thread '<main>' panicked at
+// error-pattern:thread 'main' panicked at
 
 fn main() {
     panic!()
index c989cc594536ba11c02a1a65a218eef1dc01929c..26cc9eda04634d2ec8d49ae73048f78c82a5038b 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'arithmetic operation overflowed'
+// error-pattern:thread 'main' panicked at 'arithmetic operation overflowed'
 // compile-flags: -C debug-assertions
 
 
index a27210112982a6a76f932f21264f2d757ce8efe8..4648f5c9c79d502db564f1318935a8b133703000 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
index fe0bcc5b98545f62ec02fae195cd229d648a4437..12741864eda9e80bed623d3e4702001e304cf9f3 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
index aac220d32d9ce4dfa51e12670fb866914d31d5b7..76e029bab5219c89f7593f148dc80e11b7f0915c 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
index 7e8b266da49bee83dcbd81e26ba980107861a028..a9ee4b882532b1326bfae28ae1d40ab9c686b837 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 // This function is checking that our automatic truncation does not
index 8cba700bbf9a3e0b59faa56765dadaf25407a4b4..179622e49a6287ffa45bd31d53b5c802b328484b 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'arithmetic operation overflowed'
+// error-pattern:thread 'main' panicked at 'arithmetic operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![feature(rustc_attrs)]
index 2d9d746bef324517277816ca0a88064601a8ea48..2bc625f692e53449e1e18ee6a891a097e2d83467 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'attempted to negate with overflow'
+// error-pattern:thread 'main' panicked at 'attempted to negate with overflow'
 // compile-flags: -C debug-assertions
 
 #![feature(rustc_attrs)]
index 15335b8dfb12e4d6a9912e8db3fe297cc1af1b75..e9fea9e1141edb801b59078aabca03202c69439e 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// error-pattern:thread '<main>' panicked at 'arithmetic operation overflowed'
+// error-pattern:thread 'main' panicked at 'arithmetic operation overflowed'
 // compile-flags: -C debug-assertions
 
 fn main() {
index 63c808dc80a4ecebe1b0caa986d2f519d70d0b57..d37ea693a9fcf382627ea7a44be462c17737bc08 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
index 8b89e57c85bb5e33e254d83c4ae244e271dc4347..a4b7028a474dc97f4406140ddf9194a4674cf1dd 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
index 8874587064c35ec26f45bf7cc1e4ebdc09f565ba..199da59eb53fd2336745e1d37e1858a022b57221 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
index d74fd8a6b8e417abcd3d87c568668898dbea34c0..d0d89a310e26b7e78db0a68c6459eb522cfaaef0 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 // This function is checking that our (type-based) automatic
index 249b952a5dca2643fec1d751066561c8327a3e64..03588c3576ad3952883991555a431f27542148e0 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
index 1227f35444a60399a1f60125f405c1e3079fbf6c..914f6d2b5c4ce0115ecd29bea95b5e9e31200cc6 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'shift operation overflowed'
+// error-pattern:thread 'main' panicked at 'shift operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![warn(exceeding_bitshifts)]
index ce243a50e0b66f2065b84c038af2279041d09404..7eec7699d99c4d102d976f23398dcd2f3809ef99 100644 (file)
@@ -10,7 +10,7 @@
 
 // ignore-pretty : (#23623) problems when  ending with // comments
 
-// error-pattern:thread '<main>' panicked at 'arithmetic operation overflowed'
+// error-pattern:thread 'main' panicked at 'arithmetic operation overflowed'
 // compile-flags: -C debug-assertions
 
 #![feature(rustc_attrs)]
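
All of the run-fail headers above are updated for the same reason: the runtime now reports the main thread as 'main' instead of '<main>' in panic messages, so the error-pattern directives must match the new wording. A minimal stand-alone illustration (not taken from the patch) of the message shape they now expect:

    fn main() {
        // Aborts the process with output along the lines of:
        //   thread 'main' panicked at 'boom', src/main.rs:4
        // where older toolchains printed thread '<main>' instead.
        panic!("boom");
    }
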
index bfeb407dd25a439dbfb51497fb1fe2883de0cc4e..b589544ae156359eb62e260d9f232856090ebf89 100644 (file)
 
 // error-pattern:greetings from the panic handler
 
-#![feature(std_panic, panic_handler)]
+#![feature(panic_handler)]
+
 use std::panic;
 use std::io::{self, Write};
 
 fn main() {
-    panic::set_handler(|i| {
+    panic::set_hook(Box::new(|i| {
         write!(io::stderr(), "greetings from the panic handler");
-    });
+    }));
     panic!("foobar");
 }
index 6999aa715e791b7d2346581afe5cb9c2bab60b15..6741c2d9c2c2028d40bf1c252e6e22f0738e9e15 100644 (file)
@@ -8,16 +8,17 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// error-pattern:thread '<main>' panicked at 'foobar'
+// error-pattern:thread 'main' panicked at 'foobar'
+
+#![feature(panic_handler)]
 
-#![feature(std_panic, panic_handler)]
 use std::panic;
 use std::io::{self, Write};
 
 fn main() {
-    panic::set_handler(|i| {
+    panic::set_hook(Box::new(|i| {
         write!(io::stderr(), "greetings from the panic handler");
-    });
-    panic::take_handler();
+    }));
+    panic::take_hook();
     panic!("foobar");
 }
index fec1db24adf09cc50446bff45b74ef0eb0a2f6b7..0add63c6d64f22db63633884f735b119c47f4676 100644 (file)
@@ -8,12 +8,13 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// error-pattern:thread '<main>' panicked at 'foobar'
+// error-pattern:thread 'main' panicked at 'foobar'
+
+#![feature(panic_handler)]
 
-#![feature(std_panic, panic_handler)]
 use std::panic;
 
 fn main() {
-    panic::take_handler();
+    panic::take_hook();
     panic!("foobar");
 }
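
The three panic-handler tests above migrate from the retired panic::set_handler/take_handler names to the hook API that replaced them. A small self-contained sketch of that API, keeping the nightly feature gate these tests still carry at the time of this commit:

    #![feature(panic_handler)] // gate still required on this era's nightly

    use std::io::{self, Write};
    use std::panic;

    fn main() {
        // set_hook installs a boxed closure that runs on every panic.
        panic::set_hook(Box::new(|_info| {
            let _ = writeln!(io::stderr(), "greetings from the panic hook");
        }));
        // take_hook removes the installed hook (returning it) and restores the default.
        let _previous = panic::take_hook();
        panic!("foobar");
    }
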
diff --git a/src/test/run-make/dep-info-no-analysis/Makefile b/src/test/run-make/dep-info-no-analysis/Makefile
deleted file mode 100644 (file)
index 5d2cfad..0000000
+++ /dev/null
@@ -1,6 +0,0 @@
--include ../tools.mk
-
-all:
-       $(RUSTC) -o $(TMPDIR)/input.dd -Z no-analysis --emit dep-info input.rs
-       sed -i'.bak' 's/^.*input.dd/input.dd/g' $(TMPDIR)/input.dd
-       diff -u $(TMPDIR)/input.dd input.dd
diff --git a/src/test/run-make/dep-info-no-analysis/input.dd b/src/test/run-make/dep-info-no-analysis/input.dd
deleted file mode 100644 (file)
index f2c8676..0000000
+++ /dev/null
@@ -1,3 +0,0 @@
-input.dd: input.rs
-
-input.rs:
diff --git a/src/test/run-make/dep-info-no-analysis/input.rs b/src/test/run-make/dep-info-no-analysis/input.rs
deleted file mode 100644 (file)
index 523b0f0..0000000
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Tests that dep info can be emitted without resolving external crates.
-extern crate not_there;
-
-fn main() {}
index 0e84a0f52218c4bb75c9e01cec89c21135dfa2e5..8a7959212f54adb97b4e0b9ebd96fa8ad43da400 100644 (file)
@@ -238,15 +238,9 @@ fn compile_program(input: &str, sysroot: PathBuf)
 
         let krate = panictry!(driver::phase_1_parse_input(&sess, cfg, &input));
 
-        let krate = driver::phase_2_configure_and_expand(&sess, &cstore, krate, &id, None)
-            .expect("phase_2 returned `None`");
-
-        let krate = driver::assign_node_ids(&sess, krate);
-        let mut defs = ast_map::collect_definitions(&krate);
-        read_local_crates(&sess, &cstore, &defs, &krate, &id, &dep_graph);
-        let (analysis, resolutions, mut hir_forest) = {
-            driver::lower_and_resolve(&sess, &id, &mut defs, &krate,
-                                      &sess.dep_graph, MakeGlobMap::No)
+        let driver::ExpansionResult { defs, analysis, resolutions, mut hir_forest, .. } = {
+            driver::phase_2_configure_and_expand(&sess, &cstore, krate, &id, None, MakeGlobMap::No)
+                .expect("phase_2 returned `None`")
         };
 
         let arenas = ty::CtxtArenas::new();
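
This caller now gets everything it needs from the single ExpansionResult returned by phase_2_configure_and_expand, destructured directly in the let binding. The field names are taken from the hunk above; the types below are hypothetical stand-ins, shown only to illustrate the let-destructuring pattern (per-field mut plus a trailing .. to ignore the rest), not rustc's real definitions:

    // Hypothetical stand-ins, not rustc's actual types.
    struct ExpansionResult {
        defs: Vec<String>,
        analysis: String,
        resolutions: String,
        hir_forest: Vec<String>,
        other: u32,
    }

    fn phase_2() -> ExpansionResult {
        ExpansionResult {
            defs: vec!["a".to_string()],
            analysis: String::from("analysis"),
            resolutions: String::from("resolutions"),
            hir_forest: vec![],
            other: 0,
        }
    }

    fn main() {
        // A let binding can destructure a struct, mark individual bindings
        // as mutable, and ignore the remaining fields with `..`.
        let ExpansionResult { defs, analysis, resolutions, mut hir_forest, .. } = phase_2();
        hir_forest.push(analysis);
        hir_forest.push(resolutions);
        println!("{} defs, {} forest entries", defs.len(), hir_forest.len());
    }
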
index 7fbd8dc4786baf1de47c2d7735e6700c8fe11b18..93d3345a8099121041059f62c9fbdf535ece6218 100644 (file)
@@ -8,10 +8,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![feature(recover, rand, std_panic)]
+#![feature(rand, std_panic)]
 
 use std::__rand::{thread_rng, Rng};
-use std::panic::{self, AssertRecoverSafe};
+use std::panic::{self, AssertUnwindSafe};
 
 use std::collections::BinaryHeap;
 use std::cmp;
@@ -70,8 +70,8 @@ fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
             {
                 // push the panicking item to the heap and catch the panic
                 let thread_result = {
-                    let mut heap_ref = AssertRecoverSafe(&mut heap);
-                    panic::recover(move || {
+                    let mut heap_ref = AssertUnwindSafe(&mut heap);
+                    panic::catch_unwind(move || {
                         heap_ref.push(panic_item);
                     })
                 };
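
This BinaryHeap test moves from the pre-stabilization recover/AssertRecoverSafe names to catch_unwind/AssertUnwindSafe, which were stabilized around this time. A minimal sketch of the same pattern on its own: a mutable borrow is not unwind-safe by default, so it is wrapped before crossing the catch_unwind boundary.

    use std::panic::{self, AssertUnwindSafe};

    fn main() {
        let mut items = vec![1, 2, 3];
        let result = {
            // &mut borrows are not UnwindSafe; AssertUnwindSafe opts this one in.
            let mut items_ref = AssertUnwindSafe(&mut items);
            panic::catch_unwind(move || {
                items_ref.push(4); // mutate, then panic inside the closure
                panic!("boom");
            })
        };
        assert!(result.is_err());
        // The mutation made before the panic is still observable afterwards.
        assert_eq!(items, [1, 2, 3, 4]);
    }
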
index 082a39f56312ec5a56b5607a2509941289e95d56..eb04514271c7532891401dcbaf8e7e82f9307aa3 100644 (file)
@@ -15,10 +15,10 @@ trait Contravariant {
     fn foo(&self) { }
 }
 
-impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) {
+impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
 }
 
-impl Contravariant for for<'a> fn(&'a u8, &'a u8) {
+impl Contravariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
 }
 
 ///////////////////////////////////////////////////////////////////////////
@@ -27,10 +27,10 @@ trait Covariant {
     fn foo(&self) { }
 }
 
-impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) {
+impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
 }
 
-impl Covariant for for<'a> fn(&'a u8, &'a u8) {
+impl Covariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
 }
 
 ///////////////////////////////////////////////////////////////////////////
@@ -39,10 +39,10 @@ trait Invariant {
     fn foo(&self) { }
 }
 
-impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) {
+impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) -> &'a u8 {
 }
 
-impl Invariant for for<'a> fn(&'a u8, &'a u8) {
+impl Invariant for for<'a> fn(&'a u8, &'a u8) -> &'a u8 {
 }
 
 fn main() { }
index a31df0fd93e16a1aaca8508d97feec1bf52ae3bb..2f8ecbe693f0752263d959df10d476b3db94554b 100644 (file)
@@ -458,7 +458,7 @@ struct S<'a> {
 }
 
 impl<'a> Named for S<'a> {
-    fn new<'b>(name: &'static str) -> S<'b> {
+    fn new(name: &'static str) -> S<'a> {
         S { name: name, mark: Cell::new(0), next: Cell::new(None) }
     }
     fn name(&self) -> &str { self.name }
@@ -476,7 +476,7 @@ struct S2<'a> {
 }
 
 impl<'a> Named for S2<'a> {
-    fn new<'b>(name: &'static str) -> S2<'b> {
+    fn new(name: &'static str) -> S2<'a> {
         S2 { name: name, mark: Cell::new(0), next: Cell::new((None, None)) }
     }
     fn name(&self) -> &str { self.name }
@@ -496,7 +496,7 @@ struct V<'a> {
 }
 
 impl<'a> Named for V<'a> {
-    fn new<'b>(name: &'static str) -> V<'b> {
+    fn new(name: &'static str) -> V<'a> {
         V { name: name,
             mark: Cell::new(0),
             contents: vec![Cell::new(None), Cell::new(None)]
@@ -518,7 +518,7 @@ struct H<'a> {
 }
 
 impl<'a> Named for H<'a> {
-    fn new<'b>(name: &'static str) -> H<'b> {
+    fn new(name: &'static str) -> H<'a> {
         H { name: name, mark: Cell::new(0), next: Cell::new(None) }
     }
     fn name(&self) -> &str { self.name }
@@ -549,7 +549,7 @@ struct HM<'a> {
 }
 
 impl<'a> Named for HM<'a> {
-    fn new<'b>(name: &'static str) -> HM<'b> {
+    fn new(name: &'static str) -> HM<'a> {
         HM { name: name,
              mark: Cell::new(0),
              contents: Cell::new(None)
@@ -583,7 +583,7 @@ struct VD<'a> {
 }
 
 impl<'a> Named for VD<'a> {
-    fn new<'b>(name: &'static str) -> VD<'b> {
+    fn new(name: &'static str) -> VD<'a> {
         VD { name: name,
              mark: Cell::new(0),
              contents: Cell::new(None)
@@ -604,7 +604,7 @@ struct VM<'a> {
 }
 
 impl<'a> Named for VM<'a> {
-    fn new<'b>(name: &'static str) -> VM<'b> {
+    fn new(name: &'static str) -> VM<'a> {
         VM { name: name,
              mark: Cell::new(0),
              contents: Cell::new(None)
@@ -625,7 +625,7 @@ struct LL<'a> {
 }
 
 impl<'a> Named for LL<'a> {
-    fn new<'b>(name: &'static str) -> LL<'b> {
+    fn new(name: &'static str) -> LL<'a> {
         LL { name: name,
              mark: Cell::new(0),
              contents: Cell::new(None)
@@ -646,7 +646,7 @@ struct BH<'a> {
 }
 
 impl<'a> Named for BH<'a> {
-    fn new<'b>(name: &'static str) -> BH<'b> {
+    fn new(name: &'static str) -> BH<'a> {
         BH { name: name,
              mark: Cell::new(0),
              contents: Cell::new(None)
@@ -687,7 +687,7 @@ struct BTM<'a> {
 }
 
 impl<'a> Named for BTM<'a> {
-    fn new<'b>(name: &'static str) -> BTM<'b> {
+    fn new(name: &'static str) -> BTM<'a> {
         BTM { name: name,
              mark: Cell::new(0),
              contents: Cell::new(None)
@@ -728,7 +728,7 @@ struct BTS<'a> {
 }
 
 impl<'a> Named for BTS<'a> {
-    fn new<'b>(name: &'static str) -> BTS<'b> {
+    fn new(name: &'static str) -> BTS<'a> {
         BTS { name: name,
              mark: Cell::new(0),
              contents: Cell::new(None)
diff --git a/src/test/run-pass/exhaustive-bool-match-sanity.rs b/src/test/run-pass/exhaustive-bool-match-sanity.rs
new file mode 100644 (file)
index 0000000..d88a5f1
--- /dev/null
@@ -0,0 +1,34 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Issue #33540
+// We previously used to generate a 3-armed boolean `SwitchInt` in the
+// MIR of the function `foo` below. #33583 changed rustc to
+// generate an `If` terminator instead. This test is to just ensure
+// sanity in that we generate an if-else chain giving the correct
+// results.
+
+#![feature(rustc_attrs)]
+
+#[rustc_mir]
+fn foo(x: bool, y: bool) -> u32 {
+    match (x, y) {
+        (false, _) => 0,
+        (_, false) => 1,
+        (true, true) => 2
+    }
+}
+
+fn main() {
+    assert_eq!(foo(false, true), 0);
+    assert_eq!(foo(false, false), 0);
+    assert_eq!(foo(true, false), 1);
+    assert_eq!(foo(true, true), 2);
+}
diff --git a/src/test/run-pass/issue-33770.rs b/src/test/run-pass/issue-33770.rs
new file mode 100644 (file)
index 0000000..f5635fd
--- /dev/null
@@ -0,0 +1,100 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::process::{Command, Stdio};
+use std::env;
+use std::sync::{Mutex, RwLock};
+use std::time::Duration;
+use std::thread;
+
+fn test_mutex() {
+    let m = Mutex::new(0);
+    let _g = m.lock().unwrap();
+    let _g2 = m.lock().unwrap();
+}
+
+fn test_try_mutex() {
+    let m = Mutex::new(0);
+    let _g = m.lock().unwrap();
+    let _g2 = m.try_lock().unwrap();
+}
+
+fn test_rwlock_ww() {
+    let m = RwLock::new(0);
+    let _g = m.write().unwrap();
+    let _g2 = m.write().unwrap();
+}
+
+fn test_try_rwlock_ww() {
+    let m = RwLock::new(0);
+    let _g = m.write().unwrap();
+    let _g2 = m.try_write().unwrap();
+}
+
+fn test_rwlock_rw() {
+    let m = RwLock::new(0);
+    let _g = m.read().unwrap();
+    let _g2 = m.write().unwrap();
+}
+
+fn test_try_rwlock_rw() {
+    let m = RwLock::new(0);
+    let _g = m.read().unwrap();
+    let _g2 = m.try_write().unwrap();
+}
+
+fn test_rwlock_wr() {
+    let m = RwLock::new(0);
+    let _g = m.write().unwrap();
+    let _g2 = m.read().unwrap();
+}
+
+fn test_try_rwlock_wr() {
+    let m = RwLock::new(0);
+    let _g = m.write().unwrap();
+    let _g2 = m.try_read().unwrap();
+}
+
+fn main() {
+    let args: Vec<String> = env::args().collect();
+    if args.len() > 1 {
+        match &*args[1] {
+            "mutex" => test_mutex(),
+            "try_mutex" => test_try_mutex(),
+            "rwlock_ww" => test_rwlock_ww(),
+            "try_rwlock_ww" => test_try_rwlock_ww(),
+            "rwlock_rw" => test_rwlock_rw(),
+            "try_rwlock_rw" => test_try_rwlock_rw(),
+            "rwlock_wr" => test_rwlock_wr(),
+            "try_rwlock_wr" => test_try_rwlock_wr(),
+            _ => unreachable!(),
+        }
+        // If we reach this point then the test failed
+        println!("TEST FAILED: {}", args[1]);
+    } else {
+        let mut v = vec![];
+        v.push(Command::new(&args[0]).arg("mutex").stderr(Stdio::null()).spawn().unwrap());
+        v.push(Command::new(&args[0]).arg("try_mutex").stderr(Stdio::null()).spawn().unwrap());
+        v.push(Command::new(&args[0]).arg("rwlock_ww").stderr(Stdio::null()).spawn().unwrap());
+        v.push(Command::new(&args[0]).arg("try_rwlock_ww").stderr(Stdio::null()).spawn().unwrap());
+        v.push(Command::new(&args[0]).arg("rwlock_rw").stderr(Stdio::null()).spawn().unwrap());
+        v.push(Command::new(&args[0]).arg("try_rwlock_rw").stderr(Stdio::null()).spawn().unwrap());
+        v.push(Command::new(&args[0]).arg("rwlock_wr").stderr(Stdio::null()).spawn().unwrap());
+        v.push(Command::new(&args[0]).arg("try_rwlock_wr").stderr(Stdio::null()).spawn().unwrap());
+
+        thread::sleep(Duration::new(1, 0));
+
+        // Make sure all subprocesses either panicked or were killed because they deadlocked
+        for mut c in v {
+            c.kill().ok();
+            assert!(!c.wait().unwrap().success());
+        }
+    }
+}
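
The new issue-33770 test runs each scenario in a child process because every one of them is expected to panic or deadlock: std's Mutex and RwLock are not reentrant, so a second acquisition from the thread that already holds a conflicting guard never succeeds. A terminating sketch of the same property, using try_lock so the program can finish:

    use std::sync::Mutex;

    fn main() {
        let m = Mutex::new(0);
        let _guard = m.lock().unwrap();
        // While _guard is alive, the lock cannot be acquired again;
        // try_lock reports this instead of blocking the way lock() would.
        assert!(m.try_lock().is_err());
    }
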
index 8e0b14128c83b0896cb93b218ad8c5b61829c460..93e2a854ccb2a3efe0bbf5b0e544e8a94f1cd381 100644 (file)
@@ -15,7 +15,7 @@ fn check_for_no_backtrace(test: std::process::Output) {
 
     assert_eq!(it.next().map(|l| l.starts_with("thread '<unnamed>' panicked at")), Some(true));
     assert_eq!(it.next(), Some("note: Run with `RUST_BACKTRACE=1` for a backtrace."));
-    assert_eq!(it.next().map(|l| l.starts_with("thread '<main>' panicked at")), Some(true));
+    assert_eq!(it.next().map(|l| l.starts_with("thread 'main' panicked at")), Some(true));
     assert_eq!(it.next(), None);
 }
 
index 2c87c6b92686ca6beff5ec22a731364b745bb352..0210017b47efb63f20519c12d648a24777f4f353 100644 (file)
 
 // ignore-emscripten no threads support
 
-#![feature(std_panic, recover, panic_propagate, panic_handler, const_fn)]
+#![feature(panic_handler)]
 
-use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
 use std::panic;
 use std::thread;
 
-static A: AtomicUsize = AtomicUsize::new(0);
+static A: AtomicUsize = ATOMIC_USIZE_INIT;
 
 fn main() {
-    panic::set_handler(|_| {
+    panic::set_hook(Box::new(|_| {
         A.fetch_add(1, Ordering::SeqCst);
-    });
+    }));
 
     let result = thread::spawn(|| {
-        let result = panic::recover(|| {
+        let result = panic::catch_unwind(|| {
             panic!("hi there");
         });
 
-        panic::propagate(result.unwrap_err());
+        panic::resume_unwind(result.unwrap_err());
     }).join();
 
     let msg = *result.unwrap_err().downcast::<&'static str>().unwrap();
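
This hunk applies the same renames to the propagation test: recover becomes catch_unwind, propagate becomes resume_unwind, the hook is installed via set_hook, and the static is initialized with ATOMIC_USIZE_INIT, matching the removal of the const_fn feature. For reference, the core catch-and-rethrow shape of the updated test as a stand-alone program (hook and counter omitted):

    use std::panic;
    use std::thread;

    fn main() {
        let outcome = thread::spawn(|| {
            // catch_unwind turns the panic into Err(payload)...
            let caught = panic::catch_unwind(|| panic!("hi there"));
            // ...and resume_unwind re-raises that payload, so join() below
            // observes the original message.
            panic::resume_unwind(caught.unwrap_err());
        })
        .join();

        let msg = *outcome.unwrap_err().downcast::<&'static str>().unwrap();
        assert_eq!(msg, "hi there");
    }
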
diff --git a/src/test/run-pass/project-cache-issue-31849.rs b/src/test/run-pass/project-cache-issue-31849.rs
new file mode 100644 (file)
index 0000000..d03424b
--- /dev/null
@@ -0,0 +1,75 @@
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #31849: the problem here was actually a performance
+// cliff, but I'm adding the test for reference.
+
+pub trait Upcast<T> {
+    fn upcast(self) -> T;
+}
+
+impl<S1, S2, T1, T2> Upcast<(T1, T2)> for (S1,S2)
+    where S1: Upcast<T1>,
+          S2: Upcast<T2>,
+{
+    fn upcast(self) -> (T1, T2) { (self.0.upcast(), self.1.upcast()) }
+}
+
+impl Upcast<()> for ()
+{
+    fn upcast(self) -> () { () }
+}
+
+pub trait ToStatic {
+    type Static: 'static;
+    fn to_static(self) -> Self::Static where Self: Sized;
+}
+
+impl<T, U> ToStatic for (T, U)
+    where T: ToStatic,
+          U: ToStatic
+{
+    type Static = (T::Static, U::Static);
+    fn to_static(self) -> Self::Static { (self.0.to_static(), self.1.to_static()) }
+}
+
+impl ToStatic for ()
+{
+    type Static = ();
+    fn to_static(self) -> () { () }
+}
+
+
+trait Factory {
+    type Output;
+    fn build(&self) -> Self::Output;
+}
+
+impl<S,T> Factory for (S, T)
+    where S: Factory,
+          T: Factory,
+          S::Output: ToStatic,
+          <S::Output as ToStatic>::Static: Upcast<S::Output>,
+{
+    type Output = (S::Output, T::Output);
+    fn build(&self) -> Self::Output { (self.0.build().to_static().upcast(), self.1.build()) }
+}
+
+impl Factory for () {
+    type Output = ();
+    fn build(&self) -> Self::Output { () }
+}
+
+fn main() {
+    // More parens, more time.
+    let it = ((((((((((),()),()),()),()),()),()),()),()),());
+    it.build();
+}
+
index 8a23403359f9a148f16ecafd46a0ad802b7e099b..75a2e36ffb7a4a3f1ad0d5497fab556a5a0c4063 100644 (file)
@@ -10,8 +10,6 @@
 
 // aux-build:reachable-unnameable-items.rs
 
-#![feature(recover)]
-
 extern crate reachable_unnameable_items;
 use reachable_unnameable_items::*;
 
@@ -37,5 +35,5 @@ fn main() {
 
     let none = None;
     function_accepting_unnameable_type(none);
-    let _guard = std::panic::recover(|| none.unwrap().method_of_unnameable_type3());
+    let _guard = std::panic::catch_unwind(|| none.unwrap().method_of_unnameable_type3());
 }
index c67bc8c8368e8738191e24ed30e0fac935892095..23d5a08e216443bf5e8a5b74c04fc4e5ab969491 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![feature(std_panic, recover, start)]
+#![feature(start)]
 
 use std::ffi::CStr;
 use std::process::{Command, Output};
@@ -22,8 +22,8 @@ fn start(argc: isize, argv: *const *const u8) -> isize {
             match **argv.offset(1) as char {
                 '1' => {}
                 '2' => println!("foo"),
-                '3' => assert!(panic::recover(|| {}).is_ok()),
-                '4' => assert!(panic::recover(|| panic!()).is_err()),
+                '3' => assert!(panic::catch_unwind(|| {}).is_ok()),
+                '4' => assert!(panic::catch_unwind(|| panic!()).is_err()),
                 '5' => assert!(Command::new("test").spawn().is_err()),
                 _ => panic!()
             }
index 4d369ba4d872db10d68159c890bd72c48e6ed335..0f751501293f538cdda52298994a86599dc74363 100644 (file)
@@ -23,8 +23,6 @@ pub fn main() {
     assert_eq!(s.chars().count(), 4);
     assert_eq!(schs.len(), 4);
     assert_eq!(schs.iter().cloned().collect::<String>(), s);
-    assert_eq!(s.char_at(0), 'e');
-    assert_eq!(s.char_at(1), 'é');
 
     assert!((str::from_utf8(s.as_bytes()).is_ok()));
     // invalid prefix
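
The deleted assertions exercised str::char_at, an unstable method on its way out; positional character access goes through the chars iterator instead. A brief sketch of the replacement using only stable str APIs:

    fn main() {
        let s = "eé€";
        // chars().nth(i) walks the UTF-8 sequence to the i-th character.
        assert_eq!(s.chars().nth(0), Some('e'));
        assert_eq!(s.chars().nth(1), Some('é'));
        // Byte-range slicing still works, but only on char boundaries:
        // 'é' occupies bytes 1..3 of this string.
        assert_eq!(&s[1..3], "é");
    }
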
index abd1709825c974b605f03b011c5113f0bb3afc25..0131563d36d35e05838561731a5ca21c156b7fbd 100644 (file)
@@ -8,60 +8,39 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// ignore-msvc -- sprintf isn't a symbol in msvcrt? maybe a #define?
-
-#![feature(libc, std_misc)]
-
-extern crate libc;
-
-use std::ffi::{CStr, CString};
-use libc::{c_char, c_int};
-
-
+#[link(name = "rust_test_helpers")]
 extern {
-    fn sprintf(s: *mut c_char, format: *const c_char, ...) -> c_int;
-}
-
-unsafe fn check<T, F>(expected: &str, f: F) where F: FnOnce(*mut c_char) -> T {
-    let mut x = [0 as c_char; 50];
-    f(&mut x[0] as *mut c_char);
-    assert_eq!(expected.as_bytes(), CStr::from_ptr(x.as_ptr()).to_bytes());
+    fn rust_interesting_average(_: u64, ...) -> f64;
 }
 
 pub fn main() {
-
+    // Call without variadic arguments
     unsafe {
-        // Call with just the named parameter
-        let c = CString::new(&b"Hello World\n"[..]).unwrap();
-        check("Hello World\n", |s| sprintf(s, c.as_ptr()));
-
-        // Call with variable number of arguments
-        let c = CString::new(&b"%d %f %c %s\n"[..]).unwrap();
-        check("42 42.500000 a %d %f %c %s\n\n", |s| {
-            sprintf(s, c.as_ptr(), 42, 42.5f64, 'a' as c_int, c.as_ptr());
-        });
+        assert!(rust_interesting_average(0).is_nan());
+    }
 
-        // Make a function pointer
-        let x: unsafe extern fn(*mut c_char, *const c_char, ...) -> c_int = sprintf;
+    // Call with direct arguments
+    unsafe {
+        assert_eq!(rust_interesting_average(1, 10i64, 10.0f64) as i64, 20);
+    }
 
-        // A function that takes a function pointer
-        unsafe fn call(fp: unsafe extern fn(*mut c_char, *const c_char, ...) -> c_int) {
-            // Call with just the named parameter
-            let c = CString::new(&b"Hello World\n"[..]).unwrap();
-            check("Hello World\n", |s| fp(s, c.as_ptr()));
+    // Call with named arguments, variable number of them
+    let (x1, x2, x3, x4) = (10i64, 10.0f64, 20i64, 20.0f64);
+    unsafe {
+        assert_eq!(rust_interesting_average(2, x1, x2, x3, x4) as i64, 30);
+    }
 
-            // Call with variable number of arguments
-            let c = CString::new(&b"%d %f %c %s\n"[..]).unwrap();
-            check("42 42.500000 a %d %f %c %s\n\n", |s| {
-                fp(s, c.as_ptr(), 42, 42.5f64, 'a' as c_int, c.as_ptr());
-            });
-        }
+    // A function that takes a function pointer
+    unsafe fn call(fp: unsafe extern fn(u64, ...) -> f64) {
+        let (x1, x2, x3, x4) = (10i64, 10.0f64, 20i64, 20.0f64);
+        assert_eq!(fp(2, x1, x2, x3, x4) as i64, 30);
+    }
 
-        // Pass sprintf directly
-        call(sprintf);
+    unsafe {
+        call(rust_interesting_average);
 
-        // Pass sprintf indirectly
+        // Make a function pointer, pass indirectly
+        let x: unsafe extern fn(u64, ...) -> f64 = rust_interesting_average;
         call(x);
     }
-
 }
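
The variadic-FFI test no longer calls libc's sprintf, which the old header had to skip on MSVC because sprintf may not be an exported symbol there; it now links the in-tree rust_test_helpers library and exercises rust_interesting_average both directly and through an unsafe extern fn(u64, ...) -> f64 pointer. For reference, a stand-alone sketch of declaring and calling a C variadic function from Rust, using printf purely as a commonly available example (a similar MSVC caveat may apply):

    use std::ffi::CString;
    use std::os::raw::{c_char, c_int};

    extern "C" {
        // A variadic foreign function: fixed parameters first, then `...`.
        // Such functions can only be declared, not defined, in Rust of this era.
        fn printf(fmt: *const c_char, ...) -> c_int;
    }

    fn main() {
        let fmt = CString::new("%d plus %d is %d\n").unwrap();
        unsafe {
            // The i32 literals are passed as C ints, matching the %d specifiers.
            printf(fmt.as_ptr(), 1, 2, 3);
        }
    }
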
index 7e688b5ec6b461c62c2c8fa16c53ca524d904876..9332a8e5f6c06eb4e4da7aa5ad2851bd72cd4b8a 100644 (file)
@@ -25,7 +25,7 @@ struct Test {
     Test {
         name: "cargo",
         repo: "https://github.com/rust-lang/cargo",
-        sha: "26288f799427f9cc6e8bdddd782a17a8156ebc64",
+        sha: "7d79da08238e3d47e0bc4406155bdcc45ccb8c82",
         lock: None,
     },
     Test {