git.lizzy.rs Git - rust.git/commitdiff
Rollup merge of #51839 - oli-obk:const_shift_overflow, r=nikomatsakis
author     Mark Rousskov <mark.simulacrum@gmail.com>
           Thu, 28 Jun 2018 22:07:14 +0000 (16:07 -0600)
committer  GitHub <noreply@github.com>
           Thu, 28 Jun 2018 22:07:14 +0000 (16:07 -0600)
Detect overflows of non u32 shifts
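The rolled-up change (#51839) teaches constant evaluation to flag shift overflows whose right-hand side is not a u32. A minimal illustration of the class of constant that is now rejected at compile time; this snippet is an assumption for illustration, not taken from the patch or its tests:

    // The shift amount is a u16 rather than a u32, and 9 exceeds the width of
    // u8, so const evaluation reports an "attempt to shift left with overflow".
    const OVERFLOW: u8 = 1u8 << 9u16;

    fn main() {
        println!("{}", OVERFLOW);
    }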

275 files changed:
src/Cargo.lock
src/liballoc/lib.rs
src/liballoc/str.rs
src/libcore/alloc.rs
src/libcore/cell.rs
src/libcore/num/mod.rs
src/libcore/slice/mod.rs
src/libcore/str/mod.rs
src/libcore/tests/num/mod.rs
src/librustc/dep_graph/dep_node.rs
src/librustc/hir/intravisit.rs
src/librustc/hir/lowering.rs
src/librustc/hir/map/blocks.rs
src/librustc/hir/map/collector.rs
src/librustc/hir/map/def_collector.rs
src/librustc/hir/map/mod.rs
src/librustc/hir/mod.rs
src/librustc/hir/pat_util.rs
src/librustc/hir/print.rs
src/librustc/ich/fingerprint.rs
src/librustc/ich/impls_hir.rs
src/librustc/ich/impls_syntax.rs
src/librustc/ich/impls_ty.rs
src/librustc/infer/canonical.rs [deleted file]
src/librustc/infer/canonical/canonicalizer.rs [new file with mode: 0644]
src/librustc/infer/canonical/mod.rs [new file with mode: 0644]
src/librustc/infer/canonical/query_result.rs [new file with mode: 0644]
src/librustc/infer/canonical/substitute.rs [new file with mode: 0644]
src/librustc/infer/error_reporting/mod.rs
src/librustc/infer/error_reporting/need_type_info.rs
src/librustc/infer/error_reporting/nice_region_error/different_lifetimes.rs
src/librustc/infer/error_reporting/nice_region_error/mod.rs
src/librustc/infer/error_reporting/nice_region_error/named_anon_conflict.rs
src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs [new file with mode: 0644]
src/librustc/infer/error_reporting/nice_region_error/util.rs
src/librustc/infer/mod.rs
src/librustc/infer/outlives/bounds.rs
src/librustc/infer/outlives/mod.rs
src/librustc/infer/outlives/obligations.rs
src/librustc/lib.rs
src/librustc/macros.rs
src/librustc/middle/const_val.rs [deleted file]
src/librustc/middle/dead.rs
src/librustc/middle/liveness.rs
src/librustc/middle/mem_categorization.rs
src/librustc/middle/resolve_lifetime.rs
src/librustc/middle/stability.rs
src/librustc/middle/weak_lang_items.rs
src/librustc/mir/interpret/error.rs
src/librustc/mir/interpret/mod.rs
src/librustc/mir/interpret/value.rs
src/librustc/mir/mod.rs
src/librustc/traits/engine.rs
src/librustc/traits/error_reporting.rs
src/librustc/traits/fulfill.rs
src/librustc/traits/mod.rs
src/librustc/traits/object_safety.rs
src/librustc/traits/project.rs
src/librustc/traits/query/dropck_outlives.rs
src/librustc/traits/query/evaluate_obligation.rs
src/librustc/traits/query/mod.rs
src/librustc/traits/query/normalize.rs
src/librustc/traits/query/type_op/custom.rs [new file with mode: 0644]
src/librustc/traits/query/type_op/eq.rs [new file with mode: 0644]
src/librustc/traits/query/type_op/mod.rs [new file with mode: 0644]
src/librustc/traits/query/type_op/normalize.rs [new file with mode: 0644]
src/librustc/traits/query/type_op/outlives.rs [new file with mode: 0644]
src/librustc/traits/query/type_op/prove_predicate.rs [new file with mode: 0644]
src/librustc/traits/query/type_op/subtype.rs [new file with mode: 0644]
src/librustc/traits/specialize/mod.rs
src/librustc/traits/specialize/specialization_graph.rs
src/librustc/traits/structural_impls.rs
src/librustc/ty/codec.rs
src/librustc/ty/context.rs
src/librustc/ty/flags.rs
src/librustc/ty/fold.rs
src/librustc/ty/mod.rs
src/librustc/ty/query/config.rs
src/librustc/ty/query/keys.rs
src/librustc/ty/query/mod.rs
src/librustc/ty/query/on_disk_cache.rs
src/librustc/ty/query/plumbing.rs
src/librustc/ty/relate.rs
src/librustc/ty/structural_impls.rs
src/librustc/ty/sty.rs
src/librustc/ty/util.rs
src/librustc/ty/walk.rs
src/librustc/ty/wf.rs
src/librustc/util/ppaux.rs
src/librustc_allocator/expand.rs
src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs
src/librustc_borrowck/borrowck/unused.rs
src/librustc_codegen_llvm/back/wasm.rs
src/librustc_codegen_llvm/base.rs
src/librustc_codegen_llvm/mir/constant.rs
src/librustc_codegen_llvm/mir/operand.rs
src/librustc_driver/lib.rs
src/librustc_incremental/persist/file_format.rs
src/librustc_incremental/persist/save.rs
src/librustc_lint/bad_style.rs
src/librustc_lint/builtin.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/index.rs
src/librustc_metadata/lib.rs
src/librustc_mir/borrow_check/mod.rs
src/librustc_mir/borrow_check/nll/facts.rs
src/librustc_mir/borrow_check/nll/mod.rs
src/librustc_mir/borrow_check/nll/region_infer/mod.rs
src/librustc_mir/borrow_check/nll/type_check/constraint_conversion.rs
src/librustc_mir/borrow_check/nll/type_check/input_output.rs
src/librustc_mir/borrow_check/nll/type_check/liveness.rs
src/librustc_mir/borrow_check/nll/type_check/mod.rs
src/librustc_mir/borrow_check/nll/universal_regions.rs
src/librustc_mir/build/mod.rs
src/librustc_mir/hair/cx/mod.rs
src/librustc_mir/hair/pattern/_match.rs
src/librustc_mir/hair/pattern/check_match.rs
src/librustc_mir/hair/pattern/mod.rs
src/librustc_mir/interpret/const_eval.rs
src/librustc_mir/interpret/eval_context.rs
src/librustc_mir/interpret/memory.rs
src/librustc_mir/monomorphize/collector.rs
src/librustc_mir/monomorphize/item.rs
src/librustc_mir/transform/const_prop.rs
src/librustc_mir/transform/qualify_consts.rs
src/librustc_plugin/registry.rs
src/librustc_privacy/lib.rs
src/librustc_resolve/lib.rs
src/librustc_resolve/macros.rs
src/librustc_save_analysis/dump_visitor.rs
src/librustc_save_analysis/lib.rs
src/librustc_traits/chalk_context.rs
src/librustc_traits/dropck_outlives.rs
src/librustc_traits/evaluate_obligation.rs
src/librustc_traits/lib.rs
src/librustc_traits/lowering.rs
src/librustc_traits/normalize_erasing_regions.rs
src/librustc_traits/normalize_projection_ty.rs
src/librustc_traits/type_op.rs [new file with mode: 0644]
src/librustc_traits/util.rs [deleted file]
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/autoderef.rs
src/librustc_typeck/check/callee.rs
src/librustc_typeck/check/coercion.rs
src/librustc_typeck/check/compare_method.rs
src/librustc_typeck/check/demand.rs
src/librustc_typeck/check/dropck.rs
src/librustc_typeck/check/method/mod.rs
src/librustc_typeck/check/method/probe.rs
src/librustc_typeck/check/method/suggest.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/check/op.rs
src/librustc_typeck/check/wfcheck.rs
src/librustc_typeck/coherence/inherent_impls_overlap.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/impl_wf_check.rs
src/librustc_typeck/lib.rs
src/librustdoc/clean/auto_trait.rs
src/librustdoc/clean/inline.rs
src/librustdoc/clean/mod.rs
src/librustdoc/test.rs
src/libserialize/leb128.rs
src/libserialize/lib.rs
src/libserialize/opaque.rs
src/libstd/f32.rs
src/libstd/f64.rs
src/libstd/sys/unix/rand.rs
src/libstd/thread/local.rs
src/libsyntax/codemap.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/derive.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/source_util.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/feature_gate.rs
src/libsyntax/parse/lexer/comments.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/std_inject.rs
src/libsyntax/test.rs
src/libsyntax/test_snippet.rs
src/libsyntax_ext/lib.rs
src/libsyntax_ext/proc_macro_registrar.rs
src/libsyntax_pos/Cargo.toml
src/libsyntax_pos/analyze_filemap.rs [new file with mode: 0644]
src/libsyntax_pos/hygiene.rs
src/libsyntax_pos/lib.rs
src/libsyntax_pos/symbol.rs
src/test/compile-fail-fulldeps/proc-macro/macros-in-extern.rs
src/test/compile-fail/const-err-early.rs
src/test/compile-fail/const-err-multi.rs
src/test/compile-fail/const-err.rs
src/test/compile-fail/const-eval-overflow2.rs
src/test/compile-fail/const-eval-overflow2b.rs
src/test/compile-fail/const-eval-overflow2c.rs
src/test/compile-fail/edition-raw-pointer-method-2015.rs
src/test/compile-fail/edition-raw-pointer-method-2018.rs
src/test/compile-fail/macros-in-extern.rs
src/test/compile-fail/not-panic-safe-2.rs
src/test/compile-fail/not-panic-safe-3.rs
src/test/compile-fail/not-panic-safe-4.rs
src/test/compile-fail/not-panic-safe-6.rs
src/test/compile-fail/rfc-2126-extern-absolute-paths/non-existent-1.rs
src/test/compile-fail/rfc-2126-extern-absolute-paths/non-existent-2.rs
src/test/compile-fail/rfc-2126-extern-absolute-paths/non-existent-3.rs
src/test/compile-fail/rfc-2126-extern-absolute-paths/single-segment.rs
src/test/run-make-fulldeps/issue-51671/Makefile [new file with mode: 0644]
src/test/run-make-fulldeps/issue-51671/app.rs [new file with mode: 0644]
src/test/run-pass-fulldeps/auxiliary/plugin_args.rs
src/test/run-pass-fulldeps/issue-11881.rs
src/test/run-pass/async-await.rs
src/test/run-pass/auxiliary/edition-kw-macro-2015.rs
src/test/run-pass/auxiliary/edition-kw-macro-2018.rs
src/test/run-pass/edition-keywords-2015-2015.rs
src/test/run-pass/edition-keywords-2015-2018.rs
src/test/run-pass/edition-keywords-2018-2015.rs
src/test/run-pass/edition-keywords-2018-2018.rs
src/test/run-pass/rfc-2126-extern-absolute-paths/basic.rs
src/test/run-pass/rfc-2126-extern-absolute-paths/test.rs
src/test/ui/async-fn-multiple-lifetimes.rs
src/test/ui/auxiliary/edition-kw-macro-2015.rs
src/test/ui/auxiliary/edition-kw-macro-2018.rs
src/test/ui/const-eval/conditional_array_execution.nll.stderr
src/test/ui/const-eval/conditional_array_execution.rs
src/test/ui/const-eval/conditional_array_execution.stderr
src/test/ui/const-eval/ice-generic-assoc-const.rs [new file with mode: 0644]
src/test/ui/const-eval/issue-43197.nll.stderr
src/test/ui/const-eval/issue-43197.rs
src/test/ui/const-eval/issue-43197.stderr
src/test/ui/const-eval/issue-44578.nll.stderr
src/test/ui/const-eval/issue-44578.stderr
src/test/ui/const-eval/issue-50814-2.stderr
src/test/ui/const-eval/issue-50814.stderr
src/test/ui/const-eval/pub_const_err.rs
src/test/ui/const-eval/pub_const_err.stderr
src/test/ui/const-eval/pub_const_err_bin.rs
src/test/ui/const-eval/pub_const_err_bin.stderr
src/test/ui/const-len-underflow-separate-spans.rs
src/test/ui/const-len-underflow-separate-spans.stderr
src/test/ui/edition-keywords-2015-2015-expansion.rs
src/test/ui/edition-keywords-2015-2015-parsing.rs
src/test/ui/edition-keywords-2015-2018-expansion.rs
src/test/ui/edition-keywords-2015-2018-parsing.rs
src/test/ui/edition-keywords-2018-2015-expansion.rs
src/test/ui/edition-keywords-2018-2015-parsing.rs
src/test/ui/edition-keywords-2018-2018-expansion.rs
src/test/ui/edition-keywords-2018-2018-parsing.rs
src/test/ui/feature-gate-async-await-2015-edition.rs
src/test/ui/feature-gate-async-await.rs
src/test/ui/feature-gate-async-await.stderr
src/test/ui/feature-gate-macro_at_most_once_rep.rs
src/test/ui/feature-gate-macro_at_most_once_rep.stderr
src/test/ui/feature-gate-macros_in_extern.rs
src/test/ui/feature-gate-macros_in_extern.stderr
src/test/ui/feature-gate-unsized_tuple_coercion.rs
src/test/ui/feature-gate-unsized_tuple_coercion.stderr
src/test/ui/hygiene/auxiliary/local_inner_macros.rs [new file with mode: 0644]
src/test/ui/hygiene/local_inner_macros.rs [new file with mode: 0644]
src/test/ui/hygiene/local_inner_macros_disabled.rs [new file with mode: 0644]
src/test/ui/hygiene/local_inner_macros_disabled.stderr [new file with mode: 0644]
src/test/ui/impl-trait/static-return-lifetime-infered.nll.stderr [new file with mode: 0644]
src/test/ui/impl-trait/static-return-lifetime-infered.rs [new file with mode: 0644]
src/test/ui/impl-trait/static-return-lifetime-infered.stderr [new file with mode: 0644]
src/test/ui/lint-anon-param-edition.fixed
src/test/ui/lint-anon-param-edition.rs
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-no-bound.stderr
src/test/ui/nll/closure-requirements/propagate-approximated-shorter-to-static-wrong-bound.stderr
src/test/ui/nll/issue-50716-1.rs [new file with mode: 0644]
src/test/ui/nll/issue-50716.rs [new file with mode: 0644]
src/test/ui/nll/issue-50716.stderr [new file with mode: 0644]
src/test/ui/no-args-non-move-async-closure.rs
src/test/ui/suggestions/str-array-assignment.stderr
src/tools/compiletest/src/header.rs
src/tools/compiletest/src/runtest.rs

index b74587e566210aabb4878f9d67285590465753b5..a93390552641ce7b1306cd9a6402dbb13f0aad83 100644 (file)
@@ -2779,6 +2779,7 @@ name = "syntax_pos"
 version = "0.0.0"
 dependencies = [
  "arena 0.0.0",
+ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc_data_structures 0.0.0",
  "scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "serialize 0.0.0",
index e25742a4a61eb5f709c4d56bfabe7ccaa3cf7366..ec9b5eba561060a21efc741df7d0d9867d40796a 100644 (file)
 #![cfg_attr(stage0, feature(repr_transparent))]
 #![feature(rustc_attrs)]
 #![feature(specialization)]
+#![feature(split_ascii_whitespace)]
 #![feature(staged_api)]
 #![feature(str_internals)]
 #![feature(trusted_len)]
index 32ca8d1fa5eba835c83378ac15c52589310e72d5..ec9c39c916c4720badead169b96ad7a321588696 100644 (file)
@@ -78,6 +78,8 @@
 pub use core::str::pattern;
 #[stable(feature = "encode_utf16", since = "1.8.0")]
 pub use core::str::EncodeUtf16;
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+pub use core::str::SplitAsciiWhitespace;
 
 #[unstable(feature = "slice_concat_ext",
            reason = "trait should not have to exist",
index 0c074582281d6414617a30050bd82d5a20290160..91447e01ad4fabb6932022f22a8bd7f01f83c46f 100644 (file)
@@ -67,6 +67,8 @@ impl Layout {
     /// or returns `LayoutErr` if either of the following conditions
     /// are not met:
     ///
+    /// * `align` must not be zero,
+    ///
     /// * `align` must be a power of two,
     ///
     /// * `size`, when rounded up to the nearest multiple of `align`,
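The hunk above adds "align must not be zero" to the documented preconditions of Layout::from_size_align. A short sketch of how those conditions surface through the stable std::alloc re-export (assuming a current toolchain; not part of the patch):

    use std::alloc::Layout;

    fn main() {
        // The newly documented condition: a zero alignment is rejected...
        assert!(Layout::from_size_align(16, 0).is_err());
        // ...as is any non-power-of-two alignment,
        assert!(Layout::from_size_align(16, 3).is_err());
        // while a valid (size, align) pair yields a Layout.
        let layout = Layout::from_size_align(16, 8).unwrap();
        assert_eq!((layout.size(), layout.align()), (16, 8));
    }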
index dd8fce17cff903709409e3886d6d73626dcbccc0..21edc6dfee4e591677ff8f4eed37e317032c2a63 100644 (file)
@@ -570,20 +570,31 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
     }
 }
 
-// Values [1, MIN_WRITING-1] represent the number of `Ref` active. Values in
-// [MIN_WRITING, MAX-1] represent the number of `RefMut` active. Multiple
-// `RefMut`s can only be active at a time if they refer to distinct,
-// nonoverlapping components of a `RefCell` (e.g., different ranges of a slice).
+// Positive values represent the number of `Ref` active. Negative values
+// represent the number of `RefMut` active. Multiple `RefMut`s can only be
+// active at a time if they refer to distinct, nonoverlapping components of a
+// `RefCell` (e.g., different ranges of a slice).
 //
 // `Ref` and `RefMut` are both two words in size, and so there will likely never
 // be enough `Ref`s or `RefMut`s in existence to overflow half of the `usize`
-// range. Thus, a `BorrowFlag` will probably never overflow. However, this is
-// not a guarantee, as a pathological program could repeatedly create and then
-// mem::forget `Ref`s or `RefMut`s. Thus, all code must explicitly check for
-// overflow in order to avoid unsafety.
-type BorrowFlag = usize;
+// range. Thus, a `BorrowFlag` will probably never overflow or underflow.
+// However, this is not a guarantee, as a pathological program could repeatedly
+// create and then mem::forget `Ref`s or `RefMut`s. Thus, all code must
+// explicitly check for overflow and underflow in order to avoid unsafety, or at
+// least behave correctly in the event that overflow or underflow happens (e.g.,
+// see BorrowRef::new).
+type BorrowFlag = isize;
 const UNUSED: BorrowFlag = 0;
-const MIN_WRITING: BorrowFlag = (!0)/2 + 1; // 0b1000...
+
+#[inline(always)]
+fn is_writing(x: BorrowFlag) -> bool {
+    x < UNUSED
+}
+
+#[inline(always)]
+fn is_reading(x: BorrowFlag) -> bool {
+    x > UNUSED
+}
 
 impl<T> RefCell<T> {
     /// Creates a new `RefCell` containing `value`.
@@ -1022,12 +1033,11 @@ impl<'b> BorrowRef<'b> {
     #[inline]
     fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRef<'b>> {
         let b = borrow.get();
-        if b >= MIN_WRITING {
+        if is_writing(b) || b == isize::max_value() {
+            // If there's currently a writing borrow, or if incrementing the
+            // refcount would overflow into a writing borrow.
             None
         } else {
-            // Prevent the borrow counter from overflowing into
-            // a writing borrow.
-            assert!(b < MIN_WRITING - 1);
             borrow.set(b + 1);
             Some(BorrowRef { borrow })
         }
@@ -1038,7 +1048,7 @@ impl<'b> Drop for BorrowRef<'b> {
     #[inline]
     fn drop(&mut self) {
         let borrow = self.borrow.get();
-        debug_assert!(borrow < MIN_WRITING && borrow != UNUSED);
+        debug_assert!(is_reading(borrow));
         self.borrow.set(borrow - 1);
     }
 }
@@ -1047,12 +1057,12 @@ impl<'b> Clone for BorrowRef<'b> {
     #[inline]
     fn clone(&self) -> BorrowRef<'b> {
         // Since this Ref exists, we know the borrow flag
-        // is not set to WRITING.
+        // is a reading borrow.
         let borrow = self.borrow.get();
-        debug_assert!(borrow != UNUSED);
+        debug_assert!(is_reading(borrow));
         // Prevent the borrow counter from overflowing into
         // a writing borrow.
-        assert!(borrow < MIN_WRITING - 1);
+        assert!(borrow != isize::max_value());
         self.borrow.set(borrow + 1);
         BorrowRef { borrow: self.borrow }
     }
@@ -1251,12 +1261,8 @@ impl<'b> Drop for BorrowRefMut<'b> {
     #[inline]
     fn drop(&mut self) {
         let borrow = self.borrow.get();
-        debug_assert!(borrow >= MIN_WRITING);
-        self.borrow.set(if borrow == MIN_WRITING {
-            UNUSED
-        } else {
-            borrow - 1
-        });
+        debug_assert!(is_writing(borrow));
+        self.borrow.set(borrow + 1);
     }
 }
 
@@ -1266,10 +1272,10 @@ fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRefMut<'b>> {
         // NOTE: Unlike BorrowRefMut::clone, new is called to create the initial
         // mutable reference, and so there must currently be no existing
         // references. Thus, while clone increments the mutable refcount, here
-        // we simply go directly from UNUSED to MIN_WRITING.
+        // we explicitly only allow going from UNUSED to UNUSED - 1.
         match borrow.get() {
             UNUSED => {
-                borrow.set(MIN_WRITING);
+                borrow.set(UNUSED - 1);
                 Some(BorrowRefMut { borrow: borrow })
             },
             _ => None,
@@ -1284,10 +1290,10 @@ fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRefMut<'b>> {
     #[inline]
     fn clone(&self) -> BorrowRefMut<'b> {
         let borrow = self.borrow.get();
-        debug_assert!(borrow >= MIN_WRITING);
-        // Prevent the borrow counter from overflowing.
-        assert!(borrow != !0);
-        self.borrow.set(borrow + 1);
+        debug_assert!(is_writing(borrow));
+        // Prevent the borrow counter from underflowing.
+        assert!(borrow != isize::min_value());
+        self.borrow.set(borrow - 1);
         BorrowRefMut { borrow: self.borrow }
     }
 }
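The cell.rs hunk above replaces the usize BorrowFlag and its MIN_WRITING split point with an isize encoding: positive values count active Refs, negative values count active RefMuts, and 0 is UNUSED. The encoding is internal, but the observable RefCell contract it implements is unchanged; a short sketch of that behaviour (not part of the patch):

    use std::cell::RefCell;

    fn main() {
        let cell = RefCell::new(5);

        // Two shared borrows: the flag counts upward from UNUSED (0).
        let a = cell.try_borrow().unwrap();
        let b = cell.try_borrow().unwrap();
        assert_eq!((*a, *b), (5, 5));

        // While readers are active (flag > 0) a writer is refused.
        assert!(cell.try_borrow_mut().is_err());
        drop((a, b));

        // Back at UNUSED, a writer is allowed; the flag goes negative for the
        // lifetime of the RefMut, so readers are refused in turn.
        let mut m = cell.try_borrow_mut().unwrap();
        *m += 1;
        assert!(cell.try_borrow().is_err());
        drop(m);
        assert_eq!(*cell.borrow(), 6);
    }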
index 1168126c47c93a538c4a35a304533a0e3c20302b..7e2dd304d7f5a23b2fa2644d949eaf2711c9c1d5 100644 (file)
@@ -1996,12 +1996,10 @@ pub fn is_negative(self) -> bool { self < 0 }
         /// # Examples
         ///
         /// ```
-        /// #![feature(int_to_from_bytes)]
-        ///
         /// let bytes = i32::min_value().to_be().to_bytes();
         /// assert_eq!(bytes, [0x80, 0, 0, 0]);
         /// ```
-        #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+        #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
         #[inline]
         pub fn to_bytes(self) -> [u8; mem::size_of::<Self>()] {
             unsafe { mem::transmute(self) }
@@ -2018,12 +2016,10 @@ pub fn is_negative(self) -> bool { self < 0 }
         /// # Examples
         ///
         /// ```
-        /// #![feature(int_to_from_bytes)]
-        ///
         /// let int = i32::from_be(i32::from_bytes([0x80, 0, 0, 0]));
         /// assert_eq!(int, i32::min_value());
         /// ```
-        #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+        #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
         #[inline]
         pub fn from_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
             unsafe { mem::transmute(bytes) }
@@ -3702,12 +3698,10 @@ pub fn wrapping_next_power_of_two(self) -> Self {
         /// # Examples
         ///
         /// ```
-        /// #![feature(int_to_from_bytes)]
-        ///
         /// let bytes = 0x1234_5678_u32.to_be().to_bytes();
         /// assert_eq!(bytes, [0x12, 0x34, 0x56, 0x78]);
         /// ```
-        #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+        #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
         #[inline]
         pub fn to_bytes(self) -> [u8; mem::size_of::<Self>()] {
             unsafe { mem::transmute(self) }
@@ -3724,12 +3718,10 @@ pub fn wrapping_next_power_of_two(self) -> Self {
         /// # Examples
         ///
         /// ```
-        /// #![feature(int_to_from_bytes)]
-        ///
         /// let int = u32::from_be(u32::from_bytes([0x12, 0x34, 0x56, 0x78]));
         /// assert_eq!(int, 0x1234_5678_u32);
         /// ```
-        #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+        #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
         #[inline]
         pub fn from_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
             unsafe { mem::transmute(bytes) }
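The num/mod.rs hunk marks to_bytes/from_bytes as stable under the names they had on this nightly; the conversions that eventually shipped on stable carry endianness-explicit names (to_be_bytes/from_be_bytes for the .to_be() pattern used in the doc examples). A sketch with the later names, mirroring those examples; the renaming is noted here as context, not claimed by the patch:

    fn main() {
        // Big-endian byte view of i32::min_value(), as in the doc example.
        assert_eq!(i32::MIN.to_be_bytes(), [0x80, 0, 0, 0]);
        // ...and the round trip back.
        assert_eq!(i32::from_be_bytes([0x80, 0, 0, 0]), i32::MIN);

        // The u32 example from the same hunk.
        assert_eq!(0x1234_5678_u32.to_be_bytes(), [0x12, 0x34, 0x56, 0x78]);
        assert_eq!(u32::from_be_bytes([0x12, 0x34, 0x56, 0x78]), 0x1234_5678);
    }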
index e74e527927d7bc47cff1d9ea70820a31d651ac8c..0cbdbc4ad663af99c47a7838c2c66ff44e628641 100644 (file)
@@ -1642,8 +1642,8 @@ pub fn clone_from_slice(&mut self, src: &[T]) where T: Clone {
     /// [`split_at_mut`]: #method.split_at_mut
     #[stable(feature = "copy_from_slice", since = "1.9.0")]
     pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
-        assert!(self.len() == src.len(),
-                "destination and source slices have different lengths");
+        assert_eq!(self.len(), src.len(),
+                   "destination and source slices have different lengths");
         unsafe {
             ptr::copy_nonoverlapping(
                 src.as_ptr(), self.as_mut_ptr(), self.len());
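The slice hunk only swaps assert! for assert_eq!, so a length mismatch in copy_from_slice now panics with a message that also shows the two lengths. A small usage sketch (illustrative, not from the patch):

    fn main() {
        let src = [1u8, 2, 3, 4];
        let mut dst = [0u8; 4];

        // Lengths match, so the whole slice is copied.
        dst.copy_from_slice(&src);
        assert_eq!(dst, src);

        // A mismatch panics; with assert_eq! the message reports both lengths
        // (left = destination, right = source).
        // dst[..2].copy_from_slice(&src); // would panic: 2 != 4
    }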
index 42fb1bc238b8e3504440dd7af8df2b7cd48199d4..5ae2f6349e5b7e335846eeafb779c40e7ca106eb 100644 (file)
@@ -21,7 +21,7 @@
 use fmt;
 use iter::{Map, Cloned, FusedIterator, TrustedLen, Filter};
 use iter_private::TrustedRandomAccess;
-use slice::{self, SliceIndex};
+use slice::{self, SliceIndex, Split as SliceSplit};
 use mem;
 
 pub mod pattern;
@@ -2722,7 +2722,10 @@ pub fn bytes(&self) -> Bytes {
     /// the original string slice, separated by any amount of whitespace.
     ///
     /// 'Whitespace' is defined according to the terms of the Unicode Derived
-    /// Core Property `White_Space`.
+    /// Core Property `White_Space`. If you only want to split on ASCII whitespace
+    /// instead, use [`split_ascii_whitespace`].
+    ///
+    /// [`split_ascii_whitespace`]: #method.split_ascii_whitespace
     ///
     /// # Examples
     ///
@@ -2756,6 +2759,53 @@ pub fn split_whitespace(&self) -> SplitWhitespace {
         SplitWhitespace { inner: self.split(IsWhitespace).filter(IsNotEmpty) }
     }
 
+    /// Split a string slice by ASCII whitespace.
+    ///
+    /// The iterator returned will return string slices that are sub-slices of
+    /// the original string slice, separated by any amount of ASCII whitespace.
+    ///
+    /// To split by Unicode `Whitespace` instead, use [`split_whitespace`].
+    ///
+    /// [`split_whitespace`]: #method.split_whitespace
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// #![feature(split_ascii_whitespace)]
+    /// let mut iter = "A few words".split_ascii_whitespace();
+    ///
+    /// assert_eq!(Some("A"), iter.next());
+    /// assert_eq!(Some("few"), iter.next());
+    /// assert_eq!(Some("words"), iter.next());
+    ///
+    /// assert_eq!(None, iter.next());
+    /// ```
+    ///
+    /// All kinds of ASCII whitespace are considered:
+    ///
+    /// ```
+    /// let mut iter = " Mary   had\ta little  \n\t lamb".split_ascii_whitespace();
+    /// assert_eq!(Some("Mary"), iter.next());
+    /// assert_eq!(Some("had"), iter.next());
+    /// assert_eq!(Some("a"), iter.next());
+    /// assert_eq!(Some("little"), iter.next());
+    /// assert_eq!(Some("lamb"), iter.next());
+    ///
+    /// assert_eq!(None, iter.next());
+    /// ```
+    #[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+    #[inline]
+    pub fn split_ascii_whitespace(&self) -> SplitAsciiWhitespace {
+        let inner = self
+            .as_bytes()
+            .split(IsAsciiWhitespace)
+            .filter(IsNotEmpty)
+            .map(UnsafeBytesToStr);
+        SplitAsciiWhitespace { inner }
+    }
+
     /// An iterator over the lines of a string, as string slices.
     ///
     /// Lines are ended with either a newline (`\n`) or a carriage return with
@@ -3895,6 +3945,20 @@ pub struct SplitWhitespace<'a> {
     inner: Filter<Split<'a, IsWhitespace>, IsNotEmpty>,
 }
 
+/// An iterator over the non-ASCII-whitespace substrings of a string,
+/// separated by any amount of ASCII whitespace.
+///
+/// This struct is created by the [`split_ascii_whitespace`] method on [`str`].
+/// See its documentation for more.
+///
+/// [`split_ascii_whitespace`]: ../../std/primitive.str.html#method.split_ascii_whitespace
+/// [`str`]: ../../std/primitive.str.html
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+#[derive(Clone, Debug)]
+pub struct SplitAsciiWhitespace<'a> {
+    inner: Map<Filter<SliceSplit<'a, u8, IsAsciiWhitespace>, IsNotEmpty>, UnsafeBytesToStr>,
+}
+
 #[derive(Clone)]
 struct IsWhitespace;
 
@@ -3914,6 +3978,25 @@ extern "rust-call" fn call_mut(&mut self, arg: (char, )) -> bool {
     }
 }
 
+#[derive(Clone)]
+struct IsAsciiWhitespace;
+
+impl<'a> FnOnce<(&'a u8, )> for IsAsciiWhitespace {
+    type Output = bool;
+
+    #[inline]
+    extern "rust-call" fn call_once(mut self, arg: (&u8, )) -> bool {
+        self.call_mut(arg)
+    }
+}
+
+impl<'a> FnMut<(&'a u8, )> for IsAsciiWhitespace {
+    #[inline]
+    extern "rust-call" fn call_mut(&mut self, arg: (&u8, )) -> bool {
+        arg.0.is_ascii_whitespace()
+    }
+}
+
 #[derive(Clone)]
 struct IsNotEmpty;
 
@@ -3921,30 +4004,72 @@ impl<'a, 'b> FnOnce<(&'a &'b str, )> for IsNotEmpty {
     type Output = bool;
 
     #[inline]
-    extern "rust-call" fn call_once(mut self, arg: (&&str, )) -> bool {
+    extern "rust-call" fn call_once(mut self, arg: (&'a &'b str, )) -> bool {
         self.call_mut(arg)
     }
 }
 
 impl<'a, 'b> FnMut<(&'a &'b str, )> for IsNotEmpty {
     #[inline]
-    extern "rust-call" fn call_mut(&mut self, arg: (&&str, )) -> bool {
+    extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b str, )) -> bool {
+        !arg.0.is_empty()
+    }
+}
+
+impl<'a, 'b> FnOnce<(&'a &'b [u8], )> for IsNotEmpty {
+    type Output = bool;
+
+    #[inline]
+    extern "rust-call" fn call_once(mut self, arg: (&'a &'b [u8], )) -> bool {
+        self.call_mut(arg)
+    }
+}
+
+impl<'a, 'b> FnMut<(&'a &'b [u8], )> for IsNotEmpty {
+    #[inline]
+    extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b [u8], )) -> bool {
         !arg.0.is_empty()
     }
 }
 
+#[derive(Clone)]
+struct UnsafeBytesToStr;
+
+impl<'a> FnOnce<(&'a [u8], )> for UnsafeBytesToStr {
+    type Output = &'a str;
+
+    #[inline]
+    extern "rust-call" fn call_once(mut self, arg: (&'a [u8], )) -> &'a str {
+        self.call_mut(arg)
+    }
+}
+
+impl<'a> FnMut<(&'a [u8], )> for UnsafeBytesToStr {
+    #[inline]
+    extern "rust-call" fn call_mut(&mut self, arg: (&'a [u8], )) -> &'a str {
+        unsafe { from_utf8_unchecked(arg.0) }
+    }
+}
+
 
 #[stable(feature = "split_whitespace", since = "1.1.0")]
 impl<'a> Iterator for SplitWhitespace<'a> {
     type Item = &'a str;
 
+    #[inline]
     fn next(&mut self) -> Option<&'a str> {
         self.inner.next()
     }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
 }
 
 #[stable(feature = "split_whitespace", since = "1.1.0")]
 impl<'a> DoubleEndedIterator for SplitWhitespace<'a> {
+    #[inline]
     fn next_back(&mut self) -> Option<&'a str> {
         self.inner.next_back()
     }
@@ -3953,6 +4078,32 @@ fn next_back(&mut self) -> Option<&'a str> {
 #[stable(feature = "fused", since = "1.26.0")]
 impl<'a> FusedIterator for SplitWhitespace<'a> {}
 
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> Iterator for SplitAsciiWhitespace<'a> {
+    type Item = &'a str;
+
+    #[inline]
+    fn next(&mut self) -> Option<&'a str> {
+        self.inner.next()
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.inner.size_hint()
+    }
+}
+
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> DoubleEndedIterator for SplitAsciiWhitespace<'a> {
+    #[inline]
+    fn next_back(&mut self) -> Option<&'a str> {
+        self.inner.next_back()
+    }
+}
+
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> FusedIterator for SplitAsciiWhitespace<'a> {}
+
 /// An iterator of [`u16`] over the string encoded as UTF-16.
 ///
 /// [`u16`]: ../../std/primitive.u16.html
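Beyond the doc examples above, the point of the new iterator is that it only treats the ASCII whitespace bytes (space, \t, \n, \x0C, \r) as separators, while split_whitespace follows the Unicode White_Space property. A sketch of the difference, assuming a toolchain where split_ascii_whitespace is available (at the time of this commit it additionally needs #![feature(split_ascii_whitespace)]):

    fn main() {
        // U+00A0 (no-break space) is Unicode whitespace but not ASCII whitespace.
        let s = "a\u{00A0}b c";

        let unicode: Vec<&str> = s.split_whitespace().collect();
        assert_eq!(unicode, ["a", "b", "c"]);

        // The ASCII splitter leaves "a\u{00A0}b" intact and only splits on ' '.
        let ascii: Vec<&str> = s.split_ascii_whitespace().collect();
        assert_eq!(ascii, ["a\u{00A0}b", "c"]);
    }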
index b5e6a019a228c6b4e87932323fbc0114ccd29631..24fe96a2b82559313605802cdcba06bd6ea01b34 100644 (file)
@@ -574,6 +574,25 @@ fn max() {
             assert_eq!((-9.0 as $fty).max($nan), -9.0);
             assert!(($nan as $fty).max($nan).is_nan());
         }
+        #[test]
+        fn mod_euc() {
+            let a: $fty = 42.0;
+            assert!($inf.mod_euc(a).is_nan());
+            assert_eq!(a.mod_euc($inf), a);
+            assert!(a.mod_euc($nan).is_nan());
+            assert!($inf.mod_euc($inf).is_nan());
+            assert!($inf.mod_euc($nan).is_nan());
+            assert!($nan.mod_euc($inf).is_nan());
+        }
+        #[test]
+        fn div_euc() {
+            let a: $fty = 42.0;
+            assert_eq!(a.div_euc($inf), 0.0);
+            assert!(a.div_euc($nan).is_nan());
+            assert!($inf.div_euc($inf).is_nan());
+            assert!($inf.div_euc($nan).is_nan());
+            assert!($nan.div_euc($inf).is_nan());
+        }
     } }
 }
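The test hunk above pins down the NaN and infinity edge cases of the float mod_euc/div_euc operations. They later reached stable under the names rem_euclid/div_euclid; a sketch of the behaviour being tested, using those later names (the renaming is context, not part of this commit):

    fn main() {
        // Euclidean division keeps the remainder non-negative.
        let a: f64 = -7.0;
        assert_eq!(a.rem_euclid(4.0), 1.0); // -7 == (-2) * 4 + 1
        assert_eq!(a.div_euclid(4.0), -2.0);

        // The edge cases the new tests assert: NaN propagates, and dividing a
        // finite value by infinity gives 0 with the value itself as remainder.
        let x: f64 = 42.0;
        assert_eq!(x.rem_euclid(f64::INFINITY), x);
        assert_eq!(x.div_euclid(f64::INFINITY), 0.0);
        assert!(x.rem_euclid(f64::NAN).is_nan());
        assert!(f64::INFINITY.rem_euclid(x).is_nan());
        assert!(f64::INFINITY.div_euclid(f64::INFINITY).is_nan());
    }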
 
index 3a152ccd0c97131f40fc71c802849313619cdce5..6cc61d748001a191f9bf4428080f1881dc2f285e 100644 (file)
@@ -60,7 +60,7 @@
 //! user of the `DepNode` API of having to know how to compute the expected
 //! fingerprint for a given set of node parameters.
 
-use mir::interpret::{GlobalId, ConstValue};
+use mir::interpret::GlobalId;
 use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX};
 use hir::map::DefPathHash;
 use hir::{HirId, ItemLocalId};
 use std::fmt;
 use std::hash::Hash;
 use syntax_pos::symbol::InternedString;
-use traits::query::{CanonicalProjectionGoal,
-                    CanonicalTyGoal, CanonicalPredicateGoal};
-use ty::{TyCtxt, Instance, InstanceDef, ParamEnv, ParamEnvAnd, PolyTraitRef, Ty};
+use traits::query::{
+    CanonicalProjectionGoal, CanonicalTyGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpSubtypeGoal,
+    CanonicalPredicateGoal, CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpNormalizeGoal,
+};
+use ty::{TyCtxt, FnSig, Instance, InstanceDef,
+         ParamEnv, ParamEnvAnd, Predicate, PolyFnSig, PolyTraitRef, Ty, self};
 use ty::subst::Substs;
 
 // erase!() just makes tokens go away. It's used to specify which macro argument
@@ -629,7 +632,7 @@ pub fn fingerprint_needed_for_crate_hash(self) -> bool {
     // queries). Making them anonymous avoids hashing the result, which
     // may save a bit of time.
     [anon] EraseRegionsTy { ty: Ty<'tcx> },
-    [anon] ConstValueToAllocation { val: ConstValue<'tcx>, ty: Ty<'tcx> },
+    [anon] ConstValueToAllocation { val: &'tcx ty::Const<'tcx> },
 
     [input] Freevars(DefId),
     [input] MaybeUnusedTraitImport(DefId),
@@ -647,6 +650,13 @@ pub fn fingerprint_needed_for_crate_hash(self) -> bool {
     [] NormalizeTyAfterErasingRegions(ParamEnvAnd<'tcx, Ty<'tcx>>),
     [] DropckOutlives(CanonicalTyGoal<'tcx>),
     [] EvaluateObligation(CanonicalPredicateGoal<'tcx>),
+    [] TypeOpEq(CanonicalTypeOpEqGoal<'tcx>),
+    [] TypeOpSubtype(CanonicalTypeOpSubtypeGoal<'tcx>),
+    [] TypeOpProvePredicate(CanonicalTypeOpProvePredicateGoal<'tcx>),
+    [] TypeOpNormalizeTy(CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>),
+    [] TypeOpNormalizePredicate(CanonicalTypeOpNormalizeGoal<'tcx, Predicate<'tcx>>),
+    [] TypeOpNormalizePolyFnSig(CanonicalTypeOpNormalizeGoal<'tcx, PolyFnSig<'tcx>>),
+    [] TypeOpNormalizeFnSig(CanonicalTypeOpNormalizeGoal<'tcx, FnSig<'tcx>>),
 
     [] SubstituteNormalizeAndTestPredicates { key: (DefId, &'tcx Substs<'tcx>) },
 
index 2a351d99841ff1a5acd4faab6a9e3fd21a4b3ca5..f792a24503a3e054f536ab86f46ebf6c16eb75f0 100644 (file)
@@ -57,7 +57,7 @@ pub enum FnKind<'a> {
     ItemFn(Name, &'a Generics, FnHeader, &'a Visibility, &'a [Attribute]),
 
     /// fn foo(&self)
-    Method(Name, &'a MethodSig, Option<&'a Visibility>, &'a [Attribute]),
+    Method(Ident, &'a MethodSig, Option<&'a Visibility>, &'a [Attribute]),
 
     /// |x, y| {}
     Closure(&'a [Attribute]),
@@ -426,14 +426,14 @@ pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, ident: Ident) {
 }
 
 pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
-    visitor.visit_name(label.span, label.name);
+    visitor.visit_ident(label.ident);
 }
 
 pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
     visitor.visit_id(lifetime.id);
     match lifetime.name {
-        LifetimeName::Param(ParamName::Plain(name)) => {
-            visitor.visit_name(lifetime.span, name);
+        LifetimeName::Param(ParamName::Plain(ident)) => {
+            visitor.visit_ident(ident);
         }
         LifetimeName::Param(ParamName::Fresh(_)) |
         LifetimeName::Static |
@@ -644,7 +644,7 @@ pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) {
 pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
                                              path_span: Span,
                                              segment: &'v PathSegment) {
-    visitor.visit_name(path_span, segment.name);
+    visitor.visit_ident(segment.ident);
     if let Some(ref args) = segment.args {
         visitor.visit_generic_args(path_span, args);
     }
@@ -660,7 +660,7 @@ pub fn walk_generic_args<'v, V: Visitor<'v>>(visitor: &mut V,
 pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V,
                                                    type_binding: &'v TypeBinding) {
     visitor.visit_id(type_binding.id);
-    visitor.visit_name(type_binding.span, type_binding.name);
+    visitor.visit_ident(type_binding.ident);
     visitor.visit_ty(&type_binding.ty);
 }
 
@@ -689,9 +689,9 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) {
         PatKind::Ref(ref subpattern, _) => {
             visitor.visit_pat(subpattern)
         }
-        PatKind::Binding(_, canonical_id, ref pth1, ref optional_subpattern) => {
+        PatKind::Binding(_, canonical_id, ident, ref optional_subpattern) => {
             visitor.visit_def_mention(Def::Local(canonical_id));
-            visitor.visit_name(pth1.span, pth1.node);
+            visitor.visit_ident(ident);
             walk_list!(visitor, visit_pat, optional_subpattern);
         }
         PatKind::Lit(ref expression) => visitor.visit_expr(expression),
@@ -714,11 +714,11 @@ pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v
     visitor.visit_name(foreign_item.span, foreign_item.name);
 
     match foreign_item.node {
-        ForeignItemFn(ref function_declaration, ref names, ref generics) => {
+        ForeignItemFn(ref function_declaration, ref param_names, ref generics) => {
             visitor.visit_generics(generics);
             visitor.visit_fn_decl(function_declaration);
-            for name in names {
-                visitor.visit_name(name.span, name.node);
+            for &param_name in param_names {
+                visitor.visit_ident(param_name);
             }
         }
         ForeignItemStatic(ref typ, _) => visitor.visit_ty(typ),
@@ -741,7 +741,7 @@ pub fn walk_generic_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v Generi
     visitor.visit_id(param.id);
     walk_list!(visitor, visit_attribute, &param.attrs);
     match param.name {
-        ParamName::Plain(name) => visitor.visit_name(param.span, name),
+        ParamName::Plain(ident) => visitor.visit_ident(ident),
         ParamName::Fresh(_) => {}
     }
     match param.kind {
@@ -823,7 +823,7 @@ pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V,
 }
 
 pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
-    visitor.visit_name(trait_item.span, trait_item.name);
+    visitor.visit_ident(trait_item.ident);
     walk_list!(visitor, visit_attribute, &trait_item.attrs);
     visitor.visit_generics(&trait_item.generics);
     match trait_item.node {
@@ -832,15 +832,15 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai
             visitor.visit_ty(ty);
             walk_list!(visitor, visit_nested_body, default);
         }
-        TraitItemKind::Method(ref sig, TraitMethod::Required(ref names)) => {
+        TraitItemKind::Method(ref sig, TraitMethod::Required(ref param_names)) => {
             visitor.visit_id(trait_item.id);
             visitor.visit_fn_decl(&sig.decl);
-            for name in names {
-                visitor.visit_name(name.span, name.node);
+            for &param_name in param_names {
+                visitor.visit_ident(param_name);
             }
         }
         TraitItemKind::Method(ref sig, TraitMethod::Provided(body_id)) => {
-            visitor.visit_fn(FnKind::Method(trait_item.name,
+            visitor.visit_fn(FnKind::Method(trait_item.ident,
                                             sig,
                                             None,
                                             &trait_item.attrs),
@@ -859,9 +859,9 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai
 
 pub fn walk_trait_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_item_ref: &'v TraitItemRef) {
     // NB: Deliberately force a compilation error if/when new fields are added.
-    let TraitItemRef { id, name, ref kind, span, ref defaultness } = *trait_item_ref;
+    let TraitItemRef { id, ident, ref kind, span: _, ref defaultness } = *trait_item_ref;
     visitor.visit_nested_trait_item(id);
-    visitor.visit_name(span, name);
+    visitor.visit_ident(ident);
     visitor.visit_associated_item_kind(kind);
     visitor.visit_defaultness(defaultness);
 }
@@ -871,16 +871,16 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt
     let ImplItem {
         id: _,
         hir_id: _,
-        name,
+        ident,
         ref vis,
         ref defaultness,
         ref attrs,
         ref generics,
         ref node,
-        span
+        span: _,
     } = *impl_item;
 
-    visitor.visit_name(span, name);
+    visitor.visit_ident(ident);
     visitor.visit_vis(vis);
     visitor.visit_defaultness(defaultness);
     walk_list!(visitor, visit_attribute, attrs);
@@ -892,7 +892,7 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt
             visitor.visit_nested_body(body);
         }
         ImplItemKind::Method(ref sig, body_id) => {
-            visitor.visit_fn(FnKind::Method(impl_item.name,
+            visitor.visit_fn(FnKind::Method(impl_item.ident,
                                             sig,
                                             Some(&impl_item.vis),
                                             &impl_item.attrs),
@@ -910,9 +910,9 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt
 
 pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'v ImplItemRef) {
     // NB: Deliberately force a compilation error if/when new fields are added.
-    let ImplItemRef { id, name, ref kind, span, ref vis, ref defaultness } = *impl_item_ref;
+    let ImplItemRef { id, ident, ref kind, span: _, ref vis, ref defaultness } = *impl_item_ref;
     visitor.visit_nested_impl_item(id);
-    visitor.visit_name(span, name);
+    visitor.visit_ident(ident);
     visitor.visit_associated_item_kind(kind);
     visitor.visit_vis(vis);
     visitor.visit_defaultness(defaultness);
index 110ebf6b215a15ab4d840564d6ee0c3210d892a1..260ad00328344210dab0d6ca555640647571036c 100644 (file)
@@ -52,7 +52,7 @@
 use rustc_data_structures::indexed_vec::IndexVec;
 use session::Session;
 use util::common::FN_OUTPUT_NAME;
-use util::nodemap::{DefIdMap, FxHashMap, NodeMap};
+use util::nodemap::{DefIdMap, NodeMap};
 
 use std::collections::{BTreeMap, HashSet};
 use std::fmt::Debug;
@@ -85,7 +85,6 @@ pub struct LoweringContext<'a> {
     cstore: &'a CrateStore,
 
     resolver: &'a mut Resolver,
-    name_map: FxHashMap<Ident, Name>,
 
     /// The items being lowered are collected here.
     items: BTreeMap<NodeId, hir::Item>,
@@ -138,7 +137,7 @@ pub struct LoweringContext<'a> {
     // When `is_collectin_in_band_lifetimes` is true, each lifetime is checked
     // against this list to see if it is already in-scope, or if a definition
     // needs to be created for it.
-    in_scope_lifetimes: Vec<Name>,
+    in_scope_lifetimes: Vec<Ident>,
 
     type_def_lifetime_params: DefIdMap<usize>,
 
@@ -210,7 +209,6 @@ pub fn lower_crate(
         sess,
         cstore,
         resolver,
-        name_map: FxHashMap(),
         items: BTreeMap::new(),
         trait_items: BTreeMap::new(),
         impl_items: BTreeMap::new(),
@@ -604,8 +602,8 @@ fn diagnostic(&self) -> &errors::Handler {
         self.sess.diagnostic()
     }
 
-    fn str_to_ident(&self, s: &'static str) -> Name {
-        Symbol::gensym(s)
+    fn str_to_ident(&self, s: &'static str) -> Ident {
+        Ident::with_empty_ctxt(Symbol::gensym(s))
     }
 
     fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span {
@@ -616,6 +614,7 @@ fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) ->
             format: codemap::CompilerDesugaring(reason),
             allow_internal_unstable: true,
             allow_internal_unsafe: false,
+            local_inner_macros: false,
             edition: codemap::hygiene::default_edition(),
         });
         span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
@@ -677,15 +676,15 @@ fn collect_in_band_defs<T, F>(
                 // that collisions are ok here and this shouldn't
                 // really show up for end-user.
                 let str_name = match hir_name {
-                    ParamName::Plain(name) => name.as_str(),
-                    ParamName::Fresh(_) => keywords::UnderscoreLifetime.name().as_str(),
+                    ParamName::Plain(ident) => ident.as_interned_str(),
+                    ParamName::Fresh(_) => keywords::UnderscoreLifetime.name().as_interned_str(),
                 };
 
                 // Add a definition for the in-band lifetime def
                 self.resolver.definitions().create_def_with_parent(
                     parent_id.index,
                     def_node_id,
-                    DefPathData::LifetimeParam(str_name.as_interned_str()),
+                    DefPathData::LifetimeParam(str_name),
                     DefIndexAddressSpace::High,
                     Mark::root(),
                     span,
@@ -711,22 +710,23 @@ fn collect_in_band_defs<T, F>(
     /// lifetimes are enabled, then we want to push that lifetime into
     /// the vector of names to define later. In that case, it will get
     /// added to the appropriate generics.
-    fn maybe_collect_in_band_lifetime(&mut self, span: Span, name: Name) {
+    fn maybe_collect_in_band_lifetime(&mut self, ident: Ident) {
         if !self.is_collecting_in_band_lifetimes {
             return;
         }
 
-        if self.in_scope_lifetimes.contains(&name) {
+        if self.in_scope_lifetimes.contains(&ident.modern()) {
             return;
         }
 
-        let hir_name = ParamName::Plain(name);
+        let hir_name = ParamName::Plain(ident);
 
-        if self.lifetimes_to_define.iter().any(|(_, lt_name)| *lt_name == hir_name) {
+        if self.lifetimes_to_define.iter()
+                                   .any(|(_, lt_name)| lt_name.modern() == hir_name.modern()) {
             return;
         }
 
-        self.lifetimes_to_define.push((span, hir_name));
+        self.lifetimes_to_define.push((ident.span, hir_name));
     }
 
     /// When we have either an elided or `'_` lifetime in an impl
@@ -749,7 +749,7 @@ fn with_in_scope_lifetime_defs<T, F>(&mut self, params: &Vec<GenericParam>, f: F
     {
         let old_len = self.in_scope_lifetimes.len();
         let lt_def_names = params.iter().filter_map(|param| match param.kind {
-            GenericParamKind::Lifetime { .. } => Some(param.ident.name),
+            GenericParamKind::Lifetime { .. } => Some(param.ident.modern()),
             _ => None,
         });
         self.in_scope_lifetimes.extend(lt_def_names);
@@ -773,7 +773,7 @@ fn with_parent_impl_lifetime_defs<T, F>(&mut self,
     {
         let old_len = self.in_scope_lifetimes.len();
         let lt_def_names = params.iter().filter_map(|param| match param.kind {
-            hir::GenericParamKind::Lifetime { .. } => Some(param.name.name()),
+            hir::GenericParamKind::Lifetime { .. } => Some(param.name.ident().modern()),
             _ => None,
         });
         self.in_scope_lifetimes.extend(lt_def_names);
@@ -955,20 +955,9 @@ fn def_key(&mut self, id: DefId) -> DefKey {
         }
     }
 
-    fn lower_ident(&mut self, ident: Ident) -> Name {
-        let ident = ident.modern();
-        if ident.span.ctxt() == SyntaxContext::empty() {
-            return ident.name;
-        }
-        *self.name_map
-            .entry(ident)
-            .or_insert_with(|| Symbol::from_ident(ident))
-    }
-
     fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> {
         label.map(|label| hir::Label {
-            name: label.ident.name,
-            span: label.ident.span,
+            ident: label.ident,
         })
     }
 
@@ -1062,7 +1051,7 @@ fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {
     fn lower_ty_binding(&mut self, b: &TypeBinding, itctx: ImplTraitContext) -> hir::TypeBinding {
         hir::TypeBinding {
             id: self.lower_node_id(b.id).node_id,
-            name: self.lower_ident(b.ident),
+            ident: b.ident,
             ty: self.lower_ty(&b.ty, itctx),
             span: b.span,
         }
@@ -1074,11 +1063,15 @@ fn lower_generic_arg(&mut self,
                         -> hir::GenericArg {
         match arg {
             ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(&lt)),
-            ast::GenericArg::Type(ty) => GenericArg::Type(self.lower_ty(&ty, itctx)),
+            ast::GenericArg::Type(ty) => GenericArg::Type(self.lower_ty_direct(&ty, itctx)),
         }
     }
 
     fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
+        P(self.lower_ty_direct(t, itctx))
+    }
+
+    fn lower_ty_direct(&mut self, t: &Ty, itctx: ImplTraitContext) -> hir::Ty {
         let kind = match t.node {
             TyKind::Infer => hir::TyInfer,
             TyKind::Err => hir::TyErr,
@@ -1115,10 +1108,10 @@ fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
             ),
             TyKind::Never => hir::TyNever,
             TyKind::Tup(ref tys) => {
-                hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty, itctx)).collect())
+                hir::TyTup(tys.iter().map(|ty| self.lower_ty_direct(ty, itctx)).collect())
             }
             TyKind::Paren(ref ty) => {
-                return self.lower_ty(ty, itctx);
+                return self.lower_ty_direct(ty, itctx);
             }
             TyKind::Path(ref qself, ref path) => {
                 let id = self.lower_node_id(t.id);
@@ -1133,7 +1126,7 @@ fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
                 None,
                 P(hir::Path {
                     def: self.expect_full_def(t.id),
-                    segments: hir_vec![hir::PathSegment::from_name(keywords::SelfType.name())],
+                    segments: hir_vec![hir::PathSegment::from_ident(keywords::SelfType.ident())],
                     span: t.span,
                 }),
             )),
@@ -1189,14 +1182,14 @@ fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
 
                         let hir_bounds = self.lower_param_bounds(bounds, itctx);
                         // Set the name to `impl Bound1 + Bound2`
-                        let name = Symbol::intern(&pprust::ty_to_string(t));
+                        let ident = Ident::from_str(&pprust::ty_to_string(t)).with_span_pos(span);
                         self.in_band_ty_params.push(hir::GenericParam {
                             id: def_node_id,
-                            name: ParamName::Plain(name),
-                            span,
+                            name: ParamName::Plain(ident),
                             pure_wrt_drop: false,
                             attrs: hir_vec![],
                             bounds: hir_bounds,
+                            span,
                             kind: hir::GenericParamKind::Type {
                                 default: None,
                                 synthetic: Some(hir::SyntheticTyParamKind::ImplTrait),
@@ -1208,7 +1201,7 @@ fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
                             P(hir::Path {
                                 span,
                                 def: Def::TyParam(DefId::local(def_index)),
-                                segments: hir_vec![hir::PathSegment::from_name(name)],
+                                segments: hir_vec![hir::PathSegment::from_ident(ident)],
                             }),
                         ))
                     }
@@ -1228,12 +1221,12 @@ fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
         };
 
         let LoweredNodeId { node_id, hir_id } = self.lower_node_id(t.id);
-        P(hir::Ty {
+        hir::Ty {
             id: node_id,
             node: kind,
             span: t.span,
             hir_id,
-        })
+        }
     }
 
     fn lower_existential_impl_trait(
@@ -1438,7 +1431,7 @@ fn visit_lifetime(&mut self, lifetime: &'v hir::Lifetime) {
                     self.context.resolver.definitions().create_def_with_parent(
                         self.parent,
                         def_node_id,
-                        DefPathData::LifetimeParam(name.name().as_interned_str()),
+                        DefPathData::LifetimeParam(name.ident().as_interned_str()),
                         DefIndexAddressSpace::High,
                         Mark::root(),
                         lifetime.span,
@@ -1446,7 +1439,7 @@ fn visit_lifetime(&mut self, lifetime: &'v hir::Lifetime) {
 
                     let name = match name {
                         hir::LifetimeName::Underscore => {
-                            hir::ParamName::Plain(keywords::UnderscoreLifetime.name())
+                            hir::ParamName::Plain(keywords::UnderscoreLifetime.ident())
                         }
                         hir::LifetimeName::Param(param_name) => param_name,
                         _ => bug!("expected LifetimeName::Param or ParamName::Plain"),
@@ -1636,7 +1629,7 @@ fn lower_qpath(
             // e.g. `Vec` in `Vec::new` or `<I as Iterator>::Item` in
             // `<I as Iterator>::Item::default`.
             let new_id = self.next_id();
-            self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path))
+            P(self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path)))
         };
 
         // Anything after the base path are associated "extensions",
@@ -1667,7 +1660,7 @@ fn lower_qpath(
 
             // Wrap the associated extension in another type node.
             let new_id = self.next_id();
-            ty = self.ty_path(new_id, p.span, qpath);
+            ty = P(self.ty_path(new_id, p.span, qpath));
         }
 
         // Should've returned in the for loop above.
@@ -1683,7 +1676,7 @@ fn lower_path_extra(
         &mut self,
         def: Def,
         p: &Path,
-        name: Option<Name>,
+        ident: Option<Ident>,
         param_mode: ParamMode,
     ) -> hir::Path {
         hir::Path {
@@ -1700,7 +1693,7 @@ fn lower_path_extra(
                         ImplTraitContext::Disallowed,
                     )
                 })
-                .chain(name.map(|name| hir::PathSegment::from_name(name)))
+                .chain(ident.map(|ident| hir::PathSegment::from_ident(ident)))
                 .collect(),
             span: p.span,
         }
@@ -1763,7 +1756,7 @@ fn lower_path_segment(
         }
 
         hir::PathSegment::new(
-            self.lower_ident(segment.ident),
+            segment.ident,
             generic_args,
             infer_types,
         )
@@ -1802,10 +1795,10 @@ fn lower_parenthesized_parameter_data(
             |this| {
                 const DISALLOWED: ImplTraitContext = ImplTraitContext::Disallowed;
                 let &ParenthesisedArgs { ref inputs, ref output, span } = data;
-                let inputs = inputs.iter().map(|ty| this.lower_ty(ty, DISALLOWED)).collect();
+                let inputs = inputs.iter().map(|ty| this.lower_ty_direct(ty, DISALLOWED)).collect();
                 let mk_tup = |this: &mut Self, tys, span| {
                     let LoweredNodeId { node_id, hir_id } = this.next_id();
-                    P(hir::Ty { node: hir::TyTup(tys), id: node_id, hir_id, span })
+                    hir::Ty { node: hir::TyTup(tys), id: node_id, hir_id, span }
                 };
 
                 (
@@ -1814,11 +1807,11 @@ fn lower_parenthesized_parameter_data(
                         bindings: hir_vec![
                             hir::TypeBinding {
                                 id: this.next_id().node_id,
-                                name: Symbol::intern(FN_OUTPUT_NAME),
+                                ident: Ident::from_str(FN_OUTPUT_NAME),
                                 ty: output
                                     .as_ref()
                                     .map(|ty| this.lower_ty(&ty, DISALLOWED))
-                                    .unwrap_or_else(|| mk_tup(this, hir::HirVec::new(), span)),
+                                    .unwrap_or_else(|| P(mk_tup(this, hir::HirVec::new(), span))),
                                 span: output.as_ref().map_or(span, |ty| ty.span),
                             }
                         ],
@@ -1862,12 +1855,12 @@ fn lower_arg(&mut self, arg: &Arg) -> hir::Arg {
         }
     }
 
-    fn lower_fn_args_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Spanned<Name>> {
+    fn lower_fn_args_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Ident> {
         decl.inputs
             .iter()
             .map(|arg| match arg.pat.node {
-                PatKind::Ident(_, ident, None) => respan(ident.span, ident.name),
-                _ => respan(arg.pat.span, keywords::Invalid.name()),
+                PatKind::Ident(_, ident, _) => ident,
+                _ => Ident::new(keywords::Invalid.name(), arg.pat.span),
             })
             .collect()
     }
@@ -1894,9 +1887,9 @@ fn lower_fn_decl(
             .iter()
             .map(|arg| {
                 if let Some(def_id) = fn_def_id {
-                    self.lower_ty(&arg.ty, ImplTraitContext::Universal(def_id))
+                    self.lower_ty_direct(&arg.ty, ImplTraitContext::Universal(def_id))
                 } else {
-                    self.lower_ty(&arg.ty, ImplTraitContext::Disallowed)
+                    self.lower_ty_direct(&arg.ty, ImplTraitContext::Disallowed)
                 }
             })
             .collect::<HirVec<_>>();
@@ -1936,7 +1929,7 @@ fn lower_fn_decl(
     // fn_def_id: DefId of the parent function. Used to create child impl trait definition.
     fn lower_async_fn_ret_ty(
         &mut self,
-        inputs: &[P<hir::Ty>],
+        inputs: &[hir::Ty],
         output: &FunctionRetTy,
         fn_def_id: DefId,
     ) -> hir::FunctionRetTy {
@@ -2109,7 +2102,7 @@ fn visit_lifetime(&mut self, lifetime: &'v hir::Lifetime) {
             let future_params = P(hir::GenericArgs {
                 args: hir_vec![],
                 bindings: hir_vec![hir::TypeBinding {
-                    name: Symbol::intern(FN_OUTPUT_NAME),
+                    ident: Ident::from_str(FN_OUTPUT_NAME),
                     ty: output_ty,
                     id: this.next_id().node_id,
                     span,
@@ -2171,21 +2164,23 @@ fn lower_param_bound(
 
     fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
         let span = l.ident.span;
-        match self.lower_ident(l.ident) {
-            x if x == "'static" => self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
-            x if x == "'_" => match self.anonymous_lifetime_mode {
-                AnonymousLifetimeMode::CreateParameter => {
-                    let fresh_name = self.collect_fresh_in_band_lifetime(span);
-                    self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(fresh_name))
-                }
+        match l.ident {
+            ident if ident.name == keywords::StaticLifetime.name() =>
+                self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
+            ident if ident.name == keywords::UnderscoreLifetime.name() =>
+                match self.anonymous_lifetime_mode {
+                    AnonymousLifetimeMode::CreateParameter => {
+                        let fresh_name = self.collect_fresh_in_band_lifetime(span);
+                        self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(fresh_name))
+                    }
 
-                AnonymousLifetimeMode::PassThrough => {
-                    self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore)
-                }
-            },
-            name => {
-                self.maybe_collect_in_band_lifetime(span, name);
-                let param_name = ParamName::Plain(name);
+                    AnonymousLifetimeMode::PassThrough => {
+                        self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore)
+                    }
+                },
+            ident => {
+                self.maybe_collect_in_band_lifetime(ident);
+                let param_name = ParamName::Plain(ident);
                 self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(param_name))
             }
         }
@@ -2227,7 +2222,7 @@ fn lower_generic_param(&mut self,
                 let lt = self.lower_lifetime(&Lifetime { id: param.id, ident: param.ident });
                 let param_name = match lt.name {
                     hir::LifetimeName::Param(param_name) => param_name,
-                    _ => hir::ParamName::Plain(lt.name.name()),
+                    _ => hir::ParamName::Plain(lt.name.ident()),
                 };
                 let param = hir::GenericParam {
                     id: lt.id,
@@ -2244,14 +2239,14 @@ fn lower_generic_param(&mut self,
                 param
             }
             GenericParamKind::Type { ref default, .. } => {
-                let mut name = self.lower_ident(param.ident);
-
                 // Don't expose `Self` (recovered "keyword used as ident" parse error).
                 // `rustc::ty` expects `Self` to be only used for a trait's `Self`.
                 // Instead, use gensym("Self") to create a distinct name that looks the same.
-                if name == keywords::SelfType.name() {
-                    name = Symbol::gensym("Self");
-                }
+                let ident = if param.ident.name == keywords::SelfType.name() {
+                    param.ident.gensym()
+                } else {
+                    param.ident
+                };
 
                 let add_bounds = add_bounds.get(&param.id).map_or(&[][..], |x| &x);
                 if !add_bounds.is_empty() {
@@ -2262,11 +2257,11 @@ fn lower_generic_param(&mut self,
 
                 hir::GenericParam {
                     id: self.lower_node_id(param.id).node_id,
-                    name: hir::ParamName::Plain(name),
-                    span: param.ident.span,
+                    name: hir::ParamName::Plain(ident),
                     pure_wrt_drop: attr::contains_name(&param.attrs, "may_dangle"),
                     attrs: self.lower_attrs(&param.attrs),
                     bounds,
+                    span: ident.span,
                     kind: hir::GenericParamKind::Type {
                         default: default.as_ref().map(|x| {
                             self.lower_ty(x, ImplTraitContext::Disallowed)
@@ -2956,7 +2951,7 @@ fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem {
         hir::TraitItem {
             id: node_id,
             hir_id,
-            name: self.lower_ident(i.ident),
+            ident: i.ident,
             attrs: self.lower_attrs(&i.attrs),
             generics,
             node,
@@ -2982,7 +2977,7 @@ fn lower_trait_item_ref(&mut self, i: &TraitItem) -> hir::TraitItemRef {
         };
         hir::TraitItemRef {
             id: hir::TraitItemId { node_id: i.id },
-            name: self.lower_ident(i.ident),
+            ident: i.ident,
             span: i.span,
             defaultness: self.lower_defaultness(Defaultness::Default, has_default),
             kind,
@@ -3048,7 +3043,7 @@ fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem {
         hir::ImplItem {
             id: node_id,
             hir_id,
-            name: self.lower_ident(i.ident),
+            ident: i.ident,
             attrs: self.lower_attrs(&i.attrs),
             generics,
             vis: self.lower_visibility(&i.vis, None),
@@ -3063,7 +3058,7 @@ fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem {
     fn lower_impl_item_ref(&mut self, i: &ImplItem) -> hir::ImplItemRef {
         hir::ImplItemRef {
             id: hir::ImplItemId { node_id: i.id },
-            name: self.lower_ident(i.ident),
+            ident: i.ident,
             span: i.span,
             vis: self.lower_visibility(&i.vis, Some(i.id)),
             defaultness: self.lower_defaultness(i.defaultness, true /* [1] */),
@@ -3290,7 +3285,7 @@ fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
                         hir::PatKind::Binding(
                             self.lower_binding_mode(binding_mode),
                             canonical_id,
-                            respan(ident.span, ident.name),
+                            ident,
                             sub.as_ref().map(|x| self.lower_pat(x)),
                         )
                     }
@@ -3299,7 +3294,7 @@ fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
                         P(hir::Path {
                             span: ident.span,
                             def,
-                            segments: hir_vec![hir::PathSegment::from_name(ident.name)],
+                            segments: hir_vec![hir::PathSegment::from_ident(ident)],
                         }),
                     )),
                 }
@@ -3661,8 +3656,8 @@ fn lower_expr(&mut self, e: &Expr) -> hir::Expr {
                 let e1 = self.lower_expr(e1);
                 let e2 = self.lower_expr(e2);
                 let ty_path = P(self.std_path(span, &["ops", "RangeInclusive"], None, false));
-                let ty = self.ty_path(id, span, hir::QPath::Resolved(None, ty_path));
-                let new_seg = P(hir::PathSegment::from_name(Symbol::intern("new")));
+                let ty = P(self.ty_path(id, span, hir::QPath::Resolved(None, ty_path)));
+                let new_seg = P(hir::PathSegment::from_ident(Ident::from_str("new")));
                 let new_path = hir::QPath::TypeRelative(ty, new_seg);
                 let new = P(self.expr(span, hir::ExprPath(new_path), ThinVec::new()));
                 hir::ExprCall(new, hir_vec![e1, e2])
@@ -4332,14 +4327,14 @@ fn expr_call(
         self.expr(span, hir::ExprCall(e, args), ThinVec::new())
     }
 
-    fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> hir::Expr {
-        self.expr_ident_with_attrs(span, id, binding, ThinVec::new())
+    fn expr_ident(&mut self, span: Span, ident: Ident, binding: NodeId) -> hir::Expr {
+        self.expr_ident_with_attrs(span, ident, binding, ThinVec::new())
     }
 
     fn expr_ident_with_attrs(
         &mut self,
         span: Span,
-        id: Name,
+        ident: Ident,
         binding: NodeId,
         attrs: ThinVec<Attribute>,
     ) -> hir::Expr {
@@ -4348,7 +4343,7 @@ fn expr_ident_with_attrs(
             P(hir::Path {
                 span,
                 def: Def::Local(binding),
-                segments: hir_vec![hir::PathSegment::from_name(id)],
+                segments: hir_vec![hir::PathSegment::from_ident(ident)],
             }),
         ));
 
@@ -4430,7 +4425,7 @@ fn stmt_let(
         &mut self,
         sp: Span,
         mutbl: bool,
-        ident: Name,
+        ident: Ident,
         ex: P<hir::Expr>,
     ) -> (hir::Stmt, NodeId) {
         let pat = if mutbl {
@@ -4501,14 +4496,14 @@ fn pat_std_enum(
         self.pat(span, pt)
     }
 
-    fn pat_ident(&mut self, span: Span, name: Name) -> P<hir::Pat> {
-        self.pat_ident_binding_mode(span, name, hir::BindingAnnotation::Unannotated)
+    fn pat_ident(&mut self, span: Span, ident: Ident) -> P<hir::Pat> {
+        self.pat_ident_binding_mode(span, ident, hir::BindingAnnotation::Unannotated)
     }
 
     fn pat_ident_binding_mode(
         &mut self,
         span: Span,
-        name: Name,
+        ident: Ident,
         bm: hir::BindingAnnotation,
     ) -> P<hir::Pat> {
         let LoweredNodeId { node_id, hir_id } = self.next_id();
@@ -4516,7 +4511,7 @@ fn pat_ident_binding_mode(
         P(hir::Pat {
             id: node_id,
             hir_id,
-            node: hir::PatKind::Binding(bm, node_id, Spanned { span, node: name }, None),
+            node: hir::PatKind::Binding(bm, node_id, ident.with_span_pos(span), None),
             span,
         })
     }
@@ -4549,7 +4544,7 @@ fn std_path(
             .resolve_str_path(span, self.crate_root, components, params, is_value)
     }
 
-    fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> P<hir::Ty> {
+    fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> hir::Ty {
         let mut id = id;
         let node = match qpath {
             hir::QPath::Resolved(None, path) => {
@@ -4574,12 +4569,12 @@ fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> P<hir
             }
             _ => hir::TyPath(qpath),
         };
-        P(hir::Ty {
+        hir::Ty {
             id: id.node_id,
             hir_id: id.hir_id,
             node,
             span,
-        })
+        }
     }
 
     /// Invoked to create the lifetime argument for a type `&T`
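Note: across these lowering hunks a layer of boxing is dropped: tuple and fn-decl types are now collected as plain hir::Ty values, and lower_ty_direct appears to return an unboxed hir::Ty while lower_ty keeps returning P<hir::Ty>, with P(..) applied only at call sites (such as the Fn-output binding) that still store a boxed type. A minimal standalone sketch of that pattern, with simplified stand-in types rather than the real HIR (names mirror the diff, bodies are illustrative only):

// Minimal standalone sketch of the boxing pattern above, using simplified
// stand-in types rather than the real rustc HIR types.

type P<T> = Box<T>; // stand-in for rustc's owning pointer

#[derive(Debug)]
enum TyKind {
    Unit,
    Tup(Vec<Ty>), // was Vec<P<Ty>> before this change
}

#[derive(Debug)]
struct Ty {
    node: TyKind,
}

// analogue of lower_ty_direct: produces an unboxed Ty for by-value storage
fn lower_ty_direct(kind: TyKind) -> Ty {
    Ty { node: kind }
}

// analogue of lower_ty: boxes only where the surrounding node still wants P<Ty>
fn lower_ty(kind: TyKind) -> P<Ty> {
    Box::new(lower_ty_direct(kind))
}

fn main() {
    // fn-decl inputs collected by value, as in the new FnDecl { inputs: HirVec<Ty>, .. }
    let inputs: Vec<Ty> = vec![lower_ty_direct(TyKind::Unit), lower_ty_direct(TyKind::Unit)];
    let tup = lower_ty_direct(TyKind::Tup(inputs));
    // boxed only at the edge, mirroring P(mk_tup(..)) for the Fn-output binding's ty field
    let boxed: P<Ty> = Box::new(tup);
    let output: P<Ty> = lower_ty(TyKind::Unit);
    println!("{:?} {:?}", boxed, output);
}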
diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs
index f665ced3dbcf12de404a66590c2bab7e9b33d70c..930db8b0ccc3f06bc0eec13c146562a069d665ce 100644 (file)
@@ -25,7 +25,7 @@
 use hir::map::{self, Node};
 use hir::{Expr, FnDecl};
 use hir::intravisit::FnKind;
-use syntax::ast::{Attribute, Name, NodeId};
+use syntax::ast::{Attribute, Ident, Name, NodeId};
 use syntax_pos::Span;
 
 /// An FnLikeNode is a Node that is like a fn, in that it has a decl
@@ -209,8 +209,8 @@ pub fn kind(self) -> FnKind<'a> {
         let closure = |c: ClosureParts<'a>| {
             FnKind::Closure(c.attrs)
         };
-        let method = |_, name: Name, sig: &'a ast::MethodSig, vis, _, _, attrs| {
-            FnKind::Method(name, sig, vis, attrs)
+        let method = |_, ident: Ident, sig: &'a ast::MethodSig, vis, _, _, attrs| {
+            FnKind::Method(ident, sig, vis, attrs)
         };
         self.handle(item, method, closure)
     }
@@ -218,7 +218,7 @@ pub fn kind(self) -> FnKind<'a> {
     fn handle<A, I, M, C>(self, item_fn: I, method: M, closure: C) -> A where
         I: FnOnce(ItemFnParts<'a>) -> A,
         M: FnOnce(NodeId,
-                  Name,
+                  Ident,
                   &'a ast::MethodSig,
                   Option<&'a ast::Visibility>,
                   ast::BodyId,
@@ -245,14 +245,14 @@ fn handle<A, I, M, C>(self, item_fn: I, method: M, closure: C) -> A where
             },
             map::NodeTraitItem(ti) => match ti.node {
                 ast::TraitItemKind::Method(ref sig, ast::TraitMethod::Provided(body)) => {
-                    method(ti.id, ti.name, sig, None, body, ti.span, &ti.attrs)
+                    method(ti.id, ti.ident, sig, None, body, ti.span, &ti.attrs)
                 }
                 _ => bug!("trait method FnLikeNode that is not fn-like"),
             },
             map::NodeImplItem(ii) => {
                 match ii.node {
                     ast::ImplItemKind::Method(ref sig, body) => {
-                        method(ii.id, ii.name, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
+                        method(ii.id, ii.ident, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
                     }
                     _ => {
                         bug!("impl method FnLikeNode that is not fn-like")
diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs
index 14cecba490d0ab3c99258ab9f7ab2429ce46d8f2..2d3e9804835efa0c94c63a3ad101a43d2a854ec4 100644 (file)
@@ -495,7 +495,7 @@ fn visit_trait_item_ref(&mut self, ii: &'hir TraitItemRef) {
         // map the actual nodes, not the duplicate ones in the *Ref.
         let TraitItemRef {
             id,
-            name: _,
+            ident: _,
             kind: _,
             span: _,
             defaultness: _,
@@ -509,7 +509,7 @@ fn visit_impl_item_ref(&mut self, ii: &'hir ImplItemRef) {
         // map the actual nodes, not the duplicate ones in the *Ref.
         let ImplItemRef {
             id,
-            name: _,
+            ident: _,
             kind: _,
             span: _,
             vis: _,
diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs
index 7c71401c8b2e8cbc6f54a734a70322ddcf9f303d..261b5d313eff0660037300c86bf4f260f3916066 100644 (file)
@@ -112,11 +112,11 @@ fn visit_item(&mut self, i: &'a Item) {
         // information we encapsulate into, the better
         let def_data = match i.node {
             ItemKind::Impl(..) => DefPathData::Impl,
-            ItemKind::Trait(..) => DefPathData::Trait(i.ident.name.as_interned_str()),
+            ItemKind::Trait(..) => DefPathData::Trait(i.ident.as_interned_str()),
             ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..) |
             ItemKind::TraitAlias(..) |
             ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) =>
-                DefPathData::TypeNs(i.ident.name.as_interned_str()),
+                DefPathData::TypeNs(i.ident.as_interned_str()),
             ItemKind::Mod(..) if i.ident == keywords::Invalid.ident() => {
                 return visit::walk_item(self, i);
             }
@@ -129,10 +129,10 @@ fn visit_item(&mut self, i: &'a Item) {
                     |this| visit::walk_item(this, i)
                 )
             }
-            ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_interned_str()),
+            ItemKind::Mod(..) => DefPathData::Module(i.ident.as_interned_str()),
             ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
-                DefPathData::ValueNs(i.ident.name.as_interned_str()),
-            ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.name.as_interned_str()),
+                DefPathData::ValueNs(i.ident.as_interned_str()),
+            ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.as_interned_str()),
             ItemKind::Mac(..) => return self.visit_macro_invoc(i.id),
             ItemKind::GlobalAsm(..) => DefPathData::Misc,
             ItemKind::Use(..) => {
@@ -169,7 +169,7 @@ fn visit_foreign_item(&mut self, foreign_item: &'a ForeignItem) {
         }
 
         let def = self.create_def(foreign_item.id,
-                                  DefPathData::ValueNs(foreign_item.ident.name.as_interned_str()),
+                                  DefPathData::ValueNs(foreign_item.ident.as_interned_str()),
                                   REGULAR_SPACE,
                                   foreign_item.span);
 
@@ -180,8 +180,7 @@ fn visit_foreign_item(&mut self, foreign_item: &'a ForeignItem) {
 
     fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
         let def = self.create_def(v.node.data.id(),
-                                  DefPathData::EnumVariant(v.node.ident
-                                                            .name.as_interned_str()),
+                                  DefPathData::EnumVariant(v.node.ident.as_interned_str()),
                                   REGULAR_SPACE,
                                   v.span);
         self.with_parent(def, |this| visit::walk_variant(this, v, g, item_id));
@@ -201,7 +200,7 @@ fn visit_variant_data(&mut self, data: &'a VariantData, _: Ident,
     }
 
     fn visit_generic_param(&mut self, param: &'a GenericParam) {
-        let name = param.ident.name.as_interned_str();
+        let name = param.ident.as_interned_str();
         let def_path_data = match param.kind {
             GenericParamKind::Lifetime { .. } => DefPathData::LifetimeParam(name),
             GenericParamKind::Type { .. } => DefPathData::TypeParam(name),
@@ -214,9 +213,9 @@ fn visit_generic_param(&mut self, param: &'a GenericParam) {
     fn visit_trait_item(&mut self, ti: &'a TraitItem) {
         let def_data = match ti.node {
             TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
-                DefPathData::ValueNs(ti.ident.name.as_interned_str()),
+                DefPathData::ValueNs(ti.ident.as_interned_str()),
             TraitItemKind::Type(..) => {
-                DefPathData::AssocTypeInTrait(ti.ident.name.as_interned_str())
+                DefPathData::AssocTypeInTrait(ti.ident.as_interned_str())
             },
             TraitItemKind::Macro(..) => return self.visit_macro_invoc(ti.id),
         };
@@ -239,8 +238,8 @@ fn visit_impl_item(&mut self, ii: &'a ImplItem) {
                 )
             }
             ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
-                DefPathData::ValueNs(ii.ident.name.as_interned_str()),
-            ImplItemKind::Type(..) => DefPathData::AssocTypeInImpl(ii.ident.name.as_interned_str()),
+                DefPathData::ValueNs(ii.ident.as_interned_str()),
+            ImplItemKind::Type(..) => DefPathData::AssocTypeInImpl(ii.ident.as_interned_str()),
             ImplItemKind::Macro(..) => return self.visit_macro_invoc(ii.id),
         };
 
diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs
index 1e03381861b9b82d4ecc3f556385515dcd2a2aaa..b7071970a04b9e0bc01f6c25168718ce3cc603e8 100644 (file)
@@ -616,7 +616,7 @@ pub fn ty_param_name(&self, id: NodeId) -> Name {
             NodeItem(&Item { node: ItemTrait(..), .. }) => {
                 keywords::SelfType.name()
             }
-            NodeGenericParam(param) => param.name.name(),
+            NodeGenericParam(param) => param.name.ident().name,
             _ => bug!("ty_param_name: {} not a type parameter", self.node_to_string(id)),
         }
     }
@@ -949,13 +949,13 @@ pub fn name(&self, id: NodeId) -> Name {
         match self.get(id) {
             NodeItem(i) => i.name,
             NodeForeignItem(i) => i.name,
-            NodeImplItem(ii) => ii.name,
-            NodeTraitItem(ti) => ti.name,
+            NodeImplItem(ii) => ii.ident.name,
+            NodeTraitItem(ti) => ti.ident.name,
             NodeVariant(v) => v.node.name,
             NodeField(f) => f.ident.name,
-            NodeLifetime(lt) => lt.name.name(),
-            NodeGenericParam(param) => param.name.name(),
-            NodeBinding(&Pat { node: PatKind::Binding(_,_,l,_), .. }) => l.node,
+            NodeLifetime(lt) => lt.name.ident().name,
+            NodeGenericParam(param) => param.name.ident().name,
+            NodeBinding(&Pat { node: PatKind::Binding(_,_,l,_), .. }) => l.name,
             NodeStructCtor(_) => self.name(self.get_parent(id)),
             _ => bug!("no name for {}", self.node_to_string(id))
         }
@@ -1149,8 +1149,8 @@ impl Named for Item { fn name(&self) -> Name { self.name } }
 impl Named for ForeignItem { fn name(&self) -> Name { self.name } }
 impl Named for Variant_ { fn name(&self) -> Name { self.name } }
 impl Named for StructField { fn name(&self) -> Name { self.ident.name } }
-impl Named for TraitItem { fn name(&self) -> Name { self.name } }
-impl Named for ImplItem { fn name(&self) -> Name { self.name } }
+impl Named for TraitItem { fn name(&self) -> Name { self.ident.name } }
+impl Named for ImplItem { fn name(&self) -> Name { self.ident.name } }
 
 
 pub fn map_crate<'hir>(sess: &::session::Session,
@@ -1309,13 +1309,13 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
         Some(NodeImplItem(ii)) => {
             match ii.node {
                 ImplItemKind::Const(..) => {
-                    format!("assoc const {} in {}{}", ii.name, path_str(), id_str)
+                    format!("assoc const {} in {}{}", ii.ident, path_str(), id_str)
                 }
                 ImplItemKind::Method(..) => {
-                    format!("method {} in {}{}", ii.name, path_str(), id_str)
+                    format!("method {} in {}{}", ii.ident, path_str(), id_str)
                 }
                 ImplItemKind::Type(_) => {
-                    format!("assoc type {} in {}{}", ii.name, path_str(), id_str)
+                    format!("assoc type {} in {}{}", ii.ident, path_str(), id_str)
                 }
             }
         }
@@ -1326,7 +1326,7 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String {
                 TraitItemKind::Type(..) => "assoc type",
             };
 
-            format!("{} {} in {}{}", kind, ti.name, path_str(), id_str)
+            format!("{} {} in {}{}", kind, ti.ident, path_str(), id_str)
         }
         Some(NodeVariant(ref variant)) => {
             format!("variant {} in {}{}",
diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs
index 5f6921c7a87cc860d430b12f20f6fb10fae0474b..5416474721045acbe022baad0abae5e39e060b01 100644 (file)
@@ -175,13 +175,12 @@ fn index(self) -> usize {
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
 pub struct Label {
-    pub name: Name,
-    pub span: Span,
+    pub ident: Ident,
 }
 
 impl fmt::Debug for Label {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        write!(f, "label({:?})", self.name)
+        write!(f, "label({:?})", self.ident)
     }
 }
 
@@ -202,7 +201,7 @@ pub struct Lifetime {
 #[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
 pub enum ParamName {
     /// Some user-given name like `T` or `'x`.
-    Plain(Name),
+    Plain(Ident),
 
     /// Synthetic name generated when user elided a lifetime in an impl header,
     /// e.g. the lifetimes in cases like these:
@@ -221,10 +220,17 @@ pub enum ParamName {
 }
 
 impl ParamName {
-    pub fn name(&self) -> Name {
+    pub fn ident(&self) -> Ident {
+        match *self {
+            ParamName::Plain(ident) => ident,
+            ParamName::Fresh(_) => keywords::UnderscoreLifetime.ident(),
+        }
+    }
+
+    pub fn modern(&self) -> ParamName {
         match *self {
-            ParamName::Plain(name) => name,
-            ParamName::Fresh(_) => keywords::UnderscoreLifetime.name(),
+            ParamName::Plain(ident) => ParamName::Plain(ident.modern()),
+            param_name => param_name,
         }
     }
 }
@@ -245,33 +251,44 @@ pub enum LifetimeName {
 }
 
 impl LifetimeName {
-    pub fn name(&self) -> Name {
-        use self::LifetimeName::*;
+    pub fn ident(&self) -> Ident {
         match *self {
-            Implicit => keywords::Invalid.name(),
-            Underscore => keywords::UnderscoreLifetime.name(),
-            Static => keywords::StaticLifetime.name(),
-            Param(param_name) => param_name.name(),
+            LifetimeName::Implicit => keywords::Invalid.ident(),
+            LifetimeName::Underscore => keywords::UnderscoreLifetime.ident(),
+            LifetimeName::Static => keywords::StaticLifetime.ident(),
+            LifetimeName::Param(param_name) => param_name.ident(),
         }
     }
 
     pub fn is_elided(&self) -> bool {
-        use self::LifetimeName::*;
         match self {
-            Implicit | Underscore => true,
+            LifetimeName::Implicit | LifetimeName::Underscore => true,
 
             // It might seem surprising that `Fresh(_)` counts as
             // *not* elided -- but this is because, as far as the code
             // in the compiler is concerned -- `Fresh(_)` variants act
             // equivalently to "some fresh name". They correspond to
             // early-bound regions on an impl, in other words.
-            Param(_) | Static => false,
+            LifetimeName::Param(_) | LifetimeName::Static => false,
         }
     }
 
     fn is_static(&self) -> bool {
         self == &LifetimeName::Static
     }
+
+    pub fn modern(&self) -> LifetimeName {
+        match *self {
+            LifetimeName::Param(param_name) => LifetimeName::Param(param_name.modern()),
+            lifetime_name => lifetime_name,
+        }
+    }
+}
+
+impl fmt::Display for Lifetime {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.name.ident().fmt(f)
+    }
 }
 
 impl fmt::Debug for Lifetime {
@@ -307,7 +324,7 @@ pub struct Path {
 
 impl Path {
     pub fn is_global(&self) -> bool {
-        !self.segments.is_empty() && self.segments[0].name == keywords::CrateRoot.name()
+        !self.segments.is_empty() && self.segments[0].ident.name == keywords::CrateRoot.name()
     }
 }
 
@@ -328,7 +345,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct PathSegment {
     /// The identifier portion of this path segment.
-    pub name: Name,
+    pub ident: Ident,
 
     /// Type/lifetime parameters attached to this path. They come in
     /// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
@@ -346,17 +363,17 @@ pub struct PathSegment {
 
 impl PathSegment {
     /// Convert an identifier to the corresponding segment.
-    pub fn from_name(name: Name) -> PathSegment {
+    pub fn from_ident(ident: Ident) -> PathSegment {
         PathSegment {
-            name,
+            ident,
             infer_types: true,
             args: None,
         }
     }
 
-    pub fn new(name: Name, args: GenericArgs, infer_types: bool) -> Self {
+    pub fn new(ident: Ident, args: GenericArgs, infer_types: bool) -> Self {
         PathSegment {
-            name,
+            ident,
             infer_types,
             args: if args.is_empty() {
                 None
@@ -383,7 +400,7 @@ pub fn with_generic_args<F, R>(&self, f: F) -> R
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum GenericArg {
     Lifetime(Lifetime),
-    Type(P<Ty>),
+    Type(Ty),
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -412,7 +429,7 @@ pub fn is_empty(&self) -> bool {
         self.args.is_empty() && self.bindings.is_empty() && !self.parenthesized
     }
 
-    pub fn inputs(&self) -> &[P<Ty>] {
+    pub fn inputs(&self) -> &[Ty] {
         if self.parenthesized {
             for arg in &self.args {
                 match arg {
@@ -859,7 +876,7 @@ pub enum PatKind {
     /// The `NodeId` is the canonical ID for the variable being bound,
     /// e.g. in `Ok(x) | Err(x)`, both `x` use the same canonical ID,
     /// which is the pattern ID of the first `x`.
-    Binding(BindingAnnotation, NodeId, Spanned<Name>, Option<P<Pat>>),
+    Binding(BindingAnnotation, NodeId, Ident, Option<P<Pat>>),
 
     /// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
     /// The `bool` is `true` in the presence of a `..`.
@@ -1525,7 +1542,7 @@ pub struct TraitItemId {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct TraitItem {
     pub id: NodeId,
-    pub name: Name,
+    pub ident: Ident,
     pub hir_id: HirId,
     pub attrs: HirVec<Attribute>,
     pub generics: Generics,
@@ -1537,7 +1554,7 @@ pub struct TraitItem {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum TraitMethod {
     /// No default body in the trait, just a signature.
-    Required(HirVec<Spanned<Name>>),
+    Required(HirVec<Ident>),
 
     /// Both signature and body are provided in the trait.
     Provided(BodyId),
@@ -1568,7 +1585,7 @@ pub struct ImplItemId {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct ImplItem {
     pub id: NodeId,
-    pub name: Name,
+    pub ident: Ident,
     pub hir_id: HirId,
     pub vis: Visibility,
     pub defaultness: Defaultness,
@@ -1594,7 +1611,7 @@ pub enum ImplItemKind {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct TypeBinding {
     pub id: NodeId,
-    pub name: Name,
+    pub ident: Ident,
     pub ty: P<Ty>,
     pub span: Span,
 }
@@ -1632,7 +1649,7 @@ pub struct BareFnTy {
     pub abi: Abi,
     pub generic_params: HirVec<GenericParam>,
     pub decl: P<FnDecl>,
-    pub arg_names: HirVec<Spanned<Name>>,
+    pub arg_names: HirVec<Ident>,
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -1658,7 +1675,7 @@ pub enum Ty_ {
     /// The never type (`!`)
     TyNever,
     /// A tuple (`(A, B, C, D,...)`)
-    TyTup(HirVec<P<Ty>>),
+    TyTup(HirVec<Ty>),
     /// A path to a type definition (`module::module::...::Type`), or an
     /// associated type, e.g. `<Vec<T> as Trait>::Type` or `<T>::Target`.
     ///
@@ -1719,7 +1736,7 @@ pub struct Arg {
 /// Represents the header (not the body) of a function declaration
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct FnDecl {
-    pub inputs: HirVec<P<Ty>>,
+    pub inputs: HirVec<Ty>,
     pub output: FunctionRetTy,
     pub variadic: bool,
     /// True if this function has an `self`, `&self` or `&mut self` receiver
@@ -2129,7 +2146,7 @@ pub fn generics(&self) -> Option<&Generics> {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct TraitItemRef {
     pub id: TraitItemId,
-    pub name: Name,
+    pub ident: Ident,
     pub kind: AssociatedItemKind,
     pub span: Span,
     pub defaultness: Defaultness,
@@ -2144,7 +2161,7 @@ pub struct TraitItemRef {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct ImplItemRef {
     pub id: ImplItemId,
-    pub name: Name,
+    pub ident: Ident,
     pub kind: AssociatedItemKind,
     pub span: Span,
     pub vis: Visibility,
@@ -2172,7 +2189,7 @@ pub struct ForeignItem {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub enum ForeignItem_ {
     /// A foreign function
-    ForeignItemFn(P<FnDecl>, HirVec<Spanned<Name>>, Generics),
+    ForeignItemFn(P<FnDecl>, HirVec<Ident>, Generics),
     /// A foreign static item (`static ext: u8`), with optional mutability
     /// (the boolean is true when mutable)
     ForeignItemStatic(P<Ty>, bool),
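Note: the hir/mod.rs hunks above replace Name and Spanned<Name> with Ident throughout (Label, PathSegment, PatKind::Binding, TypeBinding, TraitItem, ImplItem, ParamName), so a single value carries both the name and its span. A standalone sketch of the resulting API shape, with simplified stand-in types (the real Ident, Span and keyword table live in libsyntax):

// Standalone sketch of the Name/Spanned<Name> -> Ident direction above,
// with simplified stand-in types: an Ident bundles the name with its span,
// so separate name/span pairs and Spanned<Name> wrappers collapse into one field.

type Name = &'static str; // stand-in for the interned symbol type

#[derive(Clone, Copy, Debug)]
struct Span(u32, u32);

#[derive(Clone, Copy, Debug)]
struct Ident {
    name: Name,
    span: Span,
}

#[derive(Clone, Copy, Debug)]
enum ParamName {
    Plain(Ident), // a user-written name like `T` or `'x`
    Fresh(usize), // synthesized for an elided lifetime
}

impl ParamName {
    // mirrors the new ParamName::ident(): Fresh params report the
    // underscore-lifetime keyword instead of a user-written name
    fn ident(&self) -> Ident {
        match *self {
            ParamName::Plain(ident) => ident,
            ParamName::Fresh(_) => Ident { name: "'_", span: Span(0, 0) },
        }
    }
}

fn main() {
    let user = ParamName::Plain(Ident { name: "'a", span: Span(10, 12) });
    let fresh = ParamName::Fresh(0);
    // the plain Name is still reachable as ident().name, as in the updated callers
    println!("{} / {}", user.ident().name, fresh.ident().name);
}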
diff --git a/src/librustc/hir/pat_util.rs b/src/librustc/hir/pat_util.rs
index 5a059b6a219a21e7e5468ab99ecaf3459b1cac37..14989f1ff7d8aa3e5a29e457a575d6feee764f33 100644 (file)
@@ -12,7 +12,6 @@
 use hir::def_id::DefId;
 use hir::{self, HirId, PatKind};
 use syntax::ast;
-use syntax::codemap::Spanned;
 use syntax_pos::Span;
 
 use std::iter::{Enumerate, ExactSizeIterator};
@@ -91,11 +90,11 @@ pub fn is_const(&self) -> bool {
     /// Call `f` on every "binding" in a pattern, e.g., on `a` in
     /// `match foo() { Some(a) => (), None => () }`
     pub fn each_binding<F>(&self, mut f: F)
-        where F: FnMut(hir::BindingAnnotation, HirId, Span, &Spanned<ast::Name>),
+        where F: FnMut(hir::BindingAnnotation, HirId, Span, ast::Ident),
     {
         self.walk(|p| {
-            if let PatKind::Binding(binding_mode, _, ref pth, _) = p.node {
-                f(binding_mode, p.hir_id, p.span, pth);
+            if let PatKind::Binding(binding_mode, _, ident, _) = p.node {
+                f(binding_mode, p.hir_id, p.span, ident);
             }
             true
         });
@@ -132,20 +131,10 @@ pub fn contains_bindings_or_wild(&self) -> bool {
         contains_bindings
     }
 
-    pub fn simple_name(&self) -> Option<ast::Name> {
+    pub fn simple_ident(&self) -> Option<ast::Ident> {
         match self.node {
-            PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ref path1, None) |
-            PatKind::Binding(hir::BindingAnnotation::Mutable, _, ref path1, None) =>
-                Some(path1.node),
-            _ => None,
-        }
-    }
-
-    pub fn simple_span(&self) -> Option<Span> {
-        match self.node {
-            PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ref path1, None) |
-            PatKind::Binding(hir::BindingAnnotation::Mutable, _, ref path1, None) =>
-                Some(path1.span),
+            PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, None) |
+            PatKind::Binding(hir::BindingAnnotation::Mutable, _, ident, None) => Some(ident),
             _ => None,
         }
     }
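Note: with the binding pattern now storing an Ident, the old simple_name() and simple_span() accessors collapse into simple_ident(), and callers project out .name or .span as needed. A standalone sketch with simplified stand-in types (the NodeId slot of the real PatKind::Binding is omitted):

// Sketch of the accessor merge above, with simplified stand-in types.

#[derive(Clone, Copy, Debug)]
struct Span(u32, u32);

#[derive(Clone, Copy, Debug)]
struct Ident {
    name: &'static str,
    span: Span,
}

enum BindingAnnotation { Unannotated, Mutable, Ref }

enum PatKind {
    Wild,
    Binding(BindingAnnotation, Ident, Option<Box<Pat>>),
}

struct Pat {
    node: PatKind,
}

impl Pat {
    // replaces both simple_name() and simple_span()
    fn simple_ident(&self) -> Option<Ident> {
        match self.node {
            PatKind::Binding(BindingAnnotation::Unannotated, ident, None)
            | PatKind::Binding(BindingAnnotation::Mutable, ident, None) => Some(ident),
            _ => None,
        }
    }
}

fn main() {
    let pat = Pat {
        node: PatKind::Binding(
            BindingAnnotation::Unannotated,
            Ident { name: "x", span: Span(3, 4) },
            None,
        ),
    };
    // former simple_name()/simple_span() callers become projections on the Ident
    let name = pat.simple_ident().map(|i| i.name);
    let span = pat.simple_ident().map(|i| i.span);
    println!("{:?} {:?}", name, span);
}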
diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs
index 6db4e987840bbb2b724e7008b8d07d354a15207c..c6f69a84d034d73f0657ee829189913ea8f29239 100644 (file)
@@ -12,7 +12,7 @@
 
 use rustc_target::spec::abi::Abi;
 use syntax::ast;
-use syntax::codemap::{CodeMap, Spanned};
+use syntax::codemap::CodeMap;
 use syntax::parse::ParseSess;
 use syntax::parse::lexer::comments;
 use syntax::print::pp::{self, Breaks};
@@ -497,14 +497,14 @@ pub fn print_foreign_item(&mut self, item: &hir::ForeignItem) -> io::Result<()>
     }
 
     fn print_associated_const(&mut self,
-                              name: ast::Name,
+                              ident: ast::Ident,
                               ty: &hir::Ty,
                               default: Option<hir::BodyId>,
                               vis: &hir::Visibility)
                               -> io::Result<()> {
         self.s.word(&visibility_qualified(vis, ""))?;
         self.word_space("const")?;
-        self.print_name(name)?;
+        self.print_ident(ident)?;
         self.word_space(":")?;
         self.print_type(ty)?;
         if let Some(expr) = default {
@@ -516,12 +516,12 @@ fn print_associated_const(&mut self,
     }
 
     fn print_associated_type(&mut self,
-                             name: ast::Name,
+                             ident: ast::Ident,
                              bounds: Option<&hir::GenericBounds>,
                              ty: Option<&hir::Ty>)
                              -> io::Result<()> {
         self.word_space("type")?;
-        self.print_name(name)?;
+        self.print_ident(ident)?;
         if let Some(bounds) = bounds {
             self.print_bounds(":", bounds)?;
         }
@@ -559,7 +559,7 @@ pub fn print_item(&mut self, item: &hir::Item) -> io::Result<()> {
 
                 match kind {
                     hir::UseKind::Single => {
-                        if path.segments.last().unwrap().name != item.name {
+                        if path.segments.last().unwrap().ident.name != item.name {
                             self.s.space()?;
                             self.word_space("as")?;
                             self.print_name(item.name)?;
@@ -845,7 +845,8 @@ pub fn print_visibility(&mut self, vis: &hir::Visibility) -> io::Result<()> {
             hir::Visibility::Crate(ast::CrateSugar::PubCrate) => self.word_nbsp("pub(crate)")?,
             hir::Visibility::Restricted { ref path, .. } => {
                 self.s.word("pub(")?;
-                if path.segments.len() == 1 && path.segments[0].name == keywords::Super.name() {
+                if path.segments.len() == 1 &&
+                   path.segments[0].ident.name == keywords::Super.name() {
                     // Special case: `super` can print like `pub(super)`.
                     self.s.word("super")?;
                 } else {
@@ -928,16 +929,16 @@ pub fn print_variant(&mut self, v: &hir::Variant) -> io::Result<()> {
         Ok(())
     }
     pub fn print_method_sig(&mut self,
-                            name: ast::Name,
+                            ident: ast::Ident,
                             m: &hir::MethodSig,
                             generics: &hir::Generics,
                             vis: &hir::Visibility,
-                            arg_names: &[Spanned<ast::Name>],
+                            arg_names: &[ast::Ident],
                             body_id: Option<hir::BodyId>)
                             -> io::Result<()> {
         self.print_fn(&m.decl,
                       m.header,
-                      Some(name),
+                      Some(ident.name),
                       generics,
                       vis,
                       arg_names,
@@ -951,16 +952,16 @@ pub fn print_trait_item(&mut self, ti: &hir::TraitItem) -> io::Result<()> {
         self.print_outer_attributes(&ti.attrs)?;
         match ti.node {
             hir::TraitItemKind::Const(ref ty, default) => {
-                self.print_associated_const(ti.name, &ty, default, &hir::Inherited)?;
+                self.print_associated_const(ti.ident, &ty, default, &hir::Inherited)?;
             }
             hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref arg_names)) => {
-                self.print_method_sig(ti.name, sig, &ti.generics, &hir::Inherited, arg_names,
+                self.print_method_sig(ti.ident, sig, &ti.generics, &hir::Inherited, arg_names,
                     None)?;
                 self.s.word(";")?;
             }
             hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => {
                 self.head("")?;
-                self.print_method_sig(ti.name, sig, &ti.generics, &hir::Inherited, &[],
+                self.print_method_sig(ti.ident, sig, &ti.generics, &hir::Inherited, &[],
                     Some(body))?;
                 self.nbsp()?;
                 self.end()?; // need to close a box
@@ -968,7 +969,7 @@ pub fn print_trait_item(&mut self, ti: &hir::TraitItem) -> io::Result<()> {
                 self.ann.nested(self, Nested::Body(body))?;
             }
             hir::TraitItemKind::Type(ref bounds, ref default) => {
-                self.print_associated_type(ti.name,
+                self.print_associated_type(ti.ident,
                                            Some(bounds),
                                            default.as_ref().map(|ty| &**ty))?;
             }
@@ -985,18 +986,18 @@ pub fn print_impl_item(&mut self, ii: &hir::ImplItem) -> io::Result<()> {
 
         match ii.node {
             hir::ImplItemKind::Const(ref ty, expr) => {
-                self.print_associated_const(ii.name, &ty, Some(expr), &ii.vis)?;
+                self.print_associated_const(ii.ident, &ty, Some(expr), &ii.vis)?;
             }
             hir::ImplItemKind::Method(ref sig, body) => {
                 self.head("")?;
-                self.print_method_sig(ii.name, sig, &ii.generics, &ii.vis, &[], Some(body))?;
+                self.print_method_sig(ii.ident, sig, &ii.generics, &ii.vis, &[], Some(body))?;
                 self.nbsp()?;
                 self.end()?; // need to close a box
                 self.end()?; // need to close a box
                 self.ann.nested(self, Nested::Body(body))?;
             }
             hir::ImplItemKind::Type(ref ty) => {
-                self.print_associated_type(ii.name, None, Some(ty))?;
+                self.print_associated_type(ii.ident, None, Some(ty))?;
             }
         }
         self.ann.post(self, NodeSubItem(ii.id))
@@ -1266,7 +1267,7 @@ fn print_expr_method_call(&mut self,
         let base_args = &args[1..];
         self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX)?;
         self.s.word(".")?;
-        self.print_name(segment.name)?;
+        self.print_ident(segment.ident)?;
 
         segment.with_generic_args(|generic_args| {
             if !generic_args.args.is_empty() || !generic_args.bindings.is_empty() {
@@ -1379,7 +1380,7 @@ pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
             }
             hir::ExprWhile(ref test, ref blk, opt_label) => {
                 if let Some(label) = opt_label {
-                    self.print_name(label.name)?;
+                    self.print_ident(label.ident)?;
                     self.word_space(":")?;
                 }
                 self.head("while")?;
@@ -1389,7 +1390,7 @@ pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
             }
             hir::ExprLoop(ref blk, opt_label, _) => {
                 if let Some(label) = opt_label {
-                    self.print_name(label.name)?;
+                    self.print_ident(label.ident)?;
                     self.word_space(":")?;
                 }
                 self.head("loop")?;
@@ -1425,7 +1426,7 @@ pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
             }
             hir::ExprBlock(ref blk, opt_label) => {
                 if let Some(label) = opt_label {
-                    self.print_name(label.name)?;
+                    self.print_ident(label.ident)?;
                     self.word_space(":")?;
                 }
                 // containing cbox, will be closed by print-block at }
@@ -1467,7 +1468,7 @@ pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
                 self.s.word("break")?;
                 self.s.space()?;
                 if let Some(label) = destination.label {
-                    self.print_name(label.name)?;
+                    self.print_ident(label.ident)?;
                     self.s.space()?;
                 }
                 if let Some(ref expr) = *opt_expr {
@@ -1479,7 +1480,7 @@ pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
                 self.s.word("continue")?;
                 self.s.space()?;
                 if let Some(label) = destination.label {
-                    self.print_name(label.name)?;
+                    self.print_ident(label.ident)?;
                     self.s.space()?
                 }
             }
@@ -1614,7 +1615,7 @@ pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> {
     }
 
     pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> {
-        self.print_ident(name.to_ident())
+        self.print_ident(ast::Ident::with_empty_ctxt(name))
     }
 
     pub fn print_for_decl(&mut self, loc: &hir::Local, coll: &hir::Expr) -> io::Result<()> {
@@ -1634,9 +1635,9 @@ pub fn print_path(&mut self,
             if i > 0 {
                 self.s.word("::")?
             }
-            if segment.name != keywords::CrateRoot.name() &&
-               segment.name != keywords::DollarCrate.name() {
-               self.print_name(segment.name)?;
+            if segment.ident.name != keywords::CrateRoot.name() &&
+               segment.ident.name != keywords::DollarCrate.name() {
+               self.print_ident(segment.ident)?;
                segment.with_generic_args(|generic_args| {
                    self.print_generic_args(generic_args, segment.infer_types,
                                            colons_before_params)
@@ -1665,9 +1666,9 @@ pub fn print_qpath(&mut self,
                     if i > 0 {
                         self.s.word("::")?
                     }
-                    if segment.name != keywords::CrateRoot.name() &&
-                       segment.name != keywords::DollarCrate.name() {
-                        self.print_name(segment.name)?;
+                    if segment.ident.name != keywords::CrateRoot.name() &&
+                       segment.ident.name != keywords::DollarCrate.name() {
+                        self.print_ident(segment.ident)?;
                         segment.with_generic_args(|generic_args| {
                             self.print_generic_args(generic_args,
                                                     segment.infer_types,
@@ -1679,7 +1680,7 @@ pub fn print_qpath(&mut self,
                 self.s.word(">")?;
                 self.s.word("::")?;
                 let item_segment = path.segments.last().unwrap();
-                self.print_name(item_segment.name)?;
+                self.print_ident(item_segment.ident)?;
                 item_segment.with_generic_args(|generic_args| {
                     self.print_generic_args(generic_args,
                                             item_segment.infer_types,
@@ -1691,7 +1692,7 @@ pub fn print_qpath(&mut self,
                 self.print_type(qself)?;
                 self.s.word(">")?;
                 self.s.word("::")?;
-                self.print_name(item_segment.name)?;
+                self.print_ident(item_segment.ident)?;
                 item_segment.with_generic_args(|generic_args| {
                     self.print_generic_args(generic_args,
                                             item_segment.infer_types,
@@ -1762,7 +1763,7 @@ fn print_generic_args(&mut self,
 
             for binding in generic_args.bindings.iter() {
                 start_or_comma(self)?;
-                self.print_name(binding.name)?;
+                self.print_ident(binding.ident)?;
                 self.s.space()?;
                 self.word_space("=")?;
                 self.print_type(&binding.ty)?;
@@ -1783,7 +1784,7 @@ pub fn print_pat(&mut self, pat: &hir::Pat) -> io::Result<()> {
         // is that it doesn't matter
         match pat.node {
             PatKind::Wild => self.s.word("_")?,
-            PatKind::Binding(binding_mode, _, ref path1, ref sub) => {
+            PatKind::Binding(binding_mode, _, ident, ref sub) => {
                 match binding_mode {
                     hir::BindingAnnotation::Ref => {
                         self.word_nbsp("ref")?;
@@ -1798,7 +1799,7 @@ pub fn print_pat(&mut self, pat: &hir::Pat) -> io::Result<()> {
                         self.word_nbsp("mut")?;
                     }
                 }
-                self.print_name(path1.node)?;
+                self.print_ident(ident)?;
                 if let Some(ref p) = *sub {
                     self.s.word("@")?;
                     self.print_pat(&p)?;
@@ -1963,7 +1964,7 @@ fn print_arm(&mut self, arm: &hir::Arm) -> io::Result<()> {
         match arm.body.node {
             hir::ExprBlock(ref blk, opt_label) => {
                 if let Some(label) = opt_label {
-                    self.print_name(label.name)?;
+                    self.print_ident(label.ident)?;
                     self.word_space(":")?;
                 }
                 // the block will close the pattern's ibox
@@ -1989,7 +1990,7 @@ pub fn print_fn(&mut self,
                     name: Option<ast::Name>,
                     generics: &hir::Generics,
                     vis: &hir::Visibility,
-                    arg_names: &[Spanned<ast::Name>],
+                    arg_names: &[ast::Ident],
                     body_id: Option<hir::BodyId>)
                     -> io::Result<()> {
         self.print_fn_header_info(header, vis)?;
@@ -2006,8 +2007,8 @@ pub fn print_fn(&mut self,
         assert!(arg_names.is_empty() || body_id.is_none());
         self.commasep(Inconsistent, &decl.inputs, |s, ty| {
             s.ibox(indent_unit)?;
-            if let Some(name) = arg_names.get(i) {
-                s.s.word(&name.node.as_str())?;
+            if let Some(arg_name) = arg_names.get(i) {
+                s.s.word(&arg_name.as_str())?;
                 s.s.word(":")?;
                 s.s.space()?;
             } else if let Some(body_id) = body_id {
@@ -2112,7 +2113,7 @@ pub fn print_generic_params(&mut self, generic_params: &[GenericParam]) -> io::R
     }
 
     pub fn print_generic_param(&mut self, param: &GenericParam) -> io::Result<()> {
-        self.print_name(param.name.name())?;
+        self.print_ident(param.name.ident())?;
         match param.kind {
             GenericParamKind::Lifetime { .. } => {
                 let mut sep = ":";
@@ -2143,7 +2144,7 @@ pub fn print_generic_param(&mut self, param: &GenericParam) -> io::Result<()> {
     }
 
     pub fn print_lifetime(&mut self, lifetime: &hir::Lifetime) -> io::Result<()> {
-        self.print_name(lifetime.name.name())
+        self.print_ident(lifetime.name.ident())
     }
 
     pub fn print_where_clause(&mut self, where_clause: &hir::WhereClause) -> io::Result<()> {
@@ -2241,7 +2242,7 @@ pub fn print_ty_fn(&mut self,
                        decl: &hir::FnDecl,
                        name: Option<ast::Name>,
                        generic_params: &[hir::GenericParam],
-                       arg_names: &[Spanned<ast::Name>])
+                       arg_names: &[ast::Ident])
                        -> io::Result<()> {
         self.ibox(indent_unit)?;
         if !generic_params.is_empty() {
diff --git a/src/librustc/ich/fingerprint.rs b/src/librustc/ich/fingerprint.rs
index f56f4e12e7a02b9b16ffa34c2f9119ad0c492235..2a3b1ce6a36a5075fcf33ff85b6224cfe705baf0 100644 (file)
@@ -52,7 +52,8 @@ pub fn to_hex(&self) -> String {
     pub fn encode_opaque(&self, encoder: &mut Encoder) -> EncodeResult {
         let bytes: [u8; 16] = unsafe { mem::transmute([self.0.to_le(), self.1.to_le()]) };
 
-        encoder.emit_raw_bytes(&bytes)
+        encoder.emit_raw_bytes(&bytes);
+        Ok(())
     }
 
     pub fn decode_opaque<'a>(decoder: &mut Decoder<'a>) -> Result<Fingerprint, String> {
@@ -92,7 +93,7 @@ impl serialize::UseSpecializedEncodable for Fingerprint { }
 
 impl serialize::UseSpecializedDecodable for Fingerprint { }
 
-impl<'a> serialize::SpecializedEncoder<Fingerprint> for serialize::opaque::Encoder<'a> {
+impl serialize::SpecializedEncoder<Fingerprint> for serialize::opaque::Encoder {
     fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
         f.encode_opaque(self)
     }
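Note: the hunk above suggests the opaque encoder's emit_raw_bytes no longer returns a Result (the caller now appends Ok(()) itself), and the SpecializedEncoder impl loses the encoder's lifetime parameter, presumably because the opaque Encoder now owns its output buffer. A rough standalone sketch of that shape, with stand-in types rather than the real rustc serialization API:

// Stand-in sketch: an in-memory byte-buffer encoder whose raw writes cannot fail,
// while the fallible signature is kept only to satisfy the encoding trait.

struct Encoder {
    data: Vec<u8>,
}

impl Encoder {
    // infallible: appending to a Vec<u8> has no error path
    fn emit_raw_bytes(&mut self, bytes: &[u8]) {
        self.data.extend_from_slice(bytes);
    }
}

type EncodeResult = Result<(), String>;

struct Fingerprint(u64, u64);

impl Fingerprint {
    fn encode_opaque(&self, encoder: &mut Encoder) -> EncodeResult {
        let mut bytes = [0u8; 16];
        bytes[..8].copy_from_slice(&self.0.to_le_bytes());
        bytes[8..].copy_from_slice(&self.1.to_le_bytes());

        encoder.emit_raw_bytes(&bytes);
        Ok(()) // the Result wrapper survives only for the trait signature
    }
}

fn main() {
    let mut enc = Encoder { data: Vec::new() };
    Fingerprint(1, 2).encode_opaque(&mut enc).unwrap();
    println!("{} bytes written", enc.data.len());
}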
diff --git a/src/librustc/ich/impls_hir.rs b/src/librustc/ich/impls_hir.rs
index b71b69474a72051efee603b7bc5149a2bf6bf928..d59a20c652250d12b9ee511ecc9a5ec5c599aa9e 100644 (file)
@@ -155,8 +155,7 @@ fn hash_stable<W: StableHasherResult>(&self,
 });
 
 impl_stable_hash_for!(struct hir::Label {
-    span,
-    name
+    ident
 });
 
 impl_stable_hash_for!(struct hir::Lifetime {
@@ -172,7 +171,7 @@ fn hash_stable<W: StableHasherResult>(&self,
 });
 
 impl_stable_hash_for!(struct hir::PathSegment {
-    name,
+    ident -> (ident.name),
     infer_types,
     args
 });
@@ -201,10 +200,10 @@ fn hash_stable<W: StableHasherResult>(&self,
 impl_stable_hash_for!(struct hir::GenericParam {
     id,
     name,
-    span,
     pure_wrt_drop,
     attrs,
     bounds,
+    span,
     kind
 });
 
@@ -278,7 +277,7 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 impl_stable_hash_for!(struct hir::TypeBinding {
     id,
-    name,
+    ident -> (ident.name),
     ty,
     span
 });
@@ -359,20 +358,11 @@ fn hash_stable<W: StableHasherResult>(&self,
     Return(t)
 });
 
-impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitRef {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::TraitRef {
-            ref path,
-            // Don't hash the ref_id. It is tracked via the thing it is used to access
-            ref_id: _,
-        } = *self;
-
-        path.hash_stable(hcx, hasher);
-    }
-}
-
+impl_stable_hash_for!(struct hir::TraitRef {
+    // Don't hash the ref_id. It is tracked via the thing it is used to access
+    ref_id -> _,
+    path,
+});
 
 impl_stable_hash_for!(struct hir::PolyTraitRef {
     bound_generic_params,
@@ -395,66 +385,32 @@ fn hash_stable<W: StableHasherResult>(&self,
     body
 });
 
+impl_stable_hash_for!(struct hir::Block {
+    stmts,
+    expr,
+    id -> _,
+    hir_id -> _,
+    rules,
+    span,
+    targeted_by_break,
+    recovered,
+});
 
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Block {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::Block {
-            ref stmts,
-            ref expr,
-            id: _,
-            hir_id: _,
-            rules,
-            span,
-            targeted_by_break,
-            recovered,
-        } = *self;
-
-        stmts.hash_stable(hcx, hasher);
-        expr.hash_stable(hcx, hasher);
-        rules.hash_stable(hcx, hasher);
-        span.hash_stable(hcx, hasher);
-        recovered.hash_stable(hcx, hasher);
-        targeted_by_break.hash_stable(hcx, hasher);
-    }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Pat {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::Pat {
-            id: _,
-            hir_id: _,
-            ref node,
-            ref span
-        } = *self;
-
-
-        node.hash_stable(hcx, hasher);
-        span.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct hir::Pat {
+    id -> _,
+    hir_id -> _,
+    node,
+    span,
+});
 
 impl_stable_hash_for_spanned!(hir::FieldPat);
 
-impl<'a> HashStable<StableHashingContext<'a>> for hir::FieldPat {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::FieldPat {
-            id: _,
-            ident,
-            ref pat,
-            is_shorthand,
-        } = *self;
-
-        ident.hash_stable(hcx, hasher);
-        pat.hash_stable(hcx, hasher);
-        is_shorthand.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct hir::FieldPat {
+    id -> _,
+    ident -> (ident.name),
+    pat,
+    is_shorthand,
+});
 
 impl_stable_hash_for!(enum hir::BindingAnnotation {
     Unannotated,
@@ -537,24 +493,13 @@ fn hash_stable<W: StableHasherResult>(&self,
     body
 });
 
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Field {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::Field {
-            id: _,
-            ident,
-            ref expr,
-            span,
-            is_shorthand,
-        } = *self;
-
-        ident.hash_stable(hcx, hasher);
-        expr.hash_stable(hcx, hasher);
-        span.hash_stable(hcx, hasher);
-        is_shorthand.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct hir::Field {
+    id -> _,
+    ident,
+    expr,
+    span,
+    is_shorthand,
+});
 
 impl_stable_hash_for_spanned!(ast::Name);
 
@@ -686,19 +631,10 @@ fn hash_stable<W: StableHasherResult>(&self,
     UnresolvedLabel
 });
 
-impl<'a> HashStable<StableHashingContext<'a>> for ast::Ident {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let ast::Ident {
-            name,
-            span,
-        } = *self;
-
-        name.hash_stable(hcx, hasher);
-        span.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct ast::Ident {
+    name,
+    span,
+});
 
 impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitItem {
     fn hash_stable<W: StableHasherResult>(&self,
@@ -707,7 +643,7 @@ fn hash_stable<W: StableHasherResult>(&self,
         let hir::TraitItem {
             id: _,
             hir_id: _,
-            name,
+            ident,
             ref attrs,
             ref generics,
             ref node,
@@ -715,7 +651,7 @@ fn hash_stable<W: StableHasherResult>(&self,
         } = *self;
 
         hcx.hash_hir_item_like(|hcx| {
-            name.hash_stable(hcx, hasher);
+            ident.name.hash_stable(hcx, hasher);
             attrs.hash_stable(hcx, hasher);
             generics.hash_stable(hcx, hasher);
             node.hash_stable(hcx, hasher);
@@ -742,7 +678,7 @@ fn hash_stable<W: StableHasherResult>(&self,
         let hir::ImplItem {
             id: _,
             hir_id: _,
-            name,
+            ident,
             ref vis,
             defaultness,
             ref attrs,
@@ -752,7 +688,7 @@ fn hash_stable<W: StableHasherResult>(&self,
         } = *self;
 
         hcx.hash_hir_item_like(|hcx| {
-            name.hash_stable(hcx, hasher);
+            ident.name.hash_stable(hcx, hasher);
             vis.hash_stable(hcx, hasher);
             defaultness.hash_stable(hcx, hasher);
             attrs.hash_stable(hcx, hasher);
@@ -818,21 +754,13 @@ fn hash_stable<W: StableHasherResult>(&self,
     Negative
 });
 
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Mod {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::Mod {
-            inner,
-            // We are not hashing the IDs of the items contained in the module.
-            // This is harmless and matches the current behavior but it's not
-            // actually correct. See issue #40876.
-            item_ids: _,
-        } = *self;
-
-        inner.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct hir::Mod {
+    inner,
+    // We are not hashing the IDs of the items contained in the module.
+    // This is harmless and matches the current behavior but it's not
+    // actually correct. See issue #40876.
+    item_ids -> _,
+});
 
 impl_stable_hash_for!(struct hir::ForeignMod {
     abi,
@@ -860,7 +788,7 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 impl_stable_hash_for!(struct hir::StructField {
     span,
-    ident,
+    ident -> (ident.name),
     vis,
     id,
     ty,
@@ -918,7 +846,7 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 impl_stable_hash_for!(struct hir::TraitItemRef {
     id,
-    name,
+    ident -> (ident.name),
     kind,
     span,
     defaultness
@@ -926,15 +854,14 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 impl_stable_hash_for!(struct hir::ImplItemRef {
     id,
-    name,
+    ident -> (ident.name),
     kind,
     span,
     vis,
     defaultness
 });
 
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::AssociatedItemKind {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::AssociatedItemKind {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'a>,
                                           hasher: &mut StableHasher<W>) {
@@ -1014,45 +941,22 @@ fn to_stable_hash_key(&self,
     is_indirect
 });
 
-impl<'a> HashStable<StableHashingContext<'a>> for hir::GlobalAsm {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::GlobalAsm {
-            asm,
-            ctxt: _
-        } = *self;
-
-        asm.hash_stable(hcx, hasher);
-    }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>> for hir::InlineAsm {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::InlineAsm {
-            asm,
-            asm_str_style,
-            ref outputs,
-            ref inputs,
-            ref clobbers,
-            volatile,
-            alignstack,
-            dialect,
-            ctxt: _, // This is used for error reporting
-        } = *self;
+impl_stable_hash_for!(struct hir::GlobalAsm {
+    asm,
+    ctxt -> _, // This is used for error reporting
+});
 
-        asm.hash_stable(hcx, hasher);
-        asm_str_style.hash_stable(hcx, hasher);
-        outputs.hash_stable(hcx, hasher);
-        inputs.hash_stable(hcx, hasher);
-        clobbers.hash_stable(hcx, hasher);
-        volatile.hash_stable(hcx, hasher);
-        alignstack.hash_stable(hcx, hasher);
-        dialect.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct hir::InlineAsm {
+    asm,
+    asm_str_style,
+    outputs,
+    inputs,
+    clobbers,
+    volatile,
+    alignstack,
+    dialect,
+    ctxt -> _, // This is used for error reporting
+});
 
 impl_stable_hash_for!(enum hir::def::CtorKind {
     Fn,
@@ -1115,8 +1019,7 @@ fn hash_stable<W: StableHasherResult>(&self,
     NotConst
 });
 
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::def_id::DefIndex {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::def_id::DefIndex {
 
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'a>,
@@ -1142,8 +1045,7 @@ fn to_stable_hash_key(&self, hcx: &StableHashingContext<'a>) -> DefPathHash {
     span
 });
 
-impl<'a> HashStable<StableHashingContext<'a>>
-for ::middle::lang_items::LangItem {
+impl<'a> HashStable<StableHashingContext<'a>> for ::middle::lang_items::LangItem {
     fn hash_stable<W: StableHasherResult>(&self,
                                           _: &mut StableHashingContext<'a>,
                                           hasher: &mut StableHasher<W>) {
@@ -1156,8 +1058,7 @@ fn hash_stable<W: StableHasherResult>(&self,
     missing
 });
 
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::TraitCandidate {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitCandidate {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'a>,
                                           hasher: &mut StableHasher<W>) {
@@ -1191,26 +1092,13 @@ fn to_stable_hash_key(&self,
     }
 }
 
-impl<'hir> HashStable<StableHashingContext<'hir>> for hir::CodegenFnAttrs
-{
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'hir>,
-                                          hasher: &mut StableHasher<W>) {
-        let hir::CodegenFnAttrs {
-            flags,
-            inline,
-            export_name,
-            ref target_features,
-            linkage,
-        } = *self;
-
-        flags.hash_stable(hcx, hasher);
-        inline.hash_stable(hcx, hasher);
-        export_name.hash_stable(hcx, hasher);
-        target_features.hash_stable(hcx, hasher);
-        linkage.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct hir::CodegenFnAttrs {
+    flags,
+    inline,
+    export_name,
+    target_features,
+    linkage,
+});
 
 impl<'hir> HashStable<StableHashingContext<'hir>> for hir::CodegenFnAttrFlags
 {
index 935bc4c8c6d8c532e017942150cc4ad53fe38d56..0dca7d6d856fe80059a82d1805a5ebd77899385e 100644 (file)
@@ -395,6 +395,7 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
     format,
     allow_internal_unstable,
     allow_internal_unsafe,
+    local_inner_macros,
     edition
 });
 
@@ -455,27 +456,21 @@ fn hash_stable<W: StableHasherResult>(&self,
         src_hash.hash_stable(hcx, hasher);
 
         // We only hash the relative position within this filemap
-        lines.with_lock(|lines| {
-            lines.len().hash_stable(hcx, hasher);
-            for &line in lines.iter() {
-                stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
-            }
-        });
+        lines.len().hash_stable(hcx, hasher);
+        for &line in lines.iter() {
+            stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
+        }
 
         // We only hash the relative position within this filemap
-        multibyte_chars.with_lock(|multibyte_chars| {
-            multibyte_chars.len().hash_stable(hcx, hasher);
-            for &char_pos in multibyte_chars.iter() {
-                stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
-            }
-        });
+        multibyte_chars.len().hash_stable(hcx, hasher);
+        for &char_pos in multibyte_chars.iter() {
+            stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
+        }
 
-        non_narrow_chars.with_lock(|non_narrow_chars| {
-            non_narrow_chars.len().hash_stable(hcx, hasher);
-            for &char_pos in non_narrow_chars.iter() {
-                stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher);
-            }
-        });
+        non_narrow_chars.len().hash_stable(hcx, hasher);
+        for &char_pos in non_narrow_chars.iter() {
+            stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher);
+        }
     }
 }
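The comments in this hunk note that only positions relative to the filemap's `start_pos` feed the hash, so the result does not depend on where the file happens to sit in the overall codemap. A minimal, self-contained sketch of that idea using `std::hash` (the helper body below is an assumption; rustc's actual `stable_byte_pos` may differ in detail):

    use std::hash::{Hash, Hasher};

    #[derive(Clone, Copy)]
    struct BytePos(u32);

    // Assumed behaviour of a `stable_byte_pos`-style helper: hash the offset
    // inside the file rather than the absolute position in the codemap.
    fn relative_pos(pos: BytePos, start_pos: BytePos) -> u32 {
        pos.0 - start_pos.0
    }

    fn hash_line_starts<H: Hasher>(lines: &[BytePos], start_pos: BytePos, hasher: &mut H) {
        lines.len().hash(hasher);
        for &line in lines {
            relative_pos(line, start_pos).hash(hasher);
        }
    }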
 
index 684a1d2965838e2bda85440abf9f9c3d63762bba..8391cc6d9ba99e286f0e4697b8042bef0d12865d 100644 (file)
@@ -357,24 +357,18 @@ fn hash_stable<W: StableHasherResult>(&self,
     Relative(distance)
 });
 
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::FieldDef {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let ty::FieldDef { did, ident, vis } = *self;
-
-        did.hash_stable(hcx, hasher);
-        ident.name.hash_stable(hcx, hasher);
-        vis.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct ty::FieldDef {
+    did,
+    ident -> (ident.name),
+    vis,
+});
 
 impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::middle::const_val::ConstVal<'gcx> {
+for ::mir::interpret::ConstValue<'gcx> {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'a>,
                                           hasher: &mut StableHasher<W>) {
-        use middle::const_val::ConstVal::*;
+        use mir::interpret::ConstValue::*;
 
         mem::discriminant(self).hash_stable(hcx, hasher);
 
@@ -383,23 +377,6 @@ fn hash_stable<W: StableHasherResult>(&self,
                 def_id.hash_stable(hcx, hasher);
                 substs.hash_stable(hcx, hasher);
             }
-            Value(ref value) => {
-                value.hash_stable(hcx, hasher);
-            }
-        }
-    }
-}
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::mir::interpret::ConstValue<'gcx> {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        use mir::interpret::ConstValue::*;
-
-        mem::discriminant(self).hash_stable(hcx, hasher);
-
-        match *self {
             Scalar(val) => {
                 val.hash_stable(hcx, hasher);
             }
@@ -503,40 +480,18 @@ fn hash_stable<W: StableHasherResult>(&self,
     val
 });
 
-impl_stable_hash_for!(struct ::middle::const_val::ConstEvalErr<'tcx> {
+impl_stable_hash_for!(struct ::mir::interpret::ConstEvalErr<'tcx> {
     span,
-    kind
+    stacktrace,
+    error
 });
 
-impl_stable_hash_for!(struct ::middle::const_val::FrameInfo {
+impl_stable_hash_for!(struct ::mir::interpret::FrameInfo {
     span,
     lint_root,
     location
 });
 
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::middle::const_val::ErrKind<'gcx> {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        use middle::const_val::ErrKind::*;
-
-        mem::discriminant(self).hash_stable(hcx, hasher);
-
-        match *self {
-            TypeckError |
-            CouldNotResolve |
-            CheckMatchError => {
-                // nothing to do
-            }
-            Miri(ref err, ref trace) => {
-                err.hash_stable(hcx, hasher);
-                trace.hash_stable(hcx, hasher);
-            },
-        }
-    }
-}
-
 impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs });
 impl_stable_hash_for!(struct ty::GeneratorSubsts<'tcx> { substs });
 
@@ -545,15 +500,7 @@ fn hash_stable<W: StableHasherResult>(&self,
     predicates
 });
 
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::mir::interpret::EvalError<'gcx> {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        self.kind.hash_stable(hcx, hasher)
-    }
-}
+impl_stable_hash_for!(struct ::mir::interpret::EvalError<'tcx> { kind });
 
 impl<'a, 'gcx, O: HashStable<StableHashingContext<'a>>> HashStable<StableHashingContext<'a>>
 for ::mir::interpret::EvalErrorKind<'gcx, O> {
@@ -593,6 +540,8 @@ fn hash_stable<W: StableHasherResult>(&self,
             ReadFromReturnPointer |
             UnimplementedTraitSelection |
             TypeckError |
+            TooGeneric |
+            CheckMatchError |
             DerefFunctionPointer |
             ExecuteMemory |
             OverflowNeg |
@@ -726,28 +675,15 @@ fn hash_stable<W: StableHasherResult>(&self,
     Struct(index)
 });
 
-impl<'a> HashStable<StableHashingContext<'a>> for ty::Generics {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let ty::Generics {
-            parent,
-            ref parent_count,
-            ref params,
-
-            // Reverse map to each param's `index` field, from its `def_id`.
-            param_def_id_to_index: _, // Don't hash this
-            has_self,
-            has_late_bound_regions,
-        } = *self;
-
-        parent.hash_stable(hcx, hasher);
-        parent_count.hash_stable(hcx, hasher);
-        params.hash_stable(hcx, hasher);
-        has_self.hash_stable(hcx, hasher);
-        has_late_bound_regions.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct ty::Generics {
+    parent,
+    parent_count,
+    params,
+    // Reverse map to each param's `index` field, from its `def_id`.
+    param_def_id_to_index -> _, // Don't hash this
+    has_self,
+    has_late_bound_regions,
+});
 
 impl_stable_hash_for!(struct ty::GenericParamDef {
     name,
@@ -1079,61 +1015,34 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
-impl<'a> HashStable<StableHashingContext<'a>> for ty::TraitDef {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let ty::TraitDef {
-            // We already have the def_path_hash below, no need to hash it twice
-            def_id: _,
-            unsafety,
-            paren_sugar,
-            has_auto_impl,
-            def_path_hash,
-        } = *self;
-
-        unsafety.hash_stable(hcx, hasher);
-        paren_sugar.hash_stable(hcx, hasher);
-        has_auto_impl.hash_stable(hcx, hasher);
-        def_path_hash.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct ty::TraitDef {
+    // We already have the def_path_hash below, no need to hash it twice
+    def_id -> _,
+    unsafety,
+    paren_sugar,
+    has_auto_impl,
+    def_path_hash,
+});
 
 impl_stable_hash_for!(struct ty::Destructor {
     did
 });
 
-impl<'a> HashStable<StableHashingContext<'a>> for ty::CrateVariancesMap {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let ty::CrateVariancesMap {
-            ref variances,
-            // This is just an irrelevant helper value.
-            empty_variance: _,
-        } = *self;
-
-        variances.hash_stable(hcx, hasher);
-    }
-}
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::CratePredicatesMap<'gcx> {
-    fn hash_stable<W: StableHasherResult>(&self,
-                                          hcx: &mut StableHashingContext<'a>,
-                                          hasher: &mut StableHasher<W>) {
-        let ty::CratePredicatesMap {
-            ref predicates,
-            // This is just an irrelevant helper value.
-            empty_predicate: _,
-        } = *self;
+impl_stable_hash_for!(struct ty::CrateVariancesMap {
+    variances,
+    // This is just an irrelevant helper value.
+    empty_variance -> _,
+});
 
-        predicates.hash_stable(hcx, hasher);
-    }
-}
+impl_stable_hash_for!(struct ty::CratePredicatesMap<'tcx> {
+    predicates,
+    // This is just an irrelevant helper value.
+    empty_predicate -> _,
+});
 
 impl_stable_hash_for!(struct ty::AssociatedItem {
     def_id,
-    name,
+    ident -> (ident.name),
     kind,
     vis,
     defaultness,
diff --git a/src/librustc/infer/canonical.rs b/src/librustc/infer/canonical.rs
deleted file mode 100644 (file)
index ef11cc0..0000000
+++ /dev/null
@@ -1,925 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! **Canonicalization** is the key to constructing a query in the
-//! middle of type inference. Ordinarily, it is not possible to store
-//! types from type inference in query keys, because they contain
-//! references to inference variables whose lifetimes are too short
-//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
-//! produces two things:
-//!
-//! - a value T2 where each unbound inference variable has been
-//!   replaced with a **canonical variable**;
-//! - a map M (of type `CanonicalVarValues`) from those canonical
-//!   variables back to the original.
-//!
-//! We can then do queries using T2. These will give back constraints
-//! on the canonical variables which can be translated, using the map
-//! M, into constraints in our source context. This process of
-//! translating the results back is done by the
-//! `instantiate_query_result` method.
-//!
-//! For a more detailed look at what is happening here, check
-//! out the [chapter in the rustc guide][c].
-//!
-//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
-
-use infer::{InferCtxt, InferOk, InferResult, RegionVariableOrigin, TypeVariableOrigin};
-use rustc_data_structures::indexed_vec::Idx;
-use serialize::UseSpecializedDecodable;
-use std::fmt::Debug;
-use std::ops::Index;
-use std::sync::atomic::Ordering;
-use syntax::codemap::Span;
-use traits::{Obligation, ObligationCause, PredicateObligation};
-use ty::{self, CanonicalVar, Lift, Region, Slice, Ty, TyCtxt, TypeFlags};
-use ty::subst::{Kind, UnpackedKind};
-use ty::fold::{TypeFoldable, TypeFolder};
-
-use rustc_data_structures::indexed_vec::IndexVec;
-use rustc_data_structures::fx::FxHashMap;
-
-/// A "canonicalized" type `V` is one where all free inference
-/// variables have been rewritten to "canonical vars". These are
-/// numbered starting from 0 in order of first appearance.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct Canonical<'gcx, V> {
-    pub variables: CanonicalVarInfos<'gcx>,
-    pub value: V,
-}
-
-pub type CanonicalVarInfos<'gcx> = &'gcx Slice<CanonicalVarInfo>;
-
-impl<'gcx> UseSpecializedDecodable for CanonicalVarInfos<'gcx> { }
-
-/// A set of values corresponding to the canonical variables from some
-/// `Canonical`. You can give these values to
-/// `canonical_value.substitute` to substitute them into the canonical
-/// value at the right places.
-///
-/// When you canonicalize a value `V`, you get back one of these
-/// vectors with the original values that were replaced by canonical
-/// variables.
-///
-/// You can also use `infcx.fresh_inference_vars_for_canonical_vars`
-/// to get back a `CanonicalVarValues` containing fresh inference
-/// variables.
-#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct CanonicalVarValues<'tcx> {
-    pub var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
-}
-
-/// Information about a canonical variable that is included with the
-/// canonical value. This is sufficient information for code to create
-/// a copy of the canonical value in some other inference context,
-/// with fresh inference variables replacing the canonical values.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct CanonicalVarInfo {
-    pub kind: CanonicalVarKind,
-}
-
-/// Describes the "kind" of the canonical variable. This is a "kind"
-/// in the type-theory sense of the term -- i.e., a "meta" type system
-/// that analyzes type-like values.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub enum CanonicalVarKind {
-    /// Some kind of type inference variable.
-    Ty(CanonicalTyVarKind),
-
-    /// Region variable `'?R`.
-    Region,
-}
-
-/// Rust actually has more than one category of type variables;
-/// notably, the type variables we create for literals (e.g., 22 or
-/// 22.) can only be instantiated with integral/float types (e.g.,
-/// usize or f32). In order to faithfully reproduce a type, we need to
-/// know what set of types a given type variable can be unified with.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub enum CanonicalTyVarKind {
-    /// General type variable `?T` that can be unified with arbitrary types.
-    General,
-
-    /// Integral type variable `?I` (that can only be unified with integral types).
-    Int,
-
-    /// Floating-point type variable `?F` (that can only be unified with float types).
-    Float,
-}
-
-/// After we execute a query with a canonicalized key, we get back a
-/// `Canonical<QueryResult<..>>`. You can use
-/// `instantiate_query_result` to access the data in this result.
-#[derive(Clone, Debug)]
-pub struct QueryResult<'tcx, R> {
-    pub var_values: CanonicalVarValues<'tcx>,
-    pub region_constraints: Vec<QueryRegionConstraint<'tcx>>,
-    pub certainty: Certainty,
-    pub value: R,
-}
-
-/// Indicates whether or not we were able to prove the query to be
-/// true.
-#[derive(Copy, Clone, Debug)]
-pub enum Certainty {
-    /// The query is known to be true, presuming that you apply the
-    /// given `var_values` and the region-constraints are satisfied.
-    Proven,
-
-    /// The query is not known to be true, but also not known to be
-    /// false. The `var_values` represent *either* values that must
-    /// hold in order for the query to be true, or helpful tips that
-    /// *might* make it true. Currently rustc's trait solver cannot
-    /// distinguish the two (e.g., due to our preference for where
-    /// clauses over impls).
-    ///
-    /// After some unifications and things have been done, it makes
-    /// sense to try and prove again -- of course, at that point, the
-    /// canonical form will be different, making this a distinct
-    /// query.
-    Ambiguous,
-}
-
-impl Certainty {
-    pub fn is_proven(&self) -> bool {
-        match self {
-            Certainty::Proven => true,
-            Certainty::Ambiguous => false,
-        }
-    }
-
-    pub fn is_ambiguous(&self) -> bool {
-        !self.is_proven()
-    }
-}
-
-impl<'tcx, R> QueryResult<'tcx, R> {
-    pub fn is_proven(&self) -> bool {
-        self.certainty.is_proven()
-    }
-
-    pub fn is_ambiguous(&self) -> bool {
-        !self.is_proven()
-    }
-}
-
-impl<'tcx, R> Canonical<'tcx, QueryResult<'tcx, R>> {
-    pub fn is_proven(&self) -> bool {
-        self.value.is_proven()
-    }
-
-    pub fn is_ambiguous(&self) -> bool {
-        !self.is_proven()
-    }
-}
-
-pub type QueryRegionConstraint<'tcx> = ty::Binder<ty::OutlivesPredicate<Kind<'tcx>, Region<'tcx>>>;
-
-/// Trait implemented by values that can be canonicalized. It mainly
-/// serves to identify the interning table we will use.
-pub trait Canonicalize<'gcx: 'tcx, 'tcx>: TypeFoldable<'tcx> + Lift<'gcx> {
-    type Canonicalized: 'gcx + Debug;
-
-    /// After a value has been fully canonicalized and lifted, this
-    /// method will allocate it in a global arena.
-    fn intern(
-        gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, Self::Lifted>,
-    ) -> Self::Canonicalized;
-}
-
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
-    /// Creates a substitution S for the canonical value with fresh
-    /// inference variables and applies it to the canonical value.
-    /// Returns both the instantiated result *and* the substitution S.
-    ///
-    /// This is useful at the start of a query: it basically brings
-    /// the canonical value "into scope" within your new infcx. At the
-    /// end of processing, the substitution S (once canonicalized)
-    /// then represents the values that you computed for each of the
-    /// canonical inputs to your query.
-    pub fn instantiate_canonical_with_fresh_inference_vars<T>(
-        &self,
-        span: Span,
-        canonical: &Canonical<'tcx, T>,
-    ) -> (T, CanonicalVarValues<'tcx>)
-    where
-        T: TypeFoldable<'tcx>,
-    {
-        let canonical_inference_vars =
-            self.fresh_inference_vars_for_canonical_vars(span, canonical.variables);
-        let result = canonical.substitute(self.tcx, &canonical_inference_vars);
-        (result, canonical_inference_vars)
-    }
-
-    /// Given the "infos" about the canonical variables from some
-    /// canonical, creates fresh inference variables with the same
-    /// characteristics. You can then use `substitute` to instantiate
-    /// the canonical variable with these inference variables.
-    pub fn fresh_inference_vars_for_canonical_vars(
-        &self,
-        span: Span,
-        variables: &Slice<CanonicalVarInfo>,
-    ) -> CanonicalVarValues<'tcx> {
-        let var_values: IndexVec<CanonicalVar, Kind<'tcx>> = variables
-            .iter()
-            .map(|info| self.fresh_inference_var_for_canonical_var(span, *info))
-            .collect();
-
-        CanonicalVarValues { var_values }
-    }
-
-    /// Given the "info" about a canonical variable, creates a fresh
-    /// inference variable with the same characteristics.
-    pub fn fresh_inference_var_for_canonical_var(
-        &self,
-        span: Span,
-        cv_info: CanonicalVarInfo,
-    ) -> Kind<'tcx> {
-        match cv_info.kind {
-            CanonicalVarKind::Ty(ty_kind) => {
-                let ty = match ty_kind {
-                    CanonicalTyVarKind::General => {
-                        self.next_ty_var(
-                            TypeVariableOrigin::MiscVariable(span),
-                        )
-                    }
-
-                    CanonicalTyVarKind::Int => self.tcx.mk_int_var(self.next_int_var_id()),
-
-                    CanonicalTyVarKind::Float => self.tcx.mk_float_var(self.next_float_var_id()),
-                };
-                ty.into()
-            }
-
-            CanonicalVarKind::Region => {
-                self.next_region_var(RegionVariableOrigin::MiscVariable(span)).into()
-            }
-        }
-    }
-
-    /// Given the (canonicalized) result to a canonical query,
-    /// instantiates the result so it can be used, plugging in the
-    /// values from the canonical query. (Note that the result may
-    /// have been ambiguous; you should check the certainty level of
-    /// the query before applying this function.)
-    ///
-    /// To get a good understanding of what is happening here, check
-    /// out the [chapter in the rustc guide][c].
-    ///
-    /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#processing-the-canonicalized-query-result
-    pub fn instantiate_query_result<R>(
-        &self,
-        cause: &ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        original_values: &CanonicalVarValues<'tcx>,
-        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
-    ) -> InferResult<'tcx, R>
-    where
-        R: Debug + TypeFoldable<'tcx>,
-    {
-        debug!(
-            "instantiate_query_result(original_values={:#?}, query_result={:#?})",
-            original_values, query_result,
-        );
-
-        // Every canonical query result includes values for each of
-        // the inputs to the query. Therefore, we begin by unifying
-        // these values with the original inputs that were
-        // canonicalized.
-        let result_values = &query_result.value.var_values;
-        assert_eq!(original_values.len(), result_values.len());
-
-        // Quickly try to find initial values for the canonical
-        // variables in the result in terms of the query. We do this
-        // by iterating down the values that the query gave to each of
-        // the canonical inputs. If we find that one of those values
-        // is directly equal to one of the canonical variables in the
-        // result, then we can type the corresponding value from the
-        // input. See the example above.
-        let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =
-            IndexVec::from_elem_n(None, query_result.variables.len());
-
-        // In terms of our example above, we are iterating over pairs like:
-        // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
-        for (original_value, result_value) in original_values.iter().zip(result_values) {
-            match result_value.unpack() {
-                UnpackedKind::Type(result_value) => {
-                    // e.g., here `result_value` might be `?0` in the example above...
-                    if let ty::TyInfer(ty::InferTy::CanonicalTy(index)) = result_value.sty {
-                        // in which case we would set `canonical_vars[0]` to `Some(?U)`.
-                        opt_values[index] = Some(original_value);
-                    }
-                }
-                UnpackedKind::Lifetime(result_value) => {
-                    // e.g., here `result_value` might be `'?1` in the example above...
-                    if let &ty::RegionKind::ReCanonical(index) = result_value {
-                        // in which case we would set `canonical_vars[0]` to `Some('static)`.
-                        opt_values[index] = Some(original_value);
-                    }
-                }
-            }
-        }
-
-        // Create a result substitution: if we found a value for a
-        // given variable in the loop above, use that. Otherwise, use
-        // a fresh inference variable.
-        let result_subst = &CanonicalVarValues {
-            var_values: query_result
-                .variables
-                .iter()
-                .enumerate()
-                .map(|(index, info)| match opt_values[CanonicalVar::new(index)] {
-                    Some(k) => k,
-                    None => self.fresh_inference_var_for_canonical_var(cause.span, *info),
-                })
-                .collect(),
-        };
-
-        // Unify the original values for the canonical variables in
-        // the input with the value found in the query
-        // post-substitution. Often, but not always, this is a no-op,
-        // because we already found the mapping in the first step.
-        let substituted_values = |index: CanonicalVar| -> Kind<'tcx> {
-            query_result.substitute_projected(self.tcx, result_subst, |v| &v.var_values[index])
-        };
-        let mut obligations =
-            self.unify_canonical_vars(cause, param_env, original_values, substituted_values)?
-                .into_obligations();
-
-        obligations.extend(self.query_region_constraints_into_obligations(
-            cause,
-            param_env,
-            &query_result.value.region_constraints,
-            result_subst,
-        ));
-
-        let user_result: R =
-            query_result.substitute_projected(self.tcx, result_subst, |q_r| &q_r.value);
-
-        Ok(InferOk {
-            value: user_result,
-            obligations,
-        })
-    }
-
-    /// Converts the region constraints resulting from a query into an
-    /// iterator of obligations.
-    fn query_region_constraints_into_obligations<'a>(
-        &'a self,
-        cause: &'a ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
-        result_subst: &'a CanonicalVarValues<'tcx>,
-    ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {
-        Box::new(unsubstituted_region_constraints.iter().map(move |constraint| {
-            let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
-            let k1 = substitute_value(self.tcx, result_subst, k1);
-            let r2 = substitute_value(self.tcx, result_subst, r2);
-            match k1.unpack() {
-                UnpackedKind::Lifetime(r1) =>
-                    Obligation::new(
-                        cause.clone(),
-                        param_env,
-                        ty::Predicate::RegionOutlives(
-                            ty::Binder::dummy(ty::OutlivesPredicate(r1, r2))),
-                    ),
-
-                UnpackedKind::Type(t1) =>
-                    Obligation::new(
-                        cause.clone(),
-                        param_env,
-                        ty::Predicate::TypeOutlives(
-                            ty::Binder::dummy(ty::OutlivesPredicate(t1, r2))),
-                    ),
-            }
-        })) as Box<dyn Iterator<Item = _>>
-    }
-
-    /// Given two sets of values for the same set of canonical variables, unify them.
-    /// The second set is produced lazily by supplying indices from the first set.
-    fn unify_canonical_vars(
-        &self,
-        cause: &ObligationCause<'tcx>,
-        param_env: ty::ParamEnv<'tcx>,
-        variables1: &CanonicalVarValues<'tcx>,
-        variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,
-    ) -> InferResult<'tcx, ()> {
-        self.commit_if_ok(|_| {
-            let mut obligations = vec![];
-            for (index, value1) in variables1.var_values.iter_enumerated() {
-                let value2 = variables2(index);
-
-                match (value1.unpack(), value2.unpack()) {
-                    (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
-                        obligations
-                            .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
-                    }
-                    (
-                        UnpackedKind::Lifetime(ty::ReErased),
-                        UnpackedKind::Lifetime(ty::ReErased),
-                    ) => {
-                        // no action needed
-                    }
-                    (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {
-                        obligations
-                            .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
-                    }
-                    _ => {
-                        bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
-                    }
-                }
-            }
-            Ok(InferOk {
-                value: (),
-                obligations,
-            })
-        })
-    }
-
-    /// Canonicalizes a query value `V`. When we canonicalize a query,
-    /// we not only canonicalize unbound inference variables, but we
-    /// *also* replace all free regions whatsoever. So for example a
-    /// query like `T: Trait<'static>` would be canonicalized to
-    ///
-    /// ```text
-    /// T: Trait<'?0>
-    /// ```
-    ///
-    /// with a mapping M that maps `'?0` to `'static`.
-    ///
-    /// To get a good understanding of what is happening here, check
-    /// out the [chapter in the rustc guide][c].
-    ///
-    /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query
-    pub fn canonicalize_query<V>(&self, value: &V) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
-    where
-        V: Canonicalize<'gcx, 'tcx>,
-    {
-        self.tcx.sess.perf_stats.queries_canonicalized.fetch_add(1, Ordering::Relaxed);
-
-        Canonicalizer::canonicalize(
-            value,
-            Some(self),
-            self.tcx,
-            CanonicalizeAllFreeRegions(true),
-        )
-    }
-
-    /// Canonicalizes a query *response* `V`. When we canonicalize a
-    /// query response, we only canonicalize unbound inference
-    /// variables, and we leave other free regions alone. So,
-    /// continuing with the example from `canonicalize_query`, if
-    /// there was an input query `T: Trait<'static>`, it would have
-    /// been canonicalized to
-    ///
-    /// ```text
-    /// T: Trait<'?0>
-    /// ```
-    ///
-    /// with a mapping M that maps `'?0` to `'static`. But if we found that there
-    /// exists only one possible impl of `Trait`, and it looks like
-    ///
-    ///     impl<T> Trait<'static> for T { .. }
-    ///
-    /// then we would prepare a query result R that (among other
-    /// things) includes a mapping to `'?0 := 'static`. When
-    /// canonicalizing this query result R, we would leave this
-    /// reference to `'static` alone.
-    ///
-    /// To get a good understanding of what is happening here, check
-    /// out the [chapter in the rustc guide][c].
-    ///
-    /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query-result
-    pub fn canonicalize_response<V>(
-        &self,
-        value: &V,
-    ) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
-    where
-        V: Canonicalize<'gcx, 'tcx>,
-    {
-        Canonicalizer::canonicalize(
-            value,
-            Some(self),
-            self.tcx,
-            CanonicalizeAllFreeRegions(false),
-        )
-    }
-}
-
-/// If this flag is true, then all free regions will be replaced with
-/// a canonical var. This is used to make queries as generic as
-/// possible. For example, the query `F: Foo<'static>` would be
-/// canonicalized to `F: Foo<'0>`.
-struct CanonicalizeAllFreeRegions(bool);
-
-struct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
-    infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
-    tcx: TyCtxt<'cx, 'gcx, 'tcx>,
-    variables: IndexVec<CanonicalVar, CanonicalVarInfo>,
-    indices: FxHashMap<Kind<'tcx>, CanonicalVar>,
-    var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
-    canonicalize_all_free_regions: CanonicalizeAllFreeRegions,
-    needs_canonical_flags: TypeFlags,
-}
-
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {
-    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
-        self.tcx
-    }
-
-    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
-        match *r {
-            ty::ReLateBound(..) => {
-                // leave bound regions alone
-                r
-            }
-
-            ty::ReVar(vid) => {
-                let r = self.infcx
-                    .unwrap()
-                    .borrow_region_constraints()
-                    .opportunistic_resolve_var(self.tcx, vid);
-                let info = CanonicalVarInfo {
-                    kind: CanonicalVarKind::Region,
-                };
-                debug!(
-                    "canonical: region var found with vid {:?}, \
-                     opportunistically resolved to {:?}",
-                    vid, r
-                );
-                let cvar = self.canonical_var(info, r.into());
-                self.tcx().mk_region(ty::ReCanonical(cvar))
-            }
-
-            ty::ReStatic
-            | ty::ReEarlyBound(..)
-            | ty::ReFree(_)
-            | ty::ReScope(_)
-            | ty::ReSkolemized(..)
-            | ty::ReEmpty
-            | ty::ReErased => {
-                if self.canonicalize_all_free_regions.0 {
-                    let info = CanonicalVarInfo {
-                        kind: CanonicalVarKind::Region,
-                    };
-                    let cvar = self.canonical_var(info, r.into());
-                    self.tcx().mk_region(ty::ReCanonical(cvar))
-                } else {
-                    r
-                }
-            }
-
-            ty::ReClosureBound(..) | ty::ReCanonical(_) => {
-                bug!("canonical region encountered during canonicalization")
-            }
-        }
-    }
-
-    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
-        match t.sty {
-            ty::TyInfer(ty::TyVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::General, t),
-
-            ty::TyInfer(ty::IntVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Int, t),
-
-            ty::TyInfer(ty::FloatVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Float, t),
-
-            ty::TyInfer(ty::FreshTy(_))
-            | ty::TyInfer(ty::FreshIntTy(_))
-            | ty::TyInfer(ty::FreshFloatTy(_)) => {
-                bug!("encountered a fresh type during canonicalization")
-            }
-
-            ty::TyInfer(ty::CanonicalTy(_)) => {
-                bug!("encountered a canonical type during canonicalization")
-            }
-
-            ty::TyClosure(..)
-            | ty::TyGenerator(..)
-            | ty::TyGeneratorWitness(..)
-            | ty::TyBool
-            | ty::TyChar
-            | ty::TyInt(..)
-            | ty::TyUint(..)
-            | ty::TyFloat(..)
-            | ty::TyAdt(..)
-            | ty::TyStr
-            | ty::TyError
-            | ty::TyArray(..)
-            | ty::TySlice(..)
-            | ty::TyRawPtr(..)
-            | ty::TyRef(..)
-            | ty::TyFnDef(..)
-            | ty::TyFnPtr(_)
-            | ty::TyDynamic(..)
-            | ty::TyNever
-            | ty::TyTuple(..)
-            | ty::TyProjection(..)
-            | ty::TyForeign(..)
-            | ty::TyParam(..)
-            | ty::TyAnon(..) => {
-                if t.flags.intersects(self.needs_canonical_flags) {
-                    t.super_fold_with(self)
-                } else {
-                    t
-                }
-            }
-        }
-    }
-}
-
-impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {
-    /// The main `canonicalize` method, shared impl of
-    /// `canonicalize_query` and `canonicalize_response`.
-    fn canonicalize<V>(
-        value: &V,
-        infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
-        tcx: TyCtxt<'cx, 'gcx, 'tcx>,
-        canonicalize_all_free_regions: CanonicalizeAllFreeRegions,
-    ) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
-    where
-        V: Canonicalize<'gcx, 'tcx>,
-    {
-        debug_assert!(
-            !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),
-            "canonicalizing a canonical value: {:?}",
-            value,
-        );
-
-        let needs_canonical_flags = if canonicalize_all_free_regions.0 {
-            TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX
-        } else {
-            TypeFlags::KEEP_IN_LOCAL_TCX
-        };
-
-        let gcx = tcx.global_tcx();
-
-        // Fast path: nothing that needs to be canonicalized.
-        if !value.has_type_flags(needs_canonical_flags) {
-            let out_value = gcx.lift(value).unwrap();
-            let canon_value = V::intern(
-                gcx,
-                Canonical {
-                    variables: Slice::empty(),
-                    value: out_value,
-                },
-            );
-            let values = CanonicalVarValues {
-                var_values: IndexVec::default(),
-            };
-            return (canon_value, values);
-        }
-
-        let mut canonicalizer = Canonicalizer {
-            infcx,
-            tcx,
-            canonicalize_all_free_regions,
-            needs_canonical_flags,
-            variables: IndexVec::default(),
-            indices: FxHashMap::default(),
-            var_values: IndexVec::default(),
-        };
-        let out_value = value.fold_with(&mut canonicalizer);
-
-        // Once we have canonicalized `out_value`, it should not
-        // contain anything that ties it to this inference context
-        // anymore, so it should live in the global arena.
-        let out_value = gcx.lift(&out_value).unwrap_or_else(|| {
-            bug!(
-                "failed to lift `{:?}`, canonicalized from `{:?}`",
-                out_value,
-                value
-            )
-        });
-
-        let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables.raw);
-
-        let canonical_value = V::intern(
-            gcx,
-            Canonical {
-                variables: canonical_variables,
-                value: out_value,
-            },
-        );
-        let canonical_var_values = CanonicalVarValues {
-            var_values: canonicalizer.var_values,
-        };
-        (canonical_value, canonical_var_values)
-    }
-
-    /// Creates a canonical variable replacing `kind` from the input,
-    /// or returns an existing variable if `kind` has already been
-    /// seen. `kind` is expected to be an unbound variable (or
-    /// potentially a free region).
-    fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {
-        let Canonicalizer {
-            indices,
-            variables,
-            var_values,
-            ..
-        } = self;
-
-        indices
-            .entry(kind)
-            .or_insert_with(|| {
-                let cvar1 = variables.push(info);
-                let cvar2 = var_values.push(kind);
-                assert_eq!(cvar1, cvar2);
-                cvar1
-            })
-            .clone()
-    }
-
-    /// Given a type variable `ty_var` of the given kind, first check
-    /// if `ty_var` is bound to anything; if so, canonicalize
-    /// *that*. Otherwise, create a new canonical variable for
-    /// `ty_var`.
-    fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>) -> Ty<'tcx> {
-        let infcx = self.infcx.expect("encountered ty-var without infcx");
-        let bound_to = infcx.shallow_resolve(ty_var);
-        if bound_to != ty_var {
-            self.fold_ty(bound_to)
-        } else {
-            let info = CanonicalVarInfo {
-                kind: CanonicalVarKind::Ty(ty_kind),
-            };
-            let cvar = self.canonical_var(info, ty_var.into());
-            self.tcx().mk_infer(ty::InferTy::CanonicalTy(cvar))
-        }
-    }
-}
-
-impl<'tcx, V> Canonical<'tcx, V> {
-    /// Instantiate the wrapped value, replacing each canonical value
-    /// with the value given in `var_values`.
-    fn substitute(&self, tcx: TyCtxt<'_, '_, 'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
-    where
-        V: TypeFoldable<'tcx>,
-    {
-        self.substitute_projected(tcx, var_values, |value| value)
-    }
-
-    /// Invoke `projection_fn` with `self.value` to get a value V that
-    /// is expressed in terms of the same canonical variables bound in
-    /// `self`. Apply the substitution `var_values` to this value V,
-    /// replacing each of the canonical variables.
-    fn substitute_projected<T>(
-        &self,
-        tcx: TyCtxt<'_, '_, 'tcx>,
-        var_values: &CanonicalVarValues<'tcx>,
-        projection_fn: impl FnOnce(&V) -> &T,
-    ) -> T
-    where
-        T: TypeFoldable<'tcx>,
-    {
-        assert_eq!(self.variables.len(), var_values.var_values.len());
-        let value = projection_fn(&self.value);
-        substitute_value(tcx, var_values, value)
-    }
-}
-
-/// Substitute the values from `var_values` into `value`. `var_values`
-/// must be values for the set of canonical variables that appear in
-/// `value`.
-fn substitute_value<'a, 'tcx, T>(
-    tcx: TyCtxt<'_, '_, 'tcx>,
-    var_values: &CanonicalVarValues<'tcx>,
-    value: &'a T,
-) -> T
-where
-    T: TypeFoldable<'tcx>,
-{
-    if var_values.var_values.is_empty() {
-        debug_assert!(!value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS));
-        value.clone()
-    } else if !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
-        value.clone()
-    } else {
-        value.fold_with(&mut CanonicalVarValuesSubst { tcx, var_values })
-    }
-}
-
-struct CanonicalVarValuesSubst<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
-    tcx: TyCtxt<'cx, 'gcx, 'tcx>,
-    var_values: &'cx CanonicalVarValues<'tcx>,
-}
-
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for CanonicalVarValuesSubst<'cx, 'gcx, 'tcx> {
-    fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
-        self.tcx
-    }
-
-    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
-        match t.sty {
-            ty::TyInfer(ty::InferTy::CanonicalTy(c)) => {
-                match self.var_values.var_values[c].unpack() {
-                    UnpackedKind::Type(ty) => ty,
-                    r => bug!("{:?} is a type but value is {:?}", c, r),
-                }
-            }
-            _ => {
-                if !t.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
-                    t
-                } else {
-                    t.super_fold_with(self)
-                }
-            }
-        }
-    }
-
-    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
-        match r {
-            ty::RegionKind::ReCanonical(c) => match self.var_values.var_values[*c].unpack() {
-                UnpackedKind::Lifetime(l) => l,
-                r => bug!("{:?} is a region but value is {:?}", c, r),
-            },
-            _ => r.super_fold_with(self),
-        }
-    }
-}
-
-CloneTypeFoldableAndLiftImpls! {
-    ::infer::canonical::Certainty,
-    ::infer::canonical::CanonicalVarInfo,
-    ::infer::canonical::CanonicalVarKind,
-}
-
-CloneTypeFoldableImpls! {
-    for <'tcx> {
-        ::infer::canonical::CanonicalVarInfos<'tcx>,
-    }
-}
-
-BraceStructTypeFoldableImpl! {
-    impl<'tcx, C> TypeFoldable<'tcx> for Canonical<'tcx, C> {
-        variables,
-        value,
-    } where C: TypeFoldable<'tcx>
-}
-
-BraceStructLiftImpl! {
-    impl<'a, 'tcx, T> Lift<'tcx> for Canonical<'a, T> {
-        type Lifted = Canonical<'tcx, T::Lifted>;
-        variables, value
-    } where T: Lift<'tcx>
-}
-
-impl<'tcx> CanonicalVarValues<'tcx> {
-    fn iter<'a>(&'a self) -> impl Iterator<Item = Kind<'tcx>> + 'a {
-        self.var_values.iter().cloned()
-    }
-
-    fn len(&self) -> usize {
-        self.var_values.len()
-    }
-}
-
-impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> {
-    type Item = Kind<'tcx>;
-    type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, Kind<'tcx>>>;
-
-    fn into_iter(self) -> Self::IntoIter {
-        self.var_values.iter().cloned()
-    }
-}
-
-BraceStructLiftImpl! {
-    impl<'a, 'tcx> Lift<'tcx> for CanonicalVarValues<'a> {
-        type Lifted = CanonicalVarValues<'tcx>;
-        var_values,
-    }
-}
-
-BraceStructTypeFoldableImpl! {
-    impl<'tcx> TypeFoldable<'tcx> for CanonicalVarValues<'tcx> {
-        var_values,
-    }
-}
-
-BraceStructTypeFoldableImpl! {
-    impl<'tcx, R> TypeFoldable<'tcx> for QueryResult<'tcx, R> {
-        var_values, region_constraints, certainty, value
-    } where R: TypeFoldable<'tcx>,
-}
-
-BraceStructLiftImpl! {
-    impl<'a, 'tcx, R> Lift<'tcx> for QueryResult<'a, R> {
-        type Lifted = QueryResult<'tcx, R::Lifted>;
-        var_values, region_constraints, certainty, value
-    } where R: Lift<'tcx>
-}
-
-impl<'tcx> Index<CanonicalVar> for CanonicalVarValues<'tcx> {
-    type Output = Kind<'tcx>;
-
-    fn index(&self, value: CanonicalVar) -> &Kind<'tcx> {
-        &self.var_values[value]
-    }
-}
diff --git a/src/librustc/infer/canonical/canonicalizer.rs b/src/librustc/infer/canonical/canonicalizer.rs
new file mode 100644 (file)
index 0000000..8b67f04
--- /dev/null
@@ -0,0 +1,403 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains the "canonicalizer" itself.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::{
+    Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, CanonicalVarValues,
+    Canonicalized,
+};
+use infer::InferCtxt;
+use std::sync::atomic::Ordering;
+use ty::fold::{TypeFoldable, TypeFolder};
+use ty::subst::Kind;
+use ty::{self, CanonicalVar, Lift, Slice, Ty, TyCtxt, TypeFlags};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::indexed_vec::IndexVec;
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+    /// Canonicalizes a query value `V`. When we canonicalize a query,
+    /// we not only canonicalize unbound inference variables, but we
+    /// *also* replace all free regions whatsoever. So for example a
+    /// query like `T: Trait<'static>` would be canonicalized to
+    ///
+    /// ```text
+    /// T: Trait<'?0>
+    /// ```
+    ///
+    /// with a mapping M that maps `'?0` to `'static`.
+    ///
+    /// To get a good understanding of what is happening here, check
+    /// out the [chapter in the rustc guide][c].
+    ///
+    /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query
+    pub fn canonicalize_query<V>(
+        &self,
+        value: &V,
+    ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+    where
+        V: TypeFoldable<'tcx> + Lift<'gcx>,
+    {
+        self.tcx
+            .sess
+            .perf_stats
+            .queries_canonicalized
+            .fetch_add(1, Ordering::Relaxed);
+
+        Canonicalizer::canonicalize(
+            value,
+            Some(self),
+            self.tcx,
+            CanonicalizeRegionMode {
+                static_region: true,
+                other_free_regions: true,
+            },
+        )
+    }
+
+    /// Canonicalizes a query *response* `V`. When we canonicalize a
+    /// query response, we only canonicalize unbound inference
+    /// variables, and we leave other free regions alone. So,
+    /// continuing with the example from `canonicalize_query`, if
+    /// there was an input query `T: Trait<'static>`, it would have
+    /// been canonicalized to
+    ///
+    /// ```text
+    /// T: Trait<'?0>
+    /// ```
+    ///
+    /// with a mapping M that maps `'?0` to `'static`. But if we found that there
+    /// exists only one possible impl of `Trait`, and it looks like
+    ///
+    ///     impl<T> Trait<'static> for T { .. }
+    ///
+    /// then we would prepare a query result R that (among other
+    /// things) includes a mapping to `'?0 := 'static`. When
+    /// canonicalizing this query result R, we would leave this
+    /// reference to `'static` alone.
+    ///
+    /// To get a good understanding of what is happening here, check
+    /// out the [chapter in the rustc guide][c].
+    ///
+    /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query-result
+    pub fn canonicalize_response<V>(
+        &self,
+        value: &V,
+    ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+    where
+        V: TypeFoldable<'tcx> + Lift<'gcx>,
+    {
+        Canonicalizer::canonicalize(
+            value,
+            Some(self),
+            self.tcx,
+            CanonicalizeRegionMode {
+                static_region: false,
+                other_free_regions: false,
+            },
+        )
+    }
+
+    /// A hacky variant of `canonicalize_query` that does not
+    /// canonicalize `'static`.  Unfortunately, the existing leak
+    /// check treats `'static` differently in some cases (see also
+    /// #33684), so if we are performing an operation that may need to
+    /// prove "leak-check" related things, we leave `'static`
+    /// alone.
+    ///
+    /// FIXME(#48536) -- once we have universes, we can remove this and just use
+    /// `canonicalize_query`.
+    pub fn canonicalize_hr_query_hack<V>(
+        &self,
+        value: &V,
+    ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+    where
+        V: TypeFoldable<'tcx> + Lift<'gcx>,
+    {
+        self.tcx
+            .sess
+            .perf_stats
+            .queries_canonicalized
+            .fetch_add(1, Ordering::Relaxed);
+
+        Canonicalizer::canonicalize(
+            value,
+            Some(self),
+            self.tcx,
+            CanonicalizeRegionMode {
+                static_region: false,
+                other_free_regions: true,
+            },
+        )
+    }
+}
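To make the relationship between a canonicalized value and its `CanonicalVarValues` concrete, here is a toy, self-contained model in plain Rust. The `Ty` enum and every name below are invented for illustration and are not rustc's: each distinct free variable is replaced by a numbered canonical variable, and the originals are recorded so query results can later be mapped back.

    use std::collections::HashMap;

    #[derive(Clone, Debug, PartialEq, Eq, Hash)]
    enum Ty {
        Var(String),                  // a free inference variable, e.g. "?T"
        Named(&'static str, Vec<Ty>), // a constructor application, e.g. Vec<?T>
    }

    // Replace each distinct free variable with a numbered canonical variable
    // (`?0`, `?1`, ...) and record the originals in `values`, the analogue of
    // `CanonicalVarValues`.
    fn canonicalize(ty: &Ty, map: &mut HashMap<String, usize>, values: &mut Vec<String>) -> Ty {
        match ty {
            Ty::Var(name) => {
                let idx = *map.entry(name.clone()).or_insert_with(|| {
                    values.push(name.clone());
                    values.len() - 1
                });
                Ty::Var(format!("?{}", idx))
            }
            Ty::Named(n, args) => {
                Ty::Named(*n, args.iter().map(|a| canonicalize(a, map, values)).collect())
            }
        }
    }

    fn main() {
        // Canonicalizing Vec<?T, ?T, ?U> gives Vec<?0, ?0, ?1>
        // with values = ["?T", "?U"].
        let ty = Ty::Named(
            "Vec",
            vec![Ty::Var("?T".into()), Ty::Var("?T".into()), Ty::Var("?U".into())],
        );
        let (mut map, mut values) = (HashMap::new(), Vec::new());
        let canonical = canonicalize(&ty, &mut map, &mut values);
        println!("{:?} with var_values = {:?}", canonical, values);
    }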
+
+/// Controls which free regions are replaced with a canonical var during
+/// canonicalization. Replacing free regions is used to make queries as
+/// generic as possible. For example, the query `F: Foo<'static>` would be
+/// canonicalized to `F: Foo<'0>`.
+struct CanonicalizeRegionMode {
+    static_region: bool,
+    other_free_regions: bool,
+}
+
+impl CanonicalizeRegionMode {
+    fn any(&self) -> bool {
+        self.static_region || self.other_free_regions
+    }
+}
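Read together with the constructors above: `canonicalize_query` sets both flags, `canonicalize_response` clears both, and `canonicalize_hr_query_hack` canonicalizes every free region except `'static` (`static_region: false`, `other_free_regions: true`).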
+
+struct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+    infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
+    tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+    variables: IndexVec<CanonicalVar, CanonicalVarInfo>,
+    indices: FxHashMap<Kind<'tcx>, CanonicalVar>,
+    var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
+    canonicalize_region_mode: CanonicalizeRegionMode,
+    needs_canonical_flags: TypeFlags,
+}
+
+impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {
+    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+        self.tcx
+    }
+
+    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+        match *r {
+            ty::ReLateBound(..) => {
+                // leave bound regions alone
+                r
+            }
+
+            ty::ReVar(vid) => {
+                let r = self
+                    .infcx
+                    .unwrap()
+                    .borrow_region_constraints()
+                    .opportunistic_resolve_var(self.tcx, vid);
+                let info = CanonicalVarInfo {
+                    kind: CanonicalVarKind::Region,
+                };
+                debug!(
+                    "canonical: region var found with vid {:?}, \
+                     opportunistically resolved to {:?}",
+                    vid, r
+                );
+                let cvar = self.canonical_var(info, r.into());
+                self.tcx().mk_region(ty::ReCanonical(cvar))
+            }
+
+            ty::ReStatic => {
+                if self.canonicalize_region_mode.static_region {
+                    let info = CanonicalVarInfo {
+                        kind: CanonicalVarKind::Region,
+                    };
+                    let cvar = self.canonical_var(info, r.into());
+                    self.tcx().mk_region(ty::ReCanonical(cvar))
+                } else {
+                    r
+                }
+            }
+
+            ty::ReEarlyBound(..)
+            | ty::ReFree(_)
+            | ty::ReScope(_)
+            | ty::ReSkolemized(..)
+            | ty::ReEmpty
+            | ty::ReErased => {
+                if self.canonicalize_region_mode.other_free_regions {
+                    let info = CanonicalVarInfo {
+                        kind: CanonicalVarKind::Region,
+                    };
+                    let cvar = self.canonical_var(info, r.into());
+                    self.tcx().mk_region(ty::ReCanonical(cvar))
+                } else {
+                    r
+                }
+            }
+
+            ty::ReClosureBound(..) | ty::ReCanonical(_) => {
+                bug!("canonical region encountered during canonicalization")
+            }
+        }
+    }
+
+    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+        match t.sty {
+            ty::TyInfer(ty::TyVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::General, t),
+
+            ty::TyInfer(ty::IntVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Int, t),
+
+            ty::TyInfer(ty::FloatVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Float, t),
+
+            ty::TyInfer(ty::FreshTy(_))
+            | ty::TyInfer(ty::FreshIntTy(_))
+            | ty::TyInfer(ty::FreshFloatTy(_)) => {
+                bug!("encountered a fresh type during canonicalization")
+            }
+
+            ty::TyInfer(ty::CanonicalTy(_)) => {
+                bug!("encountered a canonical type during canonicalization")
+            }
+
+            ty::TyClosure(..)
+            | ty::TyGenerator(..)
+            | ty::TyGeneratorWitness(..)
+            | ty::TyBool
+            | ty::TyChar
+            | ty::TyInt(..)
+            | ty::TyUint(..)
+            | ty::TyFloat(..)
+            | ty::TyAdt(..)
+            | ty::TyStr
+            | ty::TyError
+            | ty::TyArray(..)
+            | ty::TySlice(..)
+            | ty::TyRawPtr(..)
+            | ty::TyRef(..)
+            | ty::TyFnDef(..)
+            | ty::TyFnPtr(_)
+            | ty::TyDynamic(..)
+            | ty::TyNever
+            | ty::TyTuple(..)
+            | ty::TyProjection(..)
+            | ty::TyForeign(..)
+            | ty::TyParam(..)
+            | ty::TyAnon(..) => {
+                if t.flags.intersects(self.needs_canonical_flags) {
+                    t.super_fold_with(self)
+                } else {
+                    t
+                }
+            }
+        }
+    }
+}
+
+impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {
+    /// The main `canonicalize` method, shared impl of
+    /// `canonicalize_query` and `canonicalize_response`.
+    fn canonicalize<V>(
+        value: &V,
+        infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
+        tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+        canonicalize_region_mode: CanonicalizeRegionMode,
+    ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+    where
+        V: TypeFoldable<'tcx> + Lift<'gcx>,
+    {
+        debug_assert!(
+            !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),
+            "canonicalizing a canonical value: {:?}",
+            value,
+        );
+
+        let needs_canonical_flags = if canonicalize_region_mode.any() {
+            TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX
+        } else {
+            TypeFlags::KEEP_IN_LOCAL_TCX
+        };
+
+        let gcx = tcx.global_tcx();
+
+        // Fast path: nothing that needs to be canonicalized.
+        if !value.has_type_flags(needs_canonical_flags) {
+            let out_value = gcx.lift(value).unwrap();
+            let canon_value = Canonical {
+                variables: Slice::empty(),
+                value: out_value,
+            };
+            let values = CanonicalVarValues {
+                var_values: IndexVec::default(),
+            };
+            return (canon_value, values);
+        }
+
+        let mut canonicalizer = Canonicalizer {
+            infcx,
+            tcx,
+            canonicalize_region_mode,
+            needs_canonical_flags,
+            variables: IndexVec::default(),
+            indices: FxHashMap::default(),
+            var_values: IndexVec::default(),
+        };
+        let out_value = value.fold_with(&mut canonicalizer);
+
+        // Once we have canonicalized `out_value`, it should not
+        // contain anything that ties it to this inference context
+        // anymore, so it should live in the global arena.
+        let out_value = gcx.lift(&out_value).unwrap_or_else(|| {
+            bug!(
+                "failed to lift `{:?}`, canonicalized from `{:?}`",
+                out_value,
+                value
+            )
+        });
+
+        let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables.raw);
+
+        let canonical_value = Canonical {
+            variables: canonical_variables,
+            value: out_value,
+        };
+        let canonical_var_values = CanonicalVarValues {
+            var_values: canonicalizer.var_values,
+        };
+        (canonical_value, canonical_var_values)
+    }
+
+    /// Creates a canonical variable replacing `kind` from the input,
+    /// or returns an existing variable if `kind` has already been
+    /// seen. `kind` is expected to be an unbound variable (or
+    /// potentially a free region).
+    fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {
+        let Canonicalizer {
+            indices,
+            variables,
+            var_values,
+            ..
+        } = self;
+
+        indices
+            .entry(kind)
+            .or_insert_with(|| {
+                let cvar1 = variables.push(info);
+                let cvar2 = var_values.push(kind);
+                assert_eq!(cvar1, cvar2);
+                cvar1
+            })
+            .clone()
+    }
+
+    /// Given a type variable `ty_var` of the given kind, first check
+    /// if `ty_var` is bound to anything; if so, canonicalize
+    /// *that*. Otherwise, create a new canonical variable for
+    /// `ty_var`.
+    fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>) -> Ty<'tcx> {
+        let infcx = self.infcx.expect("encountered ty-var without infcx");
+        let bound_to = infcx.shallow_resolve(ty_var);
+        if bound_to != ty_var {
+            self.fold_ty(bound_to)
+        } else {
+            let info = CanonicalVarInfo {
+                kind: CanonicalVarKind::Ty(ty_kind),
+            };
+            let cvar = self.canonical_var(info, ty_var.into());
+            self.tcx().mk_infer(ty::InferTy::CanonicalTy(cvar))
+        }
+    }
+}
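
As a standalone illustration (toy types and names, nothing from rustc itself), the following sketch shows the renumbering and deduplication that `Canonicalizer::canonicalize` and `canonical_var` perform: every inference variable is replaced by a canonical variable numbered in order of first appearance, repeated variables reuse the same index, and `var_values` records the way back.

```rust
use std::collections::HashMap;

// Toy stand-ins for rustc's types: an inference variable, a canonical
// variable, a fully known type, or a pair of types.
#[derive(Clone, Debug, PartialEq, Eq)]
enum Ty {
    Infer(u32),
    Canonical(usize),
    Named(&'static str),
    Pair(Box<Ty>, Box<Ty>),
}

// Replace each inference variable with a canonical variable; `indices`
// plays the role of the canonicalizer's dedup map, and `var_values`
// remembers which original variable each canonical index stands for.
fn canonicalize(ty: &Ty, indices: &mut HashMap<u32, usize>, var_values: &mut Vec<u32>) -> Ty {
    match ty {
        Ty::Infer(v) => {
            let index = *indices.entry(*v).or_insert_with(|| {
                var_values.push(*v);
                var_values.len() - 1
            });
            Ty::Canonical(index)
        }
        Ty::Canonical(_) => unreachable!("canonicalizing an already canonical value"),
        Ty::Named(name) => Ty::Named(*name),
        Ty::Pair(a, b) => Ty::Pair(
            Box::new(canonicalize(a, indices, var_values)),
            Box::new(canonicalize(b, indices, var_values)),
        ),
    }
}

fn main() {
    // `(?7, (?9, ?7))` canonicalizes to `(?0, (?1, ?0))`, and `var_values`
    // records that `?0 := ?7` and `?1 := ?9`.
    let ty = Ty::Pair(
        Box::new(Ty::Infer(7)),
        Box::new(Ty::Pair(Box::new(Ty::Infer(9)), Box::new(Ty::Infer(7)))),
    );
    let (mut indices, mut var_values) = (HashMap::new(), Vec::new());
    let canonical = canonicalize(&ty, &mut indices, &mut var_values);
    assert_eq!(var_values, vec![7, 9]);
    assert_eq!(
        canonical,
        Ty::Pair(
            Box::new(Ty::Canonical(0)),
            Box::new(Ty::Pair(
                Box::new(Ty::Canonical(1)),
                Box::new(Ty::Canonical(0)),
            )),
        )
    );
}
```
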
diff --git a/src/librustc/infer/canonical/mod.rs b/src/librustc/infer/canonical/mod.rs
new file mode 100644 (file)
index 0000000..62424ff
--- /dev/null
@@ -0,0 +1,334 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! **Canonicalization** is the key to constructing a query in the
+//! middle of type inference. Ordinarily, it is not possible to store
+//! types from type inference in query keys, because they contain
+//! references to inference variables whose lifetimes are too short
+//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
+//! produces two things:
+//!
+//! - a value T2 where each unbound inference variable has been
+//!   replaced with a **canonical variable**;
+//! - a map M (of type `CanonicalVarValues`) from those canonical
+//!   variables back to the original.
+//!
+//! We can then do queries using T2. These will give back constraints
+//! on the canonical variables which can be translated, using the map
+//! M, into constraints in our source context. This process of
+//! translating the results back is done by the
+//! `instantiate_query_result` method.
+//!
+//! For a more detailed look at what is happening here, check
+//! out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::{InferCtxt, RegionVariableOrigin, TypeVariableOrigin};
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::Lrc;
+use serialize::UseSpecializedDecodable;
+use std::ops::Index;
+use syntax::codemap::Span;
+use ty::fold::TypeFoldable;
+use ty::subst::Kind;
+use ty::{self, CanonicalVar, Lift, Region, Slice, TyCtxt};
+
+mod canonicalizer;
+
+pub mod query_result;
+
+mod substitute;
+
+/// A "canonicalized" type `V` is one where all free inference
+/// variables have been rewritten to "canonical vars". These are
+/// numbered starting from 0 in order of first appearance.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct Canonical<'gcx, V> {
+    pub variables: CanonicalVarInfos<'gcx>,
+    pub value: V,
+}
+
+pub type CanonicalVarInfos<'gcx> = &'gcx Slice<CanonicalVarInfo>;
+
+impl<'gcx> UseSpecializedDecodable for CanonicalVarInfos<'gcx> {}
+
+/// A set of values corresponding to the canonical variables from some
+/// `Canonical`. You can give these values to
+/// `canonical_value.substitute` to substitute them into the canonical
+/// value at the right places.
+///
+/// When you canonicalize a value `V`, you get back one of these
+/// vectors with the original values that were replaced by canonical
+/// variables. You will need to supply it later to instantiate the
+/// canonicalized query response.
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct CanonicalVarValues<'tcx> {
+    pub var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
+}
+
+/// Information about a canonical variable that is included with the
+/// canonical value. This is sufficient information for code to create
+/// a copy of the canonical value in some other inference context,
+/// with fresh inference variables replacing the canonical values.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct CanonicalVarInfo {
+    pub kind: CanonicalVarKind,
+}
+
+/// Describes the "kind" of the canonical variable. This is a "kind"
+/// in the type-theory sense of the term -- i.e., a "meta" type system
+/// that analyzes type-like values.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub enum CanonicalVarKind {
+    /// Some kind of type inference variable.
+    Ty(CanonicalTyVarKind),
+
+    /// Region variable `'?R`.
+    Region,
+}
+
+/// Rust actually has more than one category of type variables;
+/// notably, the type variables we create for literals (e.g., 22 or
+/// 22.) can only be instantiated with integral/float types (e.g.,
+/// usize or f32). In order to faithfully reproduce a type, we need to
+/// know what set of types a given type variable can be unified with.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub enum CanonicalTyVarKind {
+    /// General type variable `?T` that can be unified with arbitrary types.
+    General,
+
+    /// Integral type variable `?I` (that can only be unified with integral types).
+    Int,
+
+    /// Floating-point type variable `?F` (that can only be unified with float types).
+    Float,
+}
+
+/// After we execute a query with a canonicalized key, we get back a
+/// `Canonical<QueryResult<..>>`. You can use
+/// `instantiate_query_result` to access the data in this result.
+#[derive(Clone, Debug)]
+pub struct QueryResult<'tcx, R> {
+    pub var_values: CanonicalVarValues<'tcx>,
+    pub region_constraints: Vec<QueryRegionConstraint<'tcx>>,
+    pub certainty: Certainty,
+    pub value: R,
+}
+
+pub type Canonicalized<'gcx, V> = Canonical<'gcx, <V as Lift<'gcx>>::Lifted>;
+
+pub type CanonicalizedQueryResult<'gcx, T> =
+    Lrc<Canonical<'gcx, QueryResult<'gcx, <T as Lift<'gcx>>::Lifted>>>;
+
+/// Indicates whether or not we were able to prove the query to be
+/// true.
+#[derive(Copy, Clone, Debug)]
+pub enum Certainty {
+    /// The query is known to be true, presuming that you apply the
+    /// given `var_values` and the region-constraints are satisfied.
+    Proven,
+
+    /// The query is not known to be true, but also not known to be
+    /// false. The `var_values` represent *either* values that must
+    /// hold in order for the query to be true, or helpful tips that
+    /// *might* make it true. Currently rustc's trait solver cannot
+    /// distinguish the two (e.g., due to our preference for where
+    /// clauses over impls).
+    ///
+    /// After some unifications and other work have been done, it makes
+    /// sense to try and prove again -- of course, at that point, the
+    /// canonical form will be different, making this a distinct
+    /// query.
+    Ambiguous,
+}
+
+impl Certainty {
+    pub fn is_proven(&self) -> bool {
+        match self {
+            Certainty::Proven => true,
+            Certainty::Ambiguous => false,
+        }
+    }
+
+    pub fn is_ambiguous(&self) -> bool {
+        !self.is_proven()
+    }
+}
+
+impl<'tcx, R> QueryResult<'tcx, R> {
+    pub fn is_proven(&self) -> bool {
+        self.certainty.is_proven()
+    }
+
+    pub fn is_ambiguous(&self) -> bool {
+        !self.is_proven()
+    }
+}
+
+impl<'tcx, R> Canonical<'tcx, QueryResult<'tcx, R>> {
+    pub fn is_proven(&self) -> bool {
+        self.value.is_proven()
+    }
+
+    pub fn is_ambiguous(&self) -> bool {
+        !self.is_proven()
+    }
+}
+
+pub type QueryRegionConstraint<'tcx> = ty::Binder<ty::OutlivesPredicate<Kind<'tcx>, Region<'tcx>>>;
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+    /// Creates a substitution S for the canonical value with fresh
+    /// inference variables and applies it to the canonical value.
+    /// Returns both the instantiated result *and* the substitution S.
+    ///
+    /// This is useful at the start of a query: it basically brings
+    /// the canonical value "into scope" within your new infcx. At the
+    /// end of processing, the substitution S (once canonicalized)
+    /// then represents the values that you computed for each of the
+    /// canonical inputs to your query.
+    pub fn instantiate_canonical_with_fresh_inference_vars<T>(
+        &self,
+        span: Span,
+        canonical: &Canonical<'tcx, T>,
+    ) -> (T, CanonicalVarValues<'tcx>)
+    where
+        T: TypeFoldable<'tcx>,
+    {
+        let canonical_inference_vars =
+            self.fresh_inference_vars_for_canonical_vars(span, canonical.variables);
+        let result = canonical.substitute(self.tcx, &canonical_inference_vars);
+        (result, canonical_inference_vars)
+    }
+
+    /// Given the "infos" about the canonical variables from some
+    /// canonical, creates fresh inference variables with the same
+    /// characteristics. You can then use `substitute` to instantiate
+    /// the canonical variable with these inference variables.
+    fn fresh_inference_vars_for_canonical_vars(
+        &self,
+        span: Span,
+        variables: &Slice<CanonicalVarInfo>,
+    ) -> CanonicalVarValues<'tcx> {
+        let var_values: IndexVec<CanonicalVar, Kind<'tcx>> = variables
+            .iter()
+            .map(|info| self.fresh_inference_var_for_canonical_var(span, *info))
+            .collect();
+
+        CanonicalVarValues { var_values }
+    }
+
+    /// Given the "info" about a canonical variable, creates a fresh
+    /// inference variable with the same characteristics.
+    fn fresh_inference_var_for_canonical_var(
+        &self,
+        span: Span,
+        cv_info: CanonicalVarInfo,
+    ) -> Kind<'tcx> {
+        match cv_info.kind {
+            CanonicalVarKind::Ty(ty_kind) => {
+                let ty = match ty_kind {
+                    CanonicalTyVarKind::General => {
+                        self.next_ty_var(TypeVariableOrigin::MiscVariable(span))
+                    }
+
+                    CanonicalTyVarKind::Int => self.tcx.mk_int_var(self.next_int_var_id()),
+
+                    CanonicalTyVarKind::Float => self.tcx.mk_float_var(self.next_float_var_id()),
+                };
+                ty.into()
+            }
+
+            CanonicalVarKind::Region => self
+                .next_region_var(RegionVariableOrigin::MiscVariable(span))
+                .into(),
+        }
+    }
+}
+
+CloneTypeFoldableAndLiftImpls! {
+    ::infer::canonical::Certainty,
+    ::infer::canonical::CanonicalVarInfo,
+    ::infer::canonical::CanonicalVarKind,
+}
+
+CloneTypeFoldableImpls! {
+    for <'tcx> {
+        ::infer::canonical::CanonicalVarInfos<'tcx>,
+    }
+}
+
+BraceStructTypeFoldableImpl! {
+    impl<'tcx, C> TypeFoldable<'tcx> for Canonical<'tcx, C> {
+        variables,
+        value,
+    } where C: TypeFoldable<'tcx>
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx, T> Lift<'tcx> for Canonical<'a, T> {
+        type Lifted = Canonical<'tcx, T::Lifted>;
+        variables, value
+    } where T: Lift<'tcx>
+}
+
+impl<'tcx> CanonicalVarValues<'tcx> {
+    fn iter<'a>(&'a self) -> impl Iterator<Item = Kind<'tcx>> + 'a {
+        self.var_values.iter().cloned()
+    }
+
+    fn len(&self) -> usize {
+        self.var_values.len()
+    }
+}
+
+impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> {
+    type Item = Kind<'tcx>;
+    type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, Kind<'tcx>>>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.var_values.iter().cloned()
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for CanonicalVarValues<'a> {
+        type Lifted = CanonicalVarValues<'tcx>;
+        var_values,
+    }
+}
+
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for CanonicalVarValues<'tcx> {
+        var_values,
+    }
+}
+
+BraceStructTypeFoldableImpl! {
+    impl<'tcx, R> TypeFoldable<'tcx> for QueryResult<'tcx, R> {
+        var_values, region_constraints, certainty, value
+    } where R: TypeFoldable<'tcx>,
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx, R> Lift<'tcx> for QueryResult<'a, R> {
+        type Lifted = QueryResult<'tcx, R::Lifted>;
+        var_values, region_constraints, certainty, value
+    } where R: Lift<'tcx>
+}
+
+impl<'tcx> Index<CanonicalVar> for CanonicalVarValues<'tcx> {
+    type Output = Kind<'tcx>;
+
+    fn index(&self, value: CanonicalVar) -> &Kind<'tcx> {
+        &self.var_values[value]
+    }
+}
diff --git a/src/librustc/infer/canonical/query_result.rs b/src/librustc/infer/canonical/query_result.rs
new file mode 100644 (file)
index 0000000..b8b13e0
--- /dev/null
@@ -0,0 +1,605 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains the code to instantiate a "query result", and
+//! in particular to extract out the resulting region obligations and
+//! encode them therein.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::substitute::substitute_value;
+use infer::canonical::{Canonical, CanonicalVarKind, CanonicalVarValues, CanonicalizedQueryResult,
+                       Certainty, QueryRegionConstraint, QueryResult};
+use infer::region_constraints::{Constraint, RegionConstraintData};
+use infer::InferCtxtBuilder;
+use infer::{InferCtxt, InferOk, InferResult, RegionObligation};
+use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::Lrc;
+use std::fmt::Debug;
+use syntax::ast;
+use syntax_pos::DUMMY_SP;
+use traits::query::{Fallible, NoSolution};
+use traits::{FulfillmentContext, TraitEngine};
+use traits::{Obligation, ObligationCause, PredicateObligation};
+use ty::fold::TypeFoldable;
+use ty::subst::{Kind, UnpackedKind};
+use ty::{self, CanonicalVar, Lift, TyCtxt};
+
+impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> {
+    /// The "main method" for a canonicalized trait query. Given the
+    /// canonical key `canonical_key`, this method will create a new
+    /// inference context, instantiate the key, and run your operation
+    /// `op`. The operation should yield up a result (of type `R`) as
+    /// well as a set of trait obligations that must be fully
+    /// satisfied. These obligations will be processed and the
+    /// canonical result created.
+    ///
+    /// Returns `NoSolution` in the event of any error.
+    ///
+    /// (It might be mildly nicer to implement this on `TyCtxt`, and
+    /// not `InferCtxtBuilder`, but that is a bit tricky right now,
+    /// in part because we would need a `for<'gcx: 'tcx>` sort of
+    /// bound for the closure and in part because it is convenient to
+    /// have `'tcx` be free on this function so that we can talk about
+    /// `K: TypeFoldable<'tcx>`.)
+    pub fn enter_canonical_trait_query<K, R>(
+        &'tcx mut self,
+        canonical_key: &Canonical<'tcx, K>,
+        operation: impl FnOnce(&InferCtxt<'_, 'gcx, 'tcx>, &mut FulfillmentContext<'tcx>, K)
+            -> Fallible<R>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, R>>
+    where
+        K: TypeFoldable<'tcx>,
+        R: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+    {
+        self.enter(|ref infcx| {
+            let (key, canonical_inference_vars) =
+                infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_key);
+            let fulfill_cx = &mut FulfillmentContext::new();
+            let value = operation(infcx, fulfill_cx, key)?;
+            infcx.make_canonicalized_query_result(canonical_inference_vars, value, fulfill_cx)
+        })
+    }
+}
+
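
Purely as an illustration (not code from this commit), a provider in a downstream crate such as librustc_traits might drive `enter_canonical_trait_query` roughly like the sketch below. The query name, the tuple key, and the `map_err(|_| NoSolution)` error handling are assumptions made for the example; the entry point itself and the `at(..).eq(..)` and `register_predicate_obligation` calls are existing rustc APIs. Note that the closure never sees canonical variables: by the time it runs, the key has already been instantiated with fresh inference variables.

```rust
use rustc::infer::canonical::{Canonical, CanonicalizedQueryResult};
use rustc::traits::query::{Fallible, NoSolution};
use rustc::traits::{ObligationCause, TraitEngine};
use rustc::ty::{ParamEnvAnd, Ty, TyCtxt};

// Hypothetical query provider: prove that two types are equal under the
// given ParamEnv, reporting region constraints or ambiguity via the
// machinery in this file.
fn hypothetical_type_eq<'tcx>(
    tcx: TyCtxt<'_, 'tcx, 'tcx>,
    canonical_key: &Canonical<'tcx, ParamEnvAnd<'tcx, (Ty<'tcx>, Ty<'tcx>)>>,
) -> Fallible<CanonicalizedQueryResult<'tcx, ()>> {
    tcx.infer_ctxt()
        .enter_canonical_trait_query(canonical_key, |infcx, fulfill_cx, key| {
            let (param_env, (a, b)) = key.into_parts();
            // Equate the two types; nested obligations are handed to
            // `fulfill_cx`, which `enter_canonical_trait_query` drains when
            // this closure returns (building the canonical query result).
            let infer_ok = infcx
                .at(&ObligationCause::dummy(), param_env)
                .eq(a, b)
                .map_err(|_| NoSolution)?;
            for obligation in infer_ok.into_obligations() {
                fulfill_cx.register_predicate_obligation(infcx, obligation);
            }
            Ok(())
        })
}
```
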
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+    /// This method is meant to be invoked as the final step of a canonical query
+    /// implementation. It is given:
+    ///
+    /// - the instantiated variables `inference_vars` created from the query key
+    /// - the result `answer` of the query
+    /// - a fulfillment context `fulfill_cx` that may contain various obligations which
+    ///   have yet to be proven.
+    ///
+    /// Given this, the function will process the obligations pending
+    /// in `fulfill_cx`:
+    ///
+    /// - If all the obligations can be proven successfully, it will
+    ///   package up any resulting region obligations (extracted from
+    ///   `infcx`) along with the fully resolved value `answer` into a
+    ///   query result (which is then itself canonicalized).
+    /// - If some obligations can be neither proven nor disproven, then
+    ///   the same thing happens, but the resulting query is marked as ambiguous.
+    /// - Finally, if any of the obligations result in a hard error,
+    ///   then `Err(NoSolution)` is returned.
+    pub fn make_canonicalized_query_result<T>(
+        &self,
+        inference_vars: CanonicalVarValues<'tcx>,
+        answer: T,
+        fulfill_cx: &mut FulfillmentContext<'tcx>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, T>>
+    where
+        T: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+    {
+        let query_result = self.make_query_result(inference_vars, answer, fulfill_cx)?;
+        let (canonical_result, _) = self.canonicalize_response(&query_result);
+
+        debug!(
+            "make_canonicalized_query_result: canonical_result = {:#?}",
+            canonical_result
+        );
+
+        Ok(Lrc::new(canonical_result))
+    }
+
+    /// Helper for `make_canonicalized_query_result` that does
+    /// everything up until the final canonicalization.
+    fn make_query_result<T>(
+        &self,
+        inference_vars: CanonicalVarValues<'tcx>,
+        answer: T,
+        fulfill_cx: &mut FulfillmentContext<'tcx>,
+    ) -> Result<QueryResult<'tcx, T>, NoSolution>
+    where
+        T: Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+    {
+        let tcx = self.tcx;
+
+        debug!(
+            "make_query_result(\
+             inference_vars={:?}, \
+             answer={:?})",
+            inference_vars, answer,
+        );
+
+        // Select everything, returning errors.
+        let true_errors = match fulfill_cx.select_where_possible(self) {
+            Ok(()) => vec![],
+            Err(errors) => errors,
+        };
+        debug!("true_errors = {:#?}", true_errors);
+
+        if !true_errors.is_empty() {
+            // FIXME -- we don't indicate *why* we failed to solve
+            debug!("make_query_result: true_errors={:#?}", true_errors);
+            return Err(NoSolution);
+        }
+
+        // Anything left unselected *now* must be an ambiguity.
+        let ambig_errors = match fulfill_cx.select_all_or_error(self) {
+            Ok(()) => vec![],
+            Err(errors) => errors,
+        };
+        debug!("ambig_errors = {:#?}", ambig_errors);
+
+        let region_obligations = self.take_registered_region_obligations();
+        let region_constraints = self.with_region_constraints(|region_constraints| {
+            make_query_outlives(tcx, region_obligations, region_constraints)
+        });
+
+        let certainty = if ambig_errors.is_empty() {
+            Certainty::Proven
+        } else {
+            Certainty::Ambiguous
+        };
+
+        Ok(QueryResult {
+            var_values: inference_vars,
+            region_constraints,
+            certainty,
+            value: answer,
+        })
+    }
+
+    /// Given the (canonicalized) result to a canonical query,
+    /// instantiates the result so it can be used, plugging in the
+    /// values from the canonical query. (Note that the result may
+    /// have been ambiguous; you should check the certainty level of
+    /// the query before applying this function.)
+    ///
+    /// To get a good understanding of what is happening here, check
+    /// out the [chapter in the rustc guide][c].
+    ///
+    /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#processing-the-canonicalized-query-result
+    pub fn instantiate_query_result_and_region_obligations<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+    ) -> InferResult<'tcx, R>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        let InferOk {
+            value: result_subst,
+            mut obligations,
+        } = self.query_result_substitution(cause, param_env, original_values, query_result)?;
+
+        obligations.extend(self.query_region_constraints_into_obligations(
+            cause,
+            param_env,
+            &query_result.value.region_constraints,
+            &result_subst,
+        ));
+
+        let user_result: R =
+            query_result.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
+
+        Ok(InferOk {
+            value: user_result,
+            obligations,
+        })
+    }
+
+    /// An alternative to
+    /// `instantiate_query_result_and_region_obligations` that is more
+    /// efficient for NLL. NLL is a bit more advanced in the
+    /// "transition to chalk" than the rest of the compiler. During
+    /// the NLL type check, all of the "processing" of types and
+    /// things happens in queries -- the NLL checker itself is only
+    /// interested in the region obligations (`'a: 'b` or `T: 'b`)
+    /// that come out of these queries, which it wants to convert into
+    /// MIR-based constraints and solve. Therefore, it is most
+    /// convenient for the NLL Type Checker to **directly consume**
+    /// the `QueryRegionConstraint` values that arise from doing a
+    /// query. This is in contrast to other parts of the compiler, which
+    /// would prefer for those `QueryRegionConstraint` to be converted
+    /// into the older infcx-style constraints (e.g., calls to
+    /// `sub_regions` or `register_region_obligation`).
+    ///
+    /// Therefore, `instantiate_nll_query_result_and_region_obligations` performs the same
+    /// basic operations as `instantiate_query_result_and_region_obligations` but
+    /// it returns its result differently:
+    ///
+    /// - It creates a substitution `S` that maps from the original
+    ///   query variables to the values computed in the query
+    ///   result. If any errors arise, they are propagated back as an
+    ///   `Err` result.
+    /// - In the case of a successful substitution, we will append
+    ///   `QueryRegionConstraint` values onto the
+    ///   `output_query_region_constraints` vector for the solver to
+    ///   use (if an error arises, some values may also be pushed, but
+    ///   they should be ignored).
+    /// - It **can happen** (though it rarely does currently) that
+    ///   equating types and things will give rise to subobligations
+    ///   that must be processed.  In this case, those subobligations
+    ///   are propagated back in the return value.
+    /// - Finally, the query result (of type `R`) is propagated back,
+    ///   after applying the substitution `S`.
+    pub fn instantiate_nll_query_result_and_region_obligations<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+        output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
+    ) -> InferResult<'tcx, R>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        // In an NLL query, there should be no type variables in the
+        // query, only region variables.
+        debug_assert!(query_result.variables.iter().all(|v| match v.kind {
+            CanonicalVarKind::Ty(_) => false,
+            CanonicalVarKind::Region => true,
+        }));
+
+        let result_subst =
+            self.query_result_substitution_guess(cause, original_values, query_result);
+
+        // Compute `QueryRegionConstraint` values that unify each of
+        // the original values `v_o` that was canonicalized into a
+        // variable...
+        let mut obligations = vec![];
+
+        for (index, original_value) in original_values.var_values.iter_enumerated() {
+            // ...with the value `v_r` of that variable from the query.
+            let result_value = query_result
+                .substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index]);
+            match (original_value.unpack(), result_value.unpack()) {
+                (UnpackedKind::Lifetime(ty::ReErased), UnpackedKind::Lifetime(ty::ReErased)) => {
+                    // no action needed
+                }
+
+                (UnpackedKind::Lifetime(v_o), UnpackedKind::Lifetime(v_r)) => {
+                    // To make `v_o = v_r`, we emit `v_o: v_r` and `v_r: v_o`.
+                    if v_o != v_r {
+                        output_query_region_constraints
+                            .push(ty::Binder::dummy(ty::OutlivesPredicate(v_o.into(), v_r)));
+                        output_query_region_constraints
+                            .push(ty::Binder::dummy(ty::OutlivesPredicate(v_r.into(), v_o)));
+                    }
+                }
+
+                (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
+                    let ok = self.at(cause, param_env).eq(v1, v2)?;
+                    obligations.extend(ok.into_obligations());
+                }
+
+                _ => {
+                    bug!(
+                        "kind mismatch, cannot unify {:?} and {:?}",
+                        original_value,
+                        result_value
+                    );
+                }
+            }
+        }
+
+        // ...also include the other query region constraints from the query.
+        output_query_region_constraints.reserve(query_result.value.region_constraints.len());
+        for r_c in query_result.value.region_constraints.iter() {
+            output_query_region_constraints.push(r_c.map_bound(|ty::OutlivesPredicate(k1, r2)| {
+                let k1 = substitute_value(self.tcx, &result_subst, &k1);
+                let r2 = substitute_value(self.tcx, &result_subst, &r2);
+                ty::OutlivesPredicate(k1, r2)
+            }));
+        }
+
+        let user_result: R =
+            query_result.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
+
+        Ok(InferOk {
+            value: user_result,
+            obligations,
+        })
+    }
+
+    /// Given the original values and the (canonicalized) result from
+    /// computing a query, returns a substitution that can be applied
+    /// to the query result to convert the result back into the
+    /// original namespace.
+    ///
+    /// The substitution also comes accompanied with subobligations
+    /// that arose from unification; these might occur if (for
+    /// example) we are doing lazy normalization and the value
+    /// assigned to a type variable is unified with an unnormalized
+    /// projection.
+    fn query_result_substitution<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+    ) -> InferResult<'tcx, CanonicalVarValues<'tcx>>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        debug!(
+            "query_result_substitution(original_values={:#?}, query_result={:#?})",
+            original_values, query_result,
+        );
+
+        let result_subst =
+            self.query_result_substitution_guess(cause, original_values, query_result);
+
+        let obligations = self.unify_query_result_substitution_guess(
+            cause,
+            param_env,
+            original_values,
+            &result_subst,
+            query_result,
+        )?
+            .into_obligations();
+
+        Ok(InferOk {
+            value: result_subst,
+            obligations,
+        })
+    }
+
+    /// Given the original values and the (canonicalized) result from
+    /// computing a query, returns a **guess** at a substitution that
+    /// can be applied to the query result to convert the result back
+    /// into the original namespace. This is called a **guess**
+    /// because it uses a quick heuristic to find the values for each
+    /// canonical variable; if that quick heuristic fails, then we
+    /// will instantiate fresh inference variables for each canonical
+    /// variable instead. Therefore, the result of this method must be
+    /// properly unified afterwards (see `unify_query_result_substitution_guess`).
+    fn query_result_substitution_guess<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+    ) -> CanonicalVarValues<'tcx>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        debug!(
+            "query_result_substitution_guess(original_values={:#?}, query_result={:#?})",
+            original_values, query_result,
+        );
+
+        // Every canonical query result includes values for each of
+        // the inputs to the query. Therefore, we begin by unifying
+        // these values with the original inputs that were
+        // canonicalized.
+        let result_values = &query_result.value.var_values;
+        assert_eq!(original_values.len(), result_values.len());
+
+        // Quickly try to find initial values for the canonical
+        // variables in the result in terms of the query. We do this
+        // by iterating down the values that the query gave to each of
+        // the canonical inputs. If we find that one of those values
+        // is directly equal to one of the canonical variables in the
+        // result, then we can type the corresponding value from the
+        // input. See the example above.
+        let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =
+            IndexVec::from_elem_n(None, query_result.variables.len());
+
+        // In terms of our example above, we are iterating over pairs like:
+        // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
+        for (original_value, result_value) in original_values.iter().zip(result_values) {
+            match result_value.unpack() {
+                UnpackedKind::Type(result_value) => {
+                    // e.g., here `result_value` might be `?0` in the example above...
+                    if let ty::TyInfer(ty::InferTy::CanonicalTy(index)) = result_value.sty {
+                        // in which case we would set `opt_values[0]` to `Some(?B)`.
+                        opt_values[index] = Some(original_value);
+                    }
+                }
+                UnpackedKind::Lifetime(result_value) => {
+                    // e.g., here `result_value` might be `'?1` in the example above...
+                    if let &ty::RegionKind::ReCanonical(index) = result_value {
+                        // in which case we would set `opt_values[1]` to `Some('static)`.
+                        opt_values[index] = Some(original_value);
+                    }
+                }
+            }
+        }
+
+        // Create a result substitution: if we found a value for a
+        // given variable in the loop above, use that. Otherwise, use
+        // a fresh inference variable.
+        let result_subst = CanonicalVarValues {
+            var_values: query_result
+                .variables
+                .iter()
+                .enumerate()
+                .map(|(index, info)| match opt_values[CanonicalVar::new(index)] {
+                    Some(k) => k,
+                    None => self.fresh_inference_var_for_canonical_var(cause.span, *info),
+                })
+                .collect(),
+        };
+
+        result_subst
+    }
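
As a standalone sketch (toy types, invented names) of the guessing loop above: whenever the value the query reports for one of the original inputs is itself a canonical variable of the result, the original input value is recorded as the guess for that variable.

```rust
// `Canonical(n)` stands for a canonical variable `?n` of the query result;
// `Concrete(_)` stands for any fully known value.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Value {
    Canonical(usize),
    Concrete(u32),
}

fn guess_result_subst(
    original_values: &[Value],
    result_values: &[Value],
    num_result_vars: usize,
) -> Vec<Option<Value>> {
    assert_eq!(original_values.len(), result_values.len());
    let mut opt_values: Vec<Option<Value>> = vec![None; num_result_vars];
    for (original, result) in original_values.iter().zip(result_values) {
        if let Value::Canonical(index) = result {
            // The query mapped this input straight to result variable
            // `?index`, so guess the original input value for it.
            opt_values[*index] = Some(*original);
        }
    }
    opt_values
}

fn main() {
    // The result maps input 0 to its own variable `?0` and input 1 to a
    // concrete value, so only `?0` gets a guess (the original input 7).
    let originals = [Value::Concrete(7), Value::Concrete(3)];
    let results = [Value::Canonical(0), Value::Concrete(3)];
    assert_eq!(
        guess_result_subst(&originals, &results, 1),
        vec![Some(Value::Concrete(7))]
    );
}
```

Any variables left without a guess get fresh inference variables, which is why the guess must still be unified against the actual result afterwards.
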
+
+    /// Given a "guess" at the values for the canonical variables in
+    /// the input, try to unify with the *actual* values found in the
+    /// query result.  Often, but not always, this is a no-op, because
+    /// we already found the mapping in the "guessing" step.
+    ///
+    /// See also: `query_result_substitution_guess`
+    fn unify_query_result_substitution_guess<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        result_subst: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+    ) -> InferResult<'tcx, ()>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        // A closure that yields the result value for the given
+        // canonical variable; this is taken from
+        // `query_result.var_values` after applying the substitution
+        // `result_subst`.
+        let substituted_query_result = |index: CanonicalVar| -> Kind<'tcx> {
+            query_result.substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index])
+        };
+
+        // Unify the original value for each variable with the value
+        // taken from `query_result` (after applying `result_subst`).
+        Ok(self.unify_canonical_vars(cause, param_env, original_values, substituted_query_result)?)
+    }
+
+    /// Converts the region constraints resulting from a query into an
+    /// iterator of obligations.
+    fn query_region_constraints_into_obligations<'a>(
+        &'a self,
+        cause: &'a ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
+        result_subst: &'a CanonicalVarValues<'tcx>,
+    ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {
+        Box::new(
+            unsubstituted_region_constraints
+                .iter()
+                .map(move |constraint| {
+                    let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
+                    let k1 = substitute_value(self.tcx, result_subst, k1);
+                    let r2 = substitute_value(self.tcx, result_subst, r2);
+                    match k1.unpack() {
+                        UnpackedKind::Lifetime(r1) => Obligation::new(
+                            cause.clone(),
+                            param_env,
+                            ty::Predicate::RegionOutlives(ty::Binder::dummy(
+                                ty::OutlivesPredicate(r1, r2),
+                            )),
+                        ),
+
+                        UnpackedKind::Type(t1) => Obligation::new(
+                            cause.clone(),
+                            param_env,
+                            ty::Predicate::TypeOutlives(ty::Binder::dummy(ty::OutlivesPredicate(
+                                t1, r2,
+                            ))),
+                        ),
+                    }
+                }),
+        ) as Box<dyn Iterator<Item = _>>
+    }
+
+    /// Given two sets of values for the same set of canonical variables, unify them.
+    /// The second set is produced lazily by supplying indices from the first set.
+    fn unify_canonical_vars(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        variables1: &CanonicalVarValues<'tcx>,
+        variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,
+    ) -> InferResult<'tcx, ()> {
+        self.commit_if_ok(|_| {
+            let mut obligations = vec![];
+            for (index, value1) in variables1.var_values.iter_enumerated() {
+                let value2 = variables2(index);
+
+                match (value1.unpack(), value2.unpack()) {
+                    (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
+                        obligations
+                            .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+                    }
+                    (
+                        UnpackedKind::Lifetime(ty::ReErased),
+                        UnpackedKind::Lifetime(ty::ReErased),
+                    ) => {
+                        // no action needed
+                    }
+                    (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {
+                        obligations
+                            .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+                    }
+                    _ => {
+                        bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
+                    }
+                }
+            }
+            Ok(InferOk {
+                value: (),
+                obligations,
+            })
+        })
+    }
+}
+
+/// Given the region obligations and constraints scraped from the infcx,
+/// creates query region constraints.
+pub fn make_query_outlives<'tcx>(
+    tcx: TyCtxt<'_, '_, 'tcx>,
+    region_obligations: Vec<(ast::NodeId, RegionObligation<'tcx>)>,
+    region_constraints: &RegionConstraintData<'tcx>,
+) -> Vec<QueryRegionConstraint<'tcx>> {
+    let RegionConstraintData {
+        constraints,
+        verifys,
+        givens,
+    } = region_constraints;
+
+    assert!(verifys.is_empty());
+    assert!(givens.is_empty());
+
+    let mut outlives: Vec<_> = constraints
+            .into_iter()
+            .map(|(k, _)| match *k {
+                // Swap regions because we are going from sub (<=) to outlives
+                // (>=).
+                Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
+                    tcx.mk_region(ty::ReVar(v2)).into(),
+                    tcx.mk_region(ty::ReVar(v1)),
+                ),
+                Constraint::VarSubReg(v1, r2) => {
+                    ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
+                }
+                Constraint::RegSubVar(r1, v2) => {
+                    ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
+                }
+                Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
+            })
+            .map(ty::Binder::dummy) // no bound regions in the code above
+            .collect();
+
+    outlives.extend(
+        region_obligations
+            .into_iter()
+            .map(|(_, r_o)| ty::OutlivesPredicate(r_o.sup_type.into(), r_o.sub_region))
+            .map(ty::Binder::dummy), // no bound regions in the code above
+    );
+
+    outlives
+}
diff --git a/src/librustc/infer/canonical/substitute.rs b/src/librustc/infer/canonical/substitute.rs
new file mode 100644 (file)
index 0000000..5bc1ae6
--- /dev/null
@@ -0,0 +1,113 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains code to substitute new values into a
+//! `Canonical<'tcx, T>`.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::{Canonical, CanonicalVarValues};
+use ty::fold::{TypeFoldable, TypeFolder};
+use ty::subst::UnpackedKind;
+use ty::{self, Ty, TyCtxt, TypeFlags};
+
+impl<'tcx, V> Canonical<'tcx, V> {
+    /// Instantiate the wrapped value, replacing each canonical value
+    /// with the value given in `var_values`.
+    pub fn substitute(&self, tcx: TyCtxt<'_, '_, 'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
+    where
+        V: TypeFoldable<'tcx>,
+    {
+        self.substitute_projected(tcx, var_values, |value| value)
+    }
+
+    /// Allows one to apply a substitution to some subset of
+    /// `self.value`. Invoke `projection_fn` with `self.value` to get
+    /// a value V that is expressed in terms of the same canonical
+    /// variables bound in `self` (usually this extracts a subset
+    /// of `self`). Apply the substitution `var_values` to this value
+    /// V, replacing each of the canonical variables.
+    pub fn substitute_projected<T>(
+        &self,
+        tcx: TyCtxt<'_, '_, 'tcx>,
+        var_values: &CanonicalVarValues<'tcx>,
+        projection_fn: impl FnOnce(&V) -> &T,
+    ) -> T
+    where
+        T: TypeFoldable<'tcx>,
+    {
+        assert_eq!(self.variables.len(), var_values.var_values.len());
+        let value = projection_fn(&self.value);
+        substitute_value(tcx, var_values, value)
+    }
+}
+
+/// Substitute the values from `var_values` into `value`. `var_values`
+/// must be values for the set of canonical variables that appear in
+/// `value`.
+pub(super) fn substitute_value<'a, 'tcx, T>(
+    tcx: TyCtxt<'_, '_, 'tcx>,
+    var_values: &CanonicalVarValues<'tcx>,
+    value: &'a T,
+) -> T
+where
+    T: TypeFoldable<'tcx>,
+{
+    if var_values.var_values.is_empty() {
+        debug_assert!(!value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS));
+        value.clone()
+    } else if !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
+        value.clone()
+    } else {
+        value.fold_with(&mut CanonicalVarValuesSubst { tcx, var_values })
+    }
+}
+
+struct CanonicalVarValuesSubst<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+    tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+    var_values: &'cx CanonicalVarValues<'tcx>,
+}
+
+impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for CanonicalVarValuesSubst<'cx, 'gcx, 'tcx> {
+    fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
+        self.tcx
+    }
+
+    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+        match t.sty {
+            ty::TyInfer(ty::InferTy::CanonicalTy(c)) => {
+                match self.var_values.var_values[c].unpack() {
+                    UnpackedKind::Type(ty) => ty,
+                    r => bug!("{:?} is a type but value is {:?}", c, r),
+                }
+            }
+            _ => {
+                if !t.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
+                    t
+                } else {
+                    t.super_fold_with(self)
+                }
+            }
+        }
+    }
+
+    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+        match r {
+            ty::RegionKind::ReCanonical(c) => match self.var_values.var_values[*c].unpack() {
+                UnpackedKind::Lifetime(l) => l,
+                r => bug!("{:?} is a region but value is {:?}", c, r),
+            },
+            _ => r.super_fold_with(self),
+        }
+    }
+}
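
A standalone sketch (toy types, invented names) of the substitution itself, once the fast paths above are ruled out: each canonical variable is replaced by the corresponding entry of `var_values`, and everything else is left untouched.

```rust
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Canonical(usize),    // a canonical variable `?N`
    Named(&'static str), // any fully known type
    Ref(Box<Ty>),
}

// Walk the value and splice in `var_values[N]` wherever `?N` occurs,
// mirroring what `CanonicalVarValuesSubst::fold_ty` does for real types.
fn substitute(ty: &Ty, var_values: &[Ty]) -> Ty {
    match ty {
        Ty::Canonical(n) => var_values[*n].clone(),
        Ty::Named(name) => Ty::Named(*name),
        Ty::Ref(inner) => Ty::Ref(Box::new(substitute(inner, var_values))),
    }
}

fn main() {
    // `&?0` with `?0 := u32` substitutes to `&u32`.
    let canonical = Ty::Ref(Box::new(Ty::Canonical(0)));
    let values = [Ty::Named("u32")];
    assert_eq!(
        substitute(&canonical, &values),
        Ty::Ref(Box::new(Ty::Named("u32")))
    );
}
```
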
index 4d6f2fb41b05d7e5c5fa5512071b9aa2f9e0eacc..482af9c005f32e42f39759d166b081daf5d65e4a 100644 (file)
@@ -1255,7 +1255,7 @@ fn report_inference_failure(
             infer::LateBoundRegion(_, br, infer::AssocTypeProjection(def_id)) => format!(
                 " for lifetime parameter {}in trait containing associated type `{}`",
                 br_string(br),
-                self.tcx.associated_item(def_id).name
+                self.tcx.associated_item(def_id).ident
             ),
             infer::EarlyBoundRegion(_, name) => format!(" for lifetime parameter `{}`", name),
             infer::BoundRegionInCoherence(name) => {
index 7352c14490d1aaeaaf3f35551447827a80c9e97a..773de8912ce9b54d22fcf903da17685500225c16 100644 (file)
@@ -131,8 +131,8 @@ pub fn need_type_info_err(&self,
             labels.clear();
             labels.push((pattern.span, format!("consider giving this closure parameter a type")));
         } else if let Some(pattern) = local_visitor.found_local_pattern {
-            if let Some(simple_name) = pattern.simple_name() {
-                labels.push((pattern.span, format!("consider giving `{}` a type", simple_name)));
+            if let Some(simple_ident) = pattern.simple_ident() {
+                labels.push((pattern.span, format!("consider giving `{}` a type", simple_ident)));
             } else {
                 labels.push((pattern.span, format!("consider giving the pattern a type")));
             }
index 7b0f2933580b39429b2354fb25c0fd2fb1f49741..a2b8947521021ff03b810766bbdb0da8f6b1047b 100644 (file)
@@ -96,14 +96,14 @@ pub(super) fn try_report_anon_anon_conflict(&self) -> Option<ErrorReported> {
         let sub_is_ret_type =
             self.is_return_type_anon(scope_def_id_sub, bregion_sub, ty_fndecl_sub);
 
-        let span_label_var1 = if let Some(simple_name) = anon_arg_sup.pat.simple_name() {
-            format!(" from `{}`", simple_name)
+        let span_label_var1 = if let Some(simple_ident) = anon_arg_sup.pat.simple_ident() {
+            format!(" from `{}`", simple_ident)
         } else {
             format!("")
         };
 
-        let span_label_var2 = if let Some(simple_name) = anon_arg_sub.pat.simple_name() {
-            format!(" into `{}`", simple_name)
+        let span_label_var2 = if let Some(simple_ident) = anon_arg_sub.pat.simple_ident() {
+            format!(" into `{}`", simple_ident)
         } else {
             format!("")
         };
index 59b36a50a2b09d2dcb5c774bec1d1f8bb7581191..f50c23b0aa75209e498c70f3d2df5ddb2d546a9b 100644 (file)
@@ -19,6 +19,7 @@
 mod find_anon_type;
 mod named_anon_conflict;
 mod outlives_closure;
+mod static_impl_trait;
 mod util;
 
 impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
@@ -67,6 +68,7 @@ pub fn try_report(&self) -> Option<ErrorReported> {
         self.try_report_named_anon_conflict()
             .or_else(|| self.try_report_anon_anon_conflict())
             .or_else(|| self.try_report_outlives_closure())
+            .or_else(|| self.try_report_static_impl_trait())
     }
 
     pub fn get_regions(&self) -> (Span, ty::Region<'tcx>, ty::Region<'tcx>) {
index c106fd0c3d2c0b3c95d483d5bfdd4294272d72a1..51abfa2505ab5cf49943000072e1493d6f50faad 100644 (file)
@@ -95,10 +95,10 @@ pub(super) fn try_report_named_anon_conflict(&self) -> Option<ErrorReported> {
             }
         }
 
-        let (error_var, span_label_var) = if let Some(simple_name) = arg.pat.simple_name() {
+        let (error_var, span_label_var) = if let Some(simple_ident) = arg.pat.simple_ident() {
             (
-                format!("the type of `{}`", simple_name),
-                format!("the type of `{}`", simple_name),
+                format!("the type of `{}`", simple_ident),
+                format!("the type of `{}`", simple_ident),
             )
         } else {
             ("parameter type".to_owned(), "type".to_owned())
diff --git a/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs b/src/librustc/infer/error_reporting/nice_region_error/static_impl_trait.rs
new file mode 100644 (file)
index 0000000..f9ec5fa
--- /dev/null
@@ -0,0 +1,83 @@
+// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Error Reporting for static impl Traits.
+
+use infer::error_reporting::nice_region_error::NiceRegionError;
+use infer::lexical_region_resolve::RegionResolutionError;
+use ty::{BoundRegion, FreeRegion, RegionKind};
+use util::common::ErrorReported;
+
+impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+    /// Print the error message for lifetime errors when the return type is a static impl Trait.
+    pub(super) fn try_report_static_impl_trait(&self) -> Option<ErrorReported> {
+        if let Some(ref error) = self.error {
+            match error.clone() {
+                RegionResolutionError::SubSupConflict(
+                    var_origin,
+                    sub_origin,
+                    sub_r,
+                    sup_origin,
+                    sup_r,
+                ) => {
+                    let anon_reg_sup = self.is_suitable_region(sup_r)?;
+                    if sub_r == &RegionKind::ReStatic &&
+                        self.is_return_type_impl_trait(anon_reg_sup.def_id)
+                    {
+                        let sp = var_origin.span();
+                        let return_sp = sub_origin.span();
+                        let mut err = self.tcx.sess.struct_span_err(
+                            sp,
+                            "cannot infer an appropriate lifetime",
+                        );
+                        err.span_label(
+                            return_sp,
+                            "this return type evaluates to the `'static` lifetime...",
+                        );
+                        err.span_label(
+                            sup_origin.span(),
+                            "...but this borrow...",
+                        );
+
+                        let (lifetime, lt_sp_opt) = self.tcx.msg_span_from_free_region(sup_r);
+                        if let Some(lifetime_sp) = lt_sp_opt {
+                            err.span_note(
+                                lifetime_sp,
+                                &format!("...can't outlive {}", lifetime),
+                            );
+                        }
+
+                        let lifetime_name = match sup_r {
+                            RegionKind::ReFree(FreeRegion {
+                                bound_region: BoundRegion::BrNamed(_, ref name), ..
+                            }) => format!("{}", name),
+                            _ => "'_".to_owned(),
+                        };
+                        if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(return_sp) {
+                            err.span_suggestion(
+                                return_sp,
+                                &format!(
+                                    "you can add a constraint to the return type to make it last \
+                                     less than `'static` and match {}",
+                                    lifetime,
+                                ),
+                                format!("{} + {}", snippet, lifetime_name),
+                            );
+                        }
+                        err.emit();
+                        return Some(ErrorReported);
+                    }
+                }
+                _ => {}
+            }
+        }
+        None
+    }
+}
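
For orientation, a minimal piece of user code (not taken from this commit) of the kind the new `try_report_static_impl_trait` path is meant to diagnose: the `impl Trait` return type is required to satisfy `'static`, but the closure borrows from an argument, and the suggestion built by `span_suggestion` above appends the borrowed lifetime to the return type.

    // Rejected: "cannot infer an appropriate lifetime"; the return type is
    // flagged as evaluating to `'static` while the closure borrows `s`.
    //
    //     fn make_printer(s: &str) -> impl Fn() {
    //         move || println!("{}", s)
    //     }
    //
    // Applying the suggested `+ '_` bound compiles:
    fn make_printer(s: &str) -> impl Fn() + '_ {
        move || println!("{}", s)
    }

    fn main() {
        let owned = String::from("hello");
        make_printer(&owned)();
    }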
index 8aadec64554143e08f7a16b4b24337e9b8c08587..1cc2b9d50b99ba1eb57de1f4bd1526558be7c1eb 100644 (file)
@@ -167,6 +167,23 @@ pub(super) fn is_return_type_anon(
         }
         None
     }
+
+    pub(super) fn is_return_type_impl_trait(
+        &self,
+        scope_def_id: DefId,
+    ) -> bool {
+        let ret_ty = self.tcx.type_of(scope_def_id);
+        match ret_ty.sty {
+            ty::TyFnDef(_, _) => {
+                let sig = ret_ty.fn_sig(self.tcx);
+                let output = self.tcx.erase_late_bound_regions(&sig.output());
+                return output.is_impl_trait();
+            }
+            _ => {}
+        }
+        false
+    }
+
     // Here we check for the case where anonymous region
     // corresponds to self and if yes, we display E0312.
     // FIXME(#42700) - Need to format self properly to
index 6b31f869ef9b30796ceb1c091d9406326614e73f..5b5ae6473f84011cdf3215cc4d3a7e8e532d19f1 100644 (file)
@@ -27,7 +27,7 @@
 use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric};
 use ty::fold::TypeFoldable;
 use ty::relate::RelateResult;
-use traits::{self, ObligationCause, PredicateObligations};
+use traits::{self, ObligationCause, PredicateObligations, TraitEngine};
 use rustc_data_structures::unify as ut;
 use std::cell::{Cell, RefCell, Ref, RefMut};
 use std::collections::BTreeMap;
@@ -485,6 +485,19 @@ impl<'tcx, T> InferOk<'tcx, T> {
     pub fn unit(self) -> InferOk<'tcx, ()> {
         InferOk { value: (), obligations: self.obligations }
     }
+
+    /// Extract `value`, registering any obligations into `fulfill_cx`
+    pub fn into_value_registering_obligations(
+        self,
+        infcx: &InferCtxt<'_, '_, 'tcx>,
+        fulfill_cx: &mut impl TraitEngine<'tcx>,
+    ) -> T {
+        let InferOk { value, obligations } = self;
+        for obligation in obligations {
+            fulfill_cx.register_predicate_obligation(infcx, obligation);
+        }
+        value
+    }
 }
 
 impl<'tcx> InferOk<'tcx, ()> {
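
A hedged sketch of the call-site pattern this helper collapses (illustrative pseudocode; `infer_ok`, `infcx` and `fulfill_cx` are placeholder names, not code from this commit):

    // Before: destructure the InferOk and register each obligation by hand.
    //     let InferOk { value, obligations } = infer_ok;
    //     for obligation in obligations {
    //         fulfill_cx.register_predicate_obligation(infcx, obligation);
    //     }
    //
    // After: one call does the same and hands back the value.
    //     let value = infer_ok.into_value_registering_obligations(infcx, fulfill_cx);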
index 4bc64acc7630690ddae18eda0394d5bfbe7bd3d0..57abdd18d353cb52a5084480b76726d3ddc2b27d 100644 (file)
@@ -11,7 +11,7 @@
 use infer::InferCtxt;
 use syntax::ast;
 use syntax::codemap::Span;
-use traits::{FulfillmentContext, TraitEngine};
+use traits::{FulfillmentContext, TraitEngine, TraitEngineExt};
 use ty::{self, Ty, TypeFoldable};
 use ty::outlives::Component;
 use ty::wf;
index 6aafebe79c6717de4eeb87cba1b73886c4d8a222..93079b046690c509aa5028d11a462a40f1102722 100644 (file)
@@ -13,4 +13,4 @@
 pub mod env;
 pub mod free_region_map;
 pub mod bounds;
-mod obligations;
+pub mod obligations;
index e5461685bd470bd21a744008698bf75013d6f0ac..07286f1250cd38f4063dce51754e7b205a96aee9 100644 (file)
 
 use hir::def_id::DefId;
 use infer::{self, GenericKind, InferCtxt, RegionObligation, SubregionOrigin, VerifyBound};
+use syntax::ast;
 use traits;
-use ty::{self, Ty, TyCtxt, TypeFoldable};
-use ty::subst::{Subst, Substs};
 use ty::outlives::Component;
-use syntax::ast;
+use ty::subst::{Subst, Substs};
+use ty::{self, Ty, TyCtxt, TypeFoldable};
 
 impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
     /// Registers that the given region obligation must be resolved
@@ -90,8 +90,7 @@ pub fn register_region_obligation(
     ) {
         debug!(
             "register_region_obligation(body_id={:?}, obligation={:?})",
-            body_id,
-            obligation
+            body_id, obligation
         );
 
         self.region_obligations
@@ -100,13 +99,8 @@ pub fn register_region_obligation(
     }
 
     /// Trait queries just want to pass back type obligations "as is"
-    pub fn take_registered_region_obligations(
-        &self,
-    ) -> Vec<(ast::NodeId, RegionObligation<'tcx>)> {
-        ::std::mem::replace(
-            &mut *self.region_obligations.borrow_mut(),
-            vec![],
-        )
+    pub fn take_registered_region_obligations(&self) -> Vec<(ast::NodeId, RegionObligation<'tcx>)> {
+        ::std::mem::replace(&mut *self.region_obligations.borrow_mut(), vec![])
     }
 
     /// Process the region obligations that must be proven (during
@@ -165,8 +159,13 @@ pub fn process_registered_region_obligations(
             }
         }
 
-        let outlives =
-            TypeOutlives::new(self, region_bound_pairs, implicit_region_bound, param_env);
+        let outlives = &mut TypeOutlives::new(
+            self,
+            self.tcx,
+            region_bound_pairs,
+            implicit_region_bound,
+            param_env,
+        );
 
         for RegionObligation {
             sup_type,
@@ -176,16 +175,14 @@ pub fn process_registered_region_obligations(
         {
             debug!(
                 "process_registered_region_obligations: sup_type={:?} sub_region={:?} cause={:?}",
-                sup_type,
-                sub_region,
-                cause
+                sup_type, sub_region, cause
             );
 
-            let origin = SubregionOrigin::from_obligation_cause(
-                &cause,
-                || infer::RelateParamBound(cause.span, sup_type),
-            );
+            let origin = SubregionOrigin::from_obligation_cause(&cause, || {
+                infer::RelateParamBound(cause.span, sup_type)
+            });
 
+            let sup_type = self.resolve_type_vars_if_possible(&sup_type);
             outlives.type_must_outlive(origin, sup_type, sub_region);
         }
     }
@@ -201,31 +198,68 @@ pub fn type_must_outlive(
         ty: Ty<'tcx>,
         region: ty::Region<'tcx>,
     ) {
-        let outlives =
-            TypeOutlives::new(self, region_bound_pairs, implicit_region_bound, param_env);
+        let outlives = &mut TypeOutlives::new(
+            self,
+            self.tcx,
+            region_bound_pairs,
+            implicit_region_bound,
+            param_env,
+        );
+        let ty = self.resolve_type_vars_if_possible(&ty);
         outlives.type_must_outlive(origin, ty, region);
     }
 }
 
-#[must_use] // you ought to invoke `into_accrued_obligations` when you are done =)
-struct TypeOutlives<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+/// The `TypeOutlives` struct has the job of "lowering" a `T: 'a`
+/// obligation into a series of `'a: 'b` constraints and "verifys", as
+/// described on the module comment. The final constraints are emitted
+/// via a "delegate" of type `D` -- this is usually the `infcx`, which
+/// accrues them into the `region_obligations` code, but for NLL we
+/// use something else.
+pub struct TypeOutlives<'cx, 'gcx: 'tcx, 'tcx: 'cx, D>
+where
+    D: TypeOutlivesDelegate<'tcx>,
+{
     // See the comments on `process_registered_region_obligations` for the meaning
     // of these fields.
-    infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+    delegate: D,
+    tcx: TyCtxt<'cx, 'gcx, 'tcx>,
     region_bound_pairs: &'cx [(ty::Region<'tcx>, GenericKind<'tcx>)],
     implicit_region_bound: Option<ty::Region<'tcx>>,
     param_env: ty::ParamEnv<'tcx>,
 }
 
-impl<'cx, 'gcx, 'tcx> TypeOutlives<'cx, 'gcx, 'tcx> {
-    fn new(
-        infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+pub trait TypeOutlivesDelegate<'tcx> {
+    fn push_sub_region_constraint(
+        &mut self,
+        origin: SubregionOrigin<'tcx>,
+        a: ty::Region<'tcx>,
+        b: ty::Region<'tcx>,
+    );
+
+    fn push_verify(
+        &mut self,
+        origin: SubregionOrigin<'tcx>,
+        kind: GenericKind<'tcx>,
+        a: ty::Region<'tcx>,
+        bound: VerifyBound<'tcx>,
+    );
+}
+
+impl<'cx, 'gcx, 'tcx, D> TypeOutlives<'cx, 'gcx, 'tcx, D>
+where
+    D: TypeOutlivesDelegate<'tcx>,
+{
+    pub fn new(
+        delegate: D,
+        tcx: TyCtxt<'cx, 'gcx, 'tcx>,
         region_bound_pairs: &'cx [(ty::Region<'tcx>, GenericKind<'tcx>)],
         implicit_region_bound: Option<ty::Region<'tcx>>,
         param_env: ty::ParamEnv<'tcx>,
     ) -> Self {
         Self {
-            infcx,
+            delegate,
+            tcx,
             region_bound_pairs,
             implicit_region_bound,
             param_env,
@@ -240,33 +274,25 @@ fn new(
     /// - `origin`, the reason we need this constraint
     /// - `ty`, the type `T`
     /// - `region`, the region `'a`
-    fn type_must_outlive(
-        &self,
+    pub fn type_must_outlive(
+        &mut self,
         origin: infer::SubregionOrigin<'tcx>,
         ty: Ty<'tcx>,
         region: ty::Region<'tcx>,
     ) {
-        let ty = self.infcx.resolve_type_vars_if_possible(&ty);
-
         debug!(
             "type_must_outlive(ty={:?}, region={:?}, origin={:?})",
-            ty,
-            region,
-            origin
+            ty, region, origin
         );
 
         assert!(!ty.has_escaping_regions());
 
-        let components = self.tcx().outlives_components(ty);
+        let components = self.tcx.outlives_components(ty);
         self.components_must_outlive(origin, components, region);
     }
 
-    fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
-        self.infcx.tcx
-    }
-
     fn components_must_outlive(
-        &self,
+        &mut self,
         origin: infer::SubregionOrigin<'tcx>,
         components: Vec<Component<'tcx>>,
         region: ty::Region<'tcx>,
@@ -275,7 +301,7 @@ fn components_must_outlive(
             let origin = origin.clone();
             match component {
                 Component::Region(region1) => {
-                    self.infcx.sub_regions(origin, region, region1);
+                    self.delegate.push_sub_region_constraint(origin, region, region1);
                 }
                 Component::Param(param_ty) => {
                     self.param_ty_must_outlive(origin, region, param_ty);
@@ -290,7 +316,7 @@ fn components_must_outlive(
                     // ignore this, we presume it will yield an error
                     // later, since if a type variable is not resolved by
                     // this point it never will be
-                    self.infcx.tcx.sess.delay_span_bug(
+                    self.tcx.sess.delay_span_bug(
                         origin.span(),
                         &format!("unresolved inference variable in outlives: {:?}", v),
                     );
@@ -300,35 +326,31 @@ fn components_must_outlive(
     }
 
     fn param_ty_must_outlive(
-        &self,
+        &mut self,
         origin: infer::SubregionOrigin<'tcx>,
         region: ty::Region<'tcx>,
         param_ty: ty::ParamTy,
     ) {
         debug!(
             "param_ty_must_outlive(region={:?}, param_ty={:?}, origin={:?})",
-            region,
-            param_ty,
-            origin
+            region, param_ty, origin
         );
 
         let verify_bound = self.param_bound(param_ty);
         let generic = GenericKind::Param(param_ty);
-        self.infcx
-            .verify_generic_bound(origin, generic, region, verify_bound);
+        self.delegate
+            .push_verify(origin, generic, region, verify_bound);
     }
 
     fn projection_must_outlive(
-        &self,
+        &mut self,
         origin: infer::SubregionOrigin<'tcx>,
         region: ty::Region<'tcx>,
         projection_ty: ty::ProjectionTy<'tcx>,
     ) {
         debug!(
             "projection_must_outlive(region={:?}, projection_ty={:?}, origin={:?})",
-            region,
-            projection_ty,
-            origin
+            region, projection_ty, origin
         );
 
         // This case is thorny for inference. The fundamental problem is
@@ -382,7 +404,7 @@ fn projection_must_outlive(
             }
 
             for r in projection_ty.substs.regions() {
-                self.infcx.sub_regions(origin.clone(), region, r);
+                self.delegate.push_sub_region_constraint(origin.clone(), region, r);
             }
 
             return;
@@ -408,7 +430,8 @@ fn projection_must_outlive(
                 .any(|r| env_bounds.contains(&r))
             {
                 debug!("projection_must_outlive: unique declared bound appears in trait ref");
-                self.infcx.sub_regions(origin.clone(), region, unique_bound);
+                self.delegate
+                    .push_sub_region_constraint(origin.clone(), region, unique_bound);
                 return;
             }
         }
@@ -420,8 +443,8 @@ fn projection_must_outlive(
         // even though a satisfactory solution exists.
         let verify_bound = self.projection_bound(env_bounds, projection_ty);
         let generic = GenericKind::Projection(projection_ty);
-        self.infcx
-            .verify_generic_bound(origin, generic.clone(), region, verify_bound);
+        self.delegate
+            .push_verify(origin, generic.clone(), region, verify_bound);
     }
 
     fn type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> {
@@ -469,12 +492,11 @@ fn projection_bound(
     ) -> VerifyBound<'tcx> {
         debug!(
             "projection_bound(declared_bounds={:?}, projection_ty={:?})",
-            declared_bounds,
-            projection_ty
+            declared_bounds, projection_ty
         );
 
         // see the extensive comment in projection_must_outlive
-        let ty = self.infcx
+        let ty = self
             .tcx
             .mk_projection(projection_ty.item_def_id, projection_ty.substs);
         let recursive_bound = self.recursive_type_bound(ty);
@@ -507,7 +529,7 @@ fn declared_generic_bounds_from_env(
         &self,
         generic: GenericKind<'tcx>,
     ) -> Vec<ty::Region<'tcx>> {
-        let tcx = self.tcx();
+        let tcx = self.tcx;
 
         // To start, collect bounds from user environment. Note that
         // parameter environments are already elaborated, so we don't
@@ -559,7 +581,7 @@ fn declared_projection_bounds_from_trait(
         debug!("projection_bounds(projection_ty={:?})", projection_ty);
         let mut bounds = self.region_bounds_declared_on_associated_item(projection_ty.item_def_id);
         for r in &mut bounds {
-            *r = r.subst(self.tcx(), projection_ty.substs);
+            *r = r.subst(self.tcx, projection_ty.substs);
         }
         bounds
     }
@@ -598,7 +620,7 @@ fn region_bounds_declared_on_associated_item(
         &self,
         assoc_item_def_id: DefId,
     ) -> Vec<ty::Region<'tcx>> {
-        let tcx = self.tcx();
+        let tcx = self.tcx;
         let assoc_item = tcx.associated_item(assoc_item_def_id);
         let trait_def_id = assoc_item.container.assert_trait();
         let trait_predicates = tcx.predicates_of(trait_def_id);
@@ -634,3 +656,25 @@ fn collect_outlives_from_predicate_list<I, P>(
             .collect()
     }
 }
+
+impl<'cx, 'gcx, 'tcx> TypeOutlivesDelegate<'tcx> for &'cx InferCtxt<'cx, 'gcx, 'tcx> {
+    fn push_sub_region_constraint(
+        &mut self,
+        origin: SubregionOrigin<'tcx>,
+        a: ty::Region<'tcx>,
+        b: ty::Region<'tcx>,
+    ) {
+        self.sub_regions(origin, a, b)
+    }
+
+    fn push_verify(
+        &mut self,
+        origin: SubregionOrigin<'tcx>,
+        kind: GenericKind<'tcx>,
+        a: ty::Region<'tcx>,
+        bound: VerifyBound<'tcx>,
+    ) {
+        self.verify_generic_bound(origin, kind, a, bound)
+    }
+}
+
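
The point of the new `TypeOutlivesDelegate` trait is that `TypeOutlives` no longer needs a full `InferCtxt`: the existing behaviour is recovered by the `&InferCtxt` impl above, while NLL can plug in a delegate that merely records constraints. A self-contained miniature of that pattern, with stand-in types (nothing here is rustc's real API):

    // Stand-in "constraints" and delegate trait, mirroring the shape of
    // TypeOutlivesDelegate: the producer pushes constraints through a
    // delegate and never commits to where they end up.
    #[derive(Debug, PartialEq)]
    struct OutlivesConstraint {
        longer: &'static str,
        shorter: &'static str,
    }

    trait Delegate {
        fn push_sub_region_constraint(&mut self, longer: &'static str, shorter: &'static str);
    }

    // One delegate simply collects constraints (roughly what NLL wants)...
    struct Collect(Vec<OutlivesConstraint>);

    impl Delegate for Collect {
        fn push_sub_region_constraint(&mut self, longer: &'static str, shorter: &'static str) {
            self.0.push(OutlivesConstraint { longer, shorter });
        }
    }

    // ...and the producer is generic over whichever delegate it is handed,
    // like TypeOutlives<'cx, 'gcx, 'tcx, D>.
    fn require_outlives<D: Delegate>(delegate: &mut D, longer: &'static str, shorter: &'static str) {
        delegate.push_sub_region_constraint(longer, shorter);
    }

    fn main() {
        let mut collect = Collect(Vec::new());
        require_outlives(&mut collect, "'a", "'b");
        assert_eq!(collect.0, vec![OutlivesConstraint { longer: "'a", shorter: "'b" }]);
    }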
index 102efe2bef3f750077e02e4fa67be742bdccf89d..c500800a30f9cd9717788a492b6e4f7e664b7ff7 100644 (file)
@@ -132,7 +132,6 @@ pub mod middle {
     pub mod allocator;
     pub mod borrowck;
     pub mod expr_use_visitor;
-    pub mod const_val;
     pub mod cstore;
     pub mod dataflow;
     pub mod dead;
index 4178fdb352d514c87bc01dcae1cf546cd7131f3e..ccd9024f4aaabb412af30c066cbf7b9e27f88ad5 100644 (file)
@@ -64,15 +64,14 @@ macro_rules! span_bug {
 
 #[macro_export]
 macro_rules! __impl_stable_hash_field {
-    (DECL IGNORED) => (_);
-    (DECL $name:ident) => (ref $name);
-    (USE IGNORED $ctx:expr, $hasher:expr) => ({});
-    (USE $name:ident, $ctx:expr, $hasher:expr) => ($name.hash_stable($ctx, $hasher));
+    ($field:ident, $ctx:expr, $hasher:expr) => ($field.hash_stable($ctx, $hasher));
+    ($field:ident, $ctx:expr, $hasher:expr, _) => ({ let _ = $field; });
+    ($field:ident, $ctx:expr, $hasher:expr, $delegate:expr) => ($delegate.hash_stable($ctx, $hasher));
 }
 
 #[macro_export]
 macro_rules! impl_stable_hash_for {
-    (enum $enum_name:path { $( $variant:ident $( ( $($arg:ident),* ) )* ),* $(,)* }) => {
+    (enum $enum_name:path { $( $variant:ident $( ( $($field:ident $(-> $delegate:tt)?),* ) )* ),* $(,)? }) => {
         impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $enum_name {
             #[inline]
             fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
@@ -83,15 +82,15 @@ fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&s
 
                 match *self {
                     $(
-                        $variant $( ( $( __impl_stable_hash_field!(DECL $arg) ),* ) )* => {
-                            $($( __impl_stable_hash_field!(USE $arg, __ctx, __hasher) );*)*
+                        $variant $( ( $(ref $field),* ) )* => {
+                            $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*)*
                         }
                     )*
                 }
             }
         }
     };
-    (struct $struct_name:path { $($field:ident),* }) => {
+    (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),*  $(,)? }) => {
         impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name {
             #[inline]
             fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
@@ -101,11 +100,11 @@ fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&s
                     $(ref $field),*
                 } = *self;
 
-                $( $field.hash_stable(__ctx, __hasher));*
+                $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
             }
         }
     };
-    (tuple_struct $struct_name:path { $($field:ident),* }) => {
+    (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),*  $(,)? }) => {
         impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name {
             #[inline]
             fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
@@ -115,7 +114,7 @@ fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&s
                     $(ref $field),*
                 ) = *self;
 
-                $( $field.hash_stable(__ctx, __hasher));*
+                $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
             }
         }
     };
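
Reading the new arms together: a bare `field` hashes the field itself, `field -> _` discards it, and `field -> (expr)` hashes a delegate expression in its place (the delegate is matched as a single token tree, hence the parentheses). Hypothetical invocations, shown only to illustrate the accepted syntax; `Foo` and `Bar` are made-up types, not part of the commit:

    // impl_stable_hash_for!(struct Foo { id, span, cached_len -> _ });
    // impl_stable_hash_for!(enum Bar { Plain(x), Wrapped(inner -> (inner.0)) });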
diff --git a/src/librustc/middle/const_val.rs b/src/librustc/middle/const_val.rs
deleted file mode 100644 (file)
index 2fa77be..0000000
--- a/src/librustc/middle/const_val.rs
+++ /dev/null
@@ -1,178 +0,0 @@
-// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use hir::def_id::DefId;
-use ty;
-use ty::subst::Substs;
-use ty::query::TyCtxtAt;
-use mir::interpret::ConstValue;
-use errors::DiagnosticBuilder;
-
-use graphviz::IntoCow;
-use syntax_pos::Span;
-use syntax::ast;
-
-use std::borrow::Cow;
-use rustc_data_structures::sync::Lrc;
-
-pub type EvalResult<'tcx> = Result<&'tcx ty::Const<'tcx>, ConstEvalErr<'tcx>>;
-
-#[derive(Copy, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Eq, PartialEq, Ord, PartialOrd)]
-pub enum ConstVal<'tcx> {
-    Unevaluated(DefId, &'tcx Substs<'tcx>),
-    Value(ConstValue<'tcx>),
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct ConstEvalErr<'tcx> {
-    pub span: Span,
-    pub kind: Lrc<ErrKind<'tcx>>,
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub enum ErrKind<'tcx> {
-
-    CouldNotResolve,
-    TypeckError,
-    CheckMatchError,
-    Miri(::mir::interpret::EvalError<'tcx>, Vec<FrameInfo>),
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct FrameInfo {
-    pub span: Span,
-    pub location: String,
-    pub lint_root: Option<ast::NodeId>,
-}
-
-#[derive(Clone, Debug)]
-pub enum ConstEvalErrDescription<'a, 'tcx: 'a> {
-    Simple(Cow<'a, str>),
-    Backtrace(&'a ::mir::interpret::EvalError<'tcx>, &'a [FrameInfo]),
-}
-
-impl<'a, 'tcx> ConstEvalErrDescription<'a, 'tcx> {
-    /// Return a one-line description of the error, for lints and such
-    pub fn into_oneline(self) -> Cow<'a, str> {
-        match self {
-            ConstEvalErrDescription::Simple(simple) => simple,
-            ConstEvalErrDescription::Backtrace(miri, _) => format!("{}", miri).into_cow(),
-        }
-    }
-}
-
-impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
-    pub fn description(&'a self) -> ConstEvalErrDescription<'a, 'tcx> {
-        use self::ErrKind::*;
-        use self::ConstEvalErrDescription::*;
-
-        macro_rules! simple {
-            ($msg:expr) => ({ Simple($msg.into_cow()) });
-            ($fmt:expr, $($arg:tt)+) => ({
-                Simple(format!($fmt, $($arg)+).into_cow())
-            })
-        }
-
-        match *self.kind {
-            CouldNotResolve => simple!("could not resolve"),
-            TypeckError => simple!("type-checking failed"),
-            CheckMatchError => simple!("match-checking failed"),
-            Miri(ref err, ref trace) => Backtrace(err, trace),
-        }
-    }
-
-    pub fn struct_error(&self,
-        tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
-        message: &str)
-        -> Option<DiagnosticBuilder<'tcx>>
-    {
-        self.struct_generic(tcx, message, None, true)
-    }
-
-    pub fn report_as_error(&self,
-        tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
-        message: &str
-    ) {
-        let err = self.struct_generic(tcx, message, None, true);
-        if let Some(mut err) = err {
-            err.emit();
-        }
-    }
-
-    pub fn report_as_lint(&self,
-        tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
-        message: &str,
-        lint_root: ast::NodeId,
-    ) {
-        let lint = self.struct_generic(
-            tcx,
-            message,
-            Some(lint_root),
-            false,
-        );
-        if let Some(mut lint) = lint {
-            lint.emit();
-        }
-    }
-
-    fn struct_generic(
-        &self,
-        tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
-        message: &str,
-        lint_root: Option<ast::NodeId>,
-        as_err: bool,
-    ) -> Option<DiagnosticBuilder<'tcx>> {
-        let (msg, frames): (_, &[_]) = match *self.kind {
-            ErrKind::TypeckError | ErrKind::CheckMatchError => return None,
-            ErrKind::Miri(ref miri, ref frames) => {
-                match miri.kind {
-                    ::mir::interpret::EvalErrorKind::TypeckError |
-                    ::mir::interpret::EvalErrorKind::Layout(_) => return None,
-                    ::mir::interpret::EvalErrorKind::ReferencedConstant(ref inner) => {
-                        inner.struct_generic(tcx, "referenced constant", lint_root, as_err)?.emit();
-                        (miri.to_string(), frames)
-                    },
-                    _ => (miri.to_string(), frames),
-                }
-            }
-            _ => (self.description().into_oneline().to_string(), &[]),
-        };
-        trace!("reporting const eval failure at {:?}", self.span);
-        let mut err = if as_err {
-            struct_error(tcx, message)
-        } else {
-            let node_id = frames
-                .iter()
-                .rev()
-                .filter_map(|frame| frame.lint_root)
-                .next()
-                .or(lint_root)
-                .expect("some part of a failing const eval must be local");
-            tcx.struct_span_lint_node(
-                ::rustc::lint::builtin::CONST_ERR,
-                node_id,
-                tcx.span,
-                message,
-            )
-        };
-        err.span_label(self.span, msg);
-        for FrameInfo { span, location, .. } in frames {
-            err.span_label(*span, format!("inside call to `{}`", location));
-        }
-        Some(err)
-    }
-}
-
-pub fn struct_error<'a, 'gcx, 'tcx>(
-    tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
-    msg: &str,
-) -> DiagnosticBuilder<'tcx> {
-    struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
-}
index 7ebc0d4a4de5a4a358566bb002ad670eeb730ff0..caf73096ebf5e88e506091eff7779fcb72732ef1 100644 (file)
@@ -599,7 +599,7 @@ fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
                 if !self.symbol_is_live(impl_item.id, None) {
                     self.warn_dead_code(impl_item.id,
                                         impl_item.span,
-                                        impl_item.name,
+                                        impl_item.ident.name,
                                         "associated const",
                                         "used");
                 }
@@ -608,7 +608,7 @@ fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
             hir::ImplItemKind::Method(_, body_id) => {
                 if !self.symbol_is_live(impl_item.id, None) {
                     let span = self.tcx.sess.codemap().def_span(impl_item.span);
-                    self.warn_dead_code(impl_item.id, span, impl_item.name, "method", "used");
+                    self.warn_dead_code(impl_item.id, span, impl_item.ident.name, "method", "used");
                 }
                 self.visit_nested_body(body_id)
             }
index b98a4416d213f32ce2c7a65c90154eab153ac7d4..07a9dd75d4ca4ae1229449666c4a1cc72a37887f 100644 (file)
@@ -374,10 +374,9 @@ fn visit_fn<'a, 'tcx: 'a>(ir: &mut IrMaps<'a, 'tcx>,
     let body = ir.tcx.hir.body(body_id);
 
     for arg in &body.arguments {
-        arg.pat.each_binding(|_bm, hir_id, _x, path1| {
+        arg.pat.each_binding(|_bm, hir_id, _x, ident| {
             debug!("adding argument {:?}", hir_id);
-            let name = path1.node;
-            fn_maps.add_variable(Arg(hir_id, name));
+            fn_maps.add_variable(Arg(hir_id, ident.name));
         })
     };
 
@@ -430,12 +429,11 @@ fn add_from_pat<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, pat: &P<hir::Pat>) {
         }
     }
 
-    pat.each_binding(|_bm, hir_id, _sp, path1| {
-        let name = path1.node;
-        ir.add_live_node_for_node(hir_id, VarDefNode(path1.span));
+    pat.each_binding(|_bm, hir_id, _sp, ident| {
+        ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
         ir.add_variable(Local(LocalInfo {
             id: hir_id,
-            name,
+            name: ident.name,
             is_shorthand: shorthand_field_ids.contains(&hir_id)
         }));
     });
@@ -1374,7 +1372,7 @@ fn check_local<'a, 'tcx>(this: &mut Liveness<'a, 'tcx>, local: &'tcx hir::Local)
         },
         None => {
             this.pat_bindings(&local.pat, |this, ln, var, sp, id| {
-                let span = local.pat.simple_span().unwrap_or(sp);
+                let span = local.pat.simple_ident().map_or(sp, |ident| ident.span);
                 this.warn_about_unused(span, id, ln, var);
             })
         }
@@ -1475,12 +1473,11 @@ fn should_warn(&self, var: Variable) -> Option<String> {
 
     fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) {
         for arg in &body.arguments {
-            arg.pat.each_binding(|_bm, hir_id, _, path1| {
-                let sp = path1.span;
+            arg.pat.each_binding(|_bm, hir_id, _, ident| {
+                let sp = ident.span;
                 let var = self.variable(hir_id, sp);
                 // Ignore unused self.
-                let name = path1.node;
-                if name != keywords::SelfValue.name() {
+                if ident.name != keywords::SelfValue.name() {
                     if !self.warn_about_unused(sp, hir_id, entry_ln, var) {
                         if self.live_on_entry(entry_ln, var).is_none() {
                             self.report_dead_assign(hir_id, sp, var, true);
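
The closure passed to `each_binding` now receives an `ast::Ident` directly, and the `keywords::SelfValue` check keeps `self` out of the unused-argument warning. Illustrative user code for that warning path (the warning text itself is unchanged by this commit):

    struct Counter(u32);

    impl Counter {
        // rustc warns about `unused`, but skips `self` thanks to the
        // SelfValue check above.
        fn bump(&mut self, unused: u32) {
            self.0 += 1;
        }
    }

    fn main() {
        let mut c = Counter(0);
        c.bump(7);
        assert_eq!(c.0, 1);
    }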
index e3b59d25ab91f04c324bdfec65e91e93355cc03c..6d4e9a1b767a9524e29b771c40d4d0d0a70e349e 100644 (file)
@@ -179,7 +179,7 @@ pub enum Note {
 // and how it is located, as well as the mutability of the memory in
 // which the value is stored.
 //
-// *WARNING* The field `cmt.type` is NOT necessarily the same as the
+// *WARNING* The field `cmt.ty` is NOT necessarily the same as the
 // result of `node_id_to_type(cmt.id)`. This is because the `id` is
 // always the `id` of the node producing the type; in an expression
 // like `*x`, the type of this deref node is the deref'd type (`T`),
index efb3eecd691f2dc47d3930af098c0b49166ff05b..ed2b9c5068929c49a720f2b78be98730423e2acc 100644 (file)
@@ -94,7 +94,7 @@ fn early(hir_map: &Map, index: &mut u32, param: &GenericParam) -> (ParamName, Re
         let def_id = hir_map.local_def_id(param.id);
         let origin = LifetimeDefOrigin::from_param(param);
         debug!("Region::early: index={} def_id={:?}", i, def_id);
-        (param.name, Region::EarlyBound(i, def_id, origin))
+        (param.name.modern(), Region::EarlyBound(i, def_id, origin))
     }
 
     fn late(hir_map: &Map, param: &GenericParam) -> (ParamName, Region) {
@@ -108,7 +108,7 @@ fn late(hir_map: &Map, param: &GenericParam) -> (ParamName, Region) {
             def_id,
             origin,
         );
-        (param.name, Region::LateBound(depth, def_id, origin))
+        (param.name.modern(), Region::LateBound(depth, def_id, origin))
     }
 
     fn late_anon(index: &Cell<u32>) -> Region {
@@ -254,7 +254,7 @@ struct LifetimeContext<'a, 'tcx: 'a> {
     is_in_fn_syntax: bool,
 
     /// List of labels in the function/method currently under analysis.
-    labels_in_fn: Vec<(ast::Name, Span)>,
+    labels_in_fn: Vec<ast::Ident>,
 
     /// Cache for cross-crate per-definition object lifetime defaults.
     xcrate_object_lifetime_defaults: DefIdMap<Vec<ObjectLifetimeDefault>>,
@@ -689,7 +689,7 @@ fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
                         GenericParamKind::Lifetime { .. } => {
                             let (name, reg) = Region::early(&self.tcx.hir, &mut index, &param);
                             if let hir::ParamName::Plain(param_name) = name {
-                                if param_name == keywords::UnderscoreLifetime.name() {
+                                if param_name.name == keywords::UnderscoreLifetime.name() {
                                     // Pick the elided lifetime "definition" if one exists
                                     // and use it to make an elision scope.
                                     elision = Some(reg);
@@ -1109,7 +1109,7 @@ fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body) {
     struct GatherLabels<'a, 'tcx: 'a> {
         tcx: TyCtxt<'a, 'tcx, 'tcx>,
         scope: ScopeRef<'a>,
-        labels_in_fn: &'a mut Vec<(ast::Name, Span)>,
+        labels_in_fn: &'a mut Vec<ast::Ident>,
     }
 
     let mut gather = GatherLabels {
@@ -1125,32 +1125,31 @@ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'v> {
         }
 
         fn visit_expr(&mut self, ex: &hir::Expr) {
-            if let Some((label, label_span)) = expression_label(ex) {
-                for &(prior, prior_span) in &self.labels_in_fn[..] {
+            if let Some(label) = expression_label(ex) {
+                for prior_label in &self.labels_in_fn[..] {
                     // FIXME (#24278): non-hygienic comparison
-                    if label == prior {
+                    if label.name == prior_label.name {
                         signal_shadowing_problem(
                             self.tcx,
-                            label,
-                            original_label(prior_span),
-                            shadower_label(label_span),
+                            label.name,
+                            original_label(prior_label.span),
+                            shadower_label(label.span),
                         );
                     }
                 }
 
-                check_if_label_shadows_lifetime(self.tcx, self.scope, label, label_span);
+                check_if_label_shadows_lifetime(self.tcx, self.scope, label);
 
-                self.labels_in_fn.push((label, label_span));
+                self.labels_in_fn.push(label);
             }
             intravisit::walk_expr(self, ex)
         }
     }
 
-    fn expression_label(ex: &hir::Expr) -> Option<(ast::Name, Span)> {
+    fn expression_label(ex: &hir::Expr) -> Option<ast::Ident> {
         match ex.node {
-            hir::ExprWhile(.., Some(label)) | hir::ExprLoop(_, Some(label), _) => {
-                Some((label.name, label.span))
-            }
+            hir::ExprWhile(.., Some(label)) |
+            hir::ExprLoop(_, Some(label), _) => Some(label.ident),
             _ => None,
         }
     }
@@ -1158,8 +1157,7 @@ fn expression_label(ex: &hir::Expr) -> Option<(ast::Name, Span)> {
     fn check_if_label_shadows_lifetime(
         tcx: TyCtxt<'_, '_, '_>,
         mut scope: ScopeRef<'_>,
-        label: ast::Name,
-        label_span: Span,
+        label: ast::Ident,
     ) {
         loop {
             match *scope {
@@ -1177,15 +1175,14 @@ fn check_if_label_shadows_lifetime(
                     ref lifetimes, s, ..
                 } => {
                     // FIXME (#24278): non-hygienic comparison
-                    let param_name = hir::ParamName::Plain(label);
-                    if let Some(def) = lifetimes.get(&param_name) {
+                    if let Some(def) = lifetimes.get(&hir::ParamName::Plain(label.modern())) {
                         let node_id = tcx.hir.as_local_node_id(def.id().unwrap()).unwrap();
 
                         signal_shadowing_problem(
                             tcx,
-                            label,
+                            label.name,
                             original_lifetime(tcx.hir.span(node_id)),
-                            shadower_label(label_span),
+                            shadower_label(label.span),
                         );
                         return;
                     }
@@ -1220,7 +1217,7 @@ fn compute_object_lifetime_defaults(
                                 generics.params.iter().find_map(|param| match param.kind {
                                         GenericParamKind::Lifetime { .. } => {
                                             if i == 0 {
-                                                return Some(param.name.name().to_string());
+                                                return Some(param.name.ident().to_string());
                                             }
                                             i -= 1;
                                             None
@@ -1254,7 +1251,7 @@ fn object_lifetime_defaults_for_item(
     fn add_bounds(set: &mut Set1<hir::LifetimeName>, bounds: &[hir::GenericBound]) {
         for bound in bounds {
             if let hir::GenericBound::Outlives(ref lifetime) = *bound {
-                set.insert(lifetime.name);
+                set.insert(lifetime.name.modern());
             }
         }
     }
@@ -1400,10 +1397,10 @@ fn check_uses_for_lifetimes_defined_by_scope(&mut self) {
                     debug!("node id first={:?}", node_id);
                     if let Some((id, span, name)) = match self.tcx.hir.get(node_id) {
                         hir::map::NodeLifetime(hir_lifetime) => {
-                            Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.name()))
+                            Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.ident()))
                         }
                         hir::map::NodeGenericParam(param) => {
-                            Some((param.id, param.span, param.name.name()))
+                            Some((param.id, param.span, param.name.ident()))
                         }
                         _ => None,
                     } {
@@ -1426,10 +1423,10 @@ fn check_uses_for_lifetimes_defined_by_scope(&mut self) {
                     let node_id = self.tcx.hir.as_local_node_id(def_id).unwrap();
                     if let Some((id, span, name)) = match self.tcx.hir.get(node_id) {
                         hir::map::NodeLifetime(hir_lifetime) => {
-                            Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.name()))
+                            Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.ident()))
                         }
                         hir::map::NodeGenericParam(param) => {
-                            Some((param.id, param.span, param.name.name()))
+                            Some((param.id, param.span, param.name.ident()))
                         }
                         _ => None,
                     } {
@@ -1582,7 +1579,7 @@ fn resolve_lifetime_ref(&mut self, lifetime_ref: &'tcx hir::Lifetime) {
                         LifetimeName::Param(param_name) => param_name,
                         _ => bug!("expected LifetimeName::Param"),
                     };
-                    if let Some(&def) = lifetimes.get(&name) {
+                    if let Some(&def) = lifetimes.get(&name.modern()) {
                         break Some(def.shifted(late_depth));
                     } else {
                         late_depth += 1;
@@ -1651,7 +1648,7 @@ fn resolve_lifetime_ref(&mut self, lifetime_ref: &'tcx hir::Lifetime) {
                 lifetime_ref.span,
                 E0261,
                 "use of undeclared lifetime name `{}`",
-                lifetime_ref.name.name()
+                lifetime_ref
             ).span_label(lifetime_ref.span, "undeclared lifetime")
                 .emit();
         }
@@ -1792,7 +1789,7 @@ fn visit_segment_args(
 
     fn visit_fn_like_elision(
         &mut self,
-        inputs: &'tcx [P<hir::Ty>],
+        inputs: &'tcx [hir::Ty],
         output: Option<&'tcx P<hir::Ty>>,
     ) {
         debug!("visit_fn_like_elision: enter");
@@ -2246,7 +2243,7 @@ fn check_lifetime_params(&mut self, old_scope: ScopeRef, params: &'tcx [hir::Gen
         }).collect();
         for (i, (lifetime_i, lifetime_i_name)) in lifetimes.iter().enumerate() {
             if let hir::ParamName::Plain(_) = lifetime_i_name {
-                let name = lifetime_i_name.name();
+                let name = lifetime_i_name.ident().name;
                 if name == keywords::UnderscoreLifetime.name() ||
                    name == keywords::StaticLifetime.name() {
                     let mut err = struct_span_err!(
@@ -2254,7 +2251,7 @@ fn check_lifetime_params(&mut self, old_scope: ScopeRef, params: &'tcx [hir::Gen
                         lifetime_i.span,
                         E0262,
                         "invalid lifetime parameter name: `{}`",
-                        name
+                        lifetime_i.name.ident(),
                     );
                     err.span_label(
                         lifetime_i.span,
@@ -2272,7 +2269,7 @@ fn check_lifetime_params(&mut self, old_scope: ScopeRef, params: &'tcx [hir::Gen
                         lifetime_j.span,
                         E0263,
                         "lifetime name `{}` declared twice in the same scope",
-                        lifetime_j.name.name()
+                        lifetime_j.name.ident()
                     ).span_label(lifetime_j.span, "declared twice")
                      .span_label(lifetime_i.span, "previous declaration here")
                      .emit();
@@ -2301,12 +2298,12 @@ fn check_lifetime_params(&mut self, old_scope: ScopeRef, params: &'tcx [hir::Gen
                                 lifetime_i.span.to(lt.span),
                                 &format!(
                                     "unnecessary lifetime parameter `{}`",
-                                    lifetime_i.name.name(),
+                                    lifetime_i.name.ident(),
                                 ),
                             ).help(&format!(
                                 "you can use the `'static` lifetime directly, in place \
                                     of `{}`",
-                                lifetime_i.name.name(),
+                                lifetime_i.name.ident(),
                             )).emit();
                         }
                         hir::LifetimeName::Param(_)
@@ -2325,13 +2322,13 @@ fn check_lifetime_param_for_shadowing(
         mut old_scope: ScopeRef,
         param: &'tcx hir::GenericParam,
     ) {
-        for &(label, label_span) in &self.labels_in_fn {
+        for label in &self.labels_in_fn {
             // FIXME (#24278): non-hygienic comparison
-            if param.name.name() == label {
+            if param.name.ident().name == label.name {
                 signal_shadowing_problem(
                     self.tcx,
-                    label,
-                    original_label(label_span),
+                    label.name,
+                    original_label(label.span),
                     shadower_lifetime(&param),
                 );
                 return;
@@ -2353,12 +2350,12 @@ fn check_lifetime_param_for_shadowing(
                 Scope::Binder {
                     ref lifetimes, s, ..
                 } => {
-                    if let Some(&def) = lifetimes.get(&param.name) {
+                    if let Some(&def) = lifetimes.get(&param.name.modern()) {
                         let node_id = self.tcx.hir.as_local_node_id(def.id().unwrap()).unwrap();
 
                         signal_shadowing_problem(
                             self.tcx,
-                            param.name.name(),
+                            param.name.ident().name,
                             original_lifetime(self.tcx.hir.span(node_id)),
                             shadower_lifetime(&param),
                         );
@@ -2520,7 +2517,8 @@ fn insert_late_bound_lifetimes(
             hir::GenericParamKind::Lifetime { .. } => {
                 if !param.bounds.is_empty() {
                     // `'a: 'b` means both `'a` and `'b` are referenced
-                    appears_in_where_clause.regions.insert(hir::LifetimeName::Param(param.name));
+                    appears_in_where_clause
+                        .regions.insert(hir::LifetimeName::Param(param.name.modern()));
                 }
             }
             hir::GenericParamKind::Type { .. } => {}
@@ -2537,7 +2535,7 @@ fn insert_late_bound_lifetimes(
     // - do not appear in the where-clauses
     // - are not implicitly captured by `impl Trait`
     for param in &generics.params {
-        let lt_name = hir::LifetimeName::Param(param.name);
+        let lt_name = hir::LifetimeName::Param(param.name.modern());
         // appears in the where clauses? early-bound.
         if appears_in_where_clause.regions.contains(&lt_name) {
             continue;
@@ -2551,7 +2549,7 @@ fn insert_late_bound_lifetimes(
         }
 
         debug!("insert_late_bound_lifetimes: lifetime {:?} with id {:?} is late-bound",
-               param.name.name(),
+               param.name.ident(),
                param.id);
 
         let inserted = map.late_bound.insert(param.id);
@@ -2596,7 +2594,7 @@ fn visit_ty(&mut self, ty: &'v hir::Ty) {
         }
 
         fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
-            self.regions.insert(lifetime_ref.name);
+            self.regions.insert(lifetime_ref.name.modern());
         }
     }
 
@@ -2610,7 +2608,7 @@ fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'v> {
         }
 
         fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
-            self.regions.insert(lifetime_ref.name);
+            self.regions.insert(lifetime_ref.name.modern());
         }
     }
 }
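
Most of the churn in this file switches labels and lifetime parameters over to hygiene-aware `ast::Ident` comparisons via `.modern()` (the FIXME for #24278 notwithstanding). The shadowing checks involved fire on user code along these lines; the diagnostic wording is illustrative, not quoted from this commit:

    fn pick<'a>(xs: &'a [u32]) -> &'a u32 {
        // The loop label `'a` shadows the lifetime parameter `'a` above;
        // this is what signal_shadowing_problem warns about.
        'a: loop {
            break 'a;
        }
        &xs[0]
    }

    fn main() {
        println!("{}", pick(&[1, 2, 3]));
    }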
index 4cdc9fdddbf80b1176de05bdd44b9a26ab91315a..a289a2c21ce7f421c3d2876a8284e118c46adf68 100644 (file)
@@ -744,7 +744,8 @@ fn visit_item(&mut self, item: &'tcx hir::Item) {
                     for impl_item_ref in impl_item_refs {
                         let impl_item = self.tcx.hir.impl_item(impl_item_ref.id);
                         let trait_item_def_id = self.tcx.associated_items(trait_did)
-                            .find(|item| item.name == impl_item.name).map(|item| item.def_id);
+                            .find(|item| item.ident.name == impl_item.ident.name)
+                            .map(|item| item.def_id);
                         if let Some(def_id) = trait_item_def_id {
                             // Pass `None` to skip deprecation warnings.
                             self.tcx.check_stability(def_id, None, impl_item.span);
index 3c2ea047218a7bfb16d119383a0b23af10c24a94..1d147eef054f300b64a707bfc3f599f063b92609 100644 (file)
@@ -17,6 +17,7 @@
 use syntax::ast;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
+use hir::def_id::DefId;
 use hir::intravisit::{Visitor, NestedVisitorMap};
 use hir::intravisit;
 use hir;
@@ -145,6 +146,15 @@ fn visit_foreign_item(&mut self, i: &hir::ForeignItem) {
     }
 }
 
+impl<'a, 'tcx, 'gcx> TyCtxt<'a, 'tcx, 'gcx> {
+    pub fn is_weak_lang_item(&self, item_def_id: DefId) -> bool {
+        let lang_items = self.lang_items();
+        let did = Some(item_def_id);
+
+        $(lang_items.$name() == did)||+
+    }
+}
+
 ) }
 
 weak_lang_items! {
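
The generated `is_weak_lang_item` leans on a macro_rules detail worth calling out: `||` is a single token, so it can serve as the repetition separator in `$(...)||+`, expanding the per-item comparisons into one chained boolean expression. A free-standing sketch of the same trick (the macro and names here are made up):

    // Expands e.g. `matches_any!(3; 1, 2, 3)` into `1 == 3 || 2 == 3 || 3 == 3`.
    macro_rules! matches_any {
        ($needle:expr; $($candidate:expr),+) => {
            $($candidate == $needle)||+
        };
    }

    fn main() {
        assert!(matches_any!(3; 1, 2, 3));
        assert!(!matches_any!(9; 1, 2, 3));
    }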
index bf5bae6b20c429ef15aaf6abf518c9f59fbea389..86427bb2382cb5bc7004ec8bd01b10818e289f81 100644 (file)
@@ -1,9 +1,9 @@
 use std::{fmt, env};
 
 use mir;
-use middle::const_val::ConstEvalErr;
 use ty::{FnSig, Ty, layout};
 use ty::layout::{Size, Align};
+use rustc_data_structures::sync::Lrc;
 
 use super::{
     Pointer, Lock, AccessKind
 
 use backtrace::Backtrace;
 
+use ty;
+use ty::query::TyCtxtAt;
+use errors::DiagnosticBuilder;
+
+use syntax_pos::Span;
+use syntax::ast;
+
+pub type ConstEvalResult<'tcx> = Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>>;
+
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
+pub struct ConstEvalErr<'tcx> {
+    pub span: Span,
+    pub error: ::mir::interpret::EvalError<'tcx>,
+    pub stacktrace: Vec<FrameInfo>,
+}
+
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
+pub struct FrameInfo {
+    pub span: Span,
+    pub location: String,
+    pub lint_root: Option<ast::NodeId>,
+}
+
+impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
+    pub fn struct_error(&self,
+        tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+        message: &str)
+        -> Option<DiagnosticBuilder<'tcx>>
+    {
+        self.struct_generic(tcx, message, None)
+    }
+
+    pub fn report_as_error(&self,
+        tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+        message: &str
+    ) {
+        let err = self.struct_generic(tcx, message, None);
+        if let Some(mut err) = err {
+            err.emit();
+        }
+    }
+
+    pub fn report_as_lint(&self,
+        tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+        message: &str,
+        lint_root: ast::NodeId,
+    ) {
+        let lint = self.struct_generic(
+            tcx,
+            message,
+            Some(lint_root),
+        );
+        if let Some(mut lint) = lint {
+            lint.emit();
+        }
+    }
+
+    fn struct_generic(
+        &self,
+        tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+        message: &str,
+        lint_root: Option<ast::NodeId>,
+    ) -> Option<DiagnosticBuilder<'tcx>> {
+        match self.error.kind {
+            ::mir::interpret::EvalErrorKind::TypeckError |
+            ::mir::interpret::EvalErrorKind::TooGeneric |
+            ::mir::interpret::EvalErrorKind::CheckMatchError |
+            ::mir::interpret::EvalErrorKind::Layout(_) => return None,
+            ::mir::interpret::EvalErrorKind::ReferencedConstant(ref inner) => {
+                inner.struct_generic(tcx, "referenced constant has errors", lint_root)?.emit();
+            },
+            _ => {},
+        }
+        trace!("reporting const eval failure at {:?}", self.span);
+        let mut err = if let Some(lint_root) = lint_root {
+            let node_id = self.stacktrace
+                .iter()
+                .rev()
+                .filter_map(|frame| frame.lint_root)
+                .next()
+                .unwrap_or(lint_root);
+            tcx.struct_span_lint_node(
+                ::rustc::lint::builtin::CONST_ERR,
+                node_id,
+                tcx.span,
+                message,
+            )
+        } else {
+            struct_error(tcx, message)
+        };
+        err.span_label(self.span, self.error.to_string());
+        for FrameInfo { span, location, .. } in &self.stacktrace {
+            err.span_label(*span, format!("inside call to `{}`", location));
+        }
+        Some(err)
+    }
+}
+
+pub fn struct_error<'a, 'gcx, 'tcx>(
+    tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+    msg: &str,
+) -> DiagnosticBuilder<'tcx> {
+    struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
+}
+
 #[derive(Debug, Clone, RustcEncodable, RustcDecodable)]
 pub struct EvalError<'tcx> {
     pub kind: EvalErrorKind<'tcx, u64>,
@@ -150,9 +255,12 @@ pub enum EvalErrorKind<'tcx, O> {
     UnimplementedTraitSelection,
     /// Abort in case type errors are reached
     TypeckError,
+    /// Resolution can fail if we are in a too generic context
+    TooGeneric,
+    CheckMatchError,
     /// Cannot compute this constant because it depends on another one
     /// which already produced an error
-    ReferencedConstant(ConstEvalErr<'tcx>),
+    ReferencedConstant(Lrc<ConstEvalErr<'tcx>>),
     GeneratorResumedAfterReturn,
     GeneratorResumedAfterPanic,
 }
@@ -268,6 +376,10 @@ pub fn description(&self) -> &str {
                 "there were unresolved type arguments during trait selection",
             TypeckError =>
                 "encountered constants with type errors, stopping evaluation",
+            TooGeneric =>
+                "encountered overly generic constant",
+            CheckMatchError =>
+                "match checking failed",
             ReferencedConstant(_) =>
                 "referenced constant has errors",
             Overflow(mir::BinOp::Add) => "attempt to add with overflow",
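
This reinstates, in reworked form, the reporting half of the `middle/const_val.rs` module deleted earlier in the diff: an `EvalError` plus its `FrameInfo` stacktrace becomes either a hard `E0080` error (`report_as_error`) or a `const_err` lint (`report_as_lint`), with one "inside call to `…`" label per frame. Illustrative user code that ends up in this machinery; the exact diagnostics depend on the rustc version:

    // Rejected at compile time: evaluating `TOO_BIG` overflows, and the error
    // is rendered through the ConstEvalErr machinery above
    // ("attempt to add with overflow").
    const TOO_BIG: u8 = 255 + 1;

    fn main() {
        println!("{}", TOO_BIG);
    }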
index 6bd5814799ae07d6c1847c99bfc77b11528dc55c..018c2446054be5080f2870d1804f3a32be31d171 100644 (file)
@@ -8,7 +8,10 @@ macro_rules! err {
 mod error;
 mod value;
 
-pub use self::error::{EvalError, EvalResult, EvalErrorKind, AssertMessage};
+pub use self::error::{
+    EvalError, EvalResult, EvalErrorKind, AssertMessage, ConstEvalErr, struct_error,
+    FrameInfo, ConstEvalResult,
+};
 
 pub use self::value::{Scalar, Value, ConstValue};
 
index 9e3d4e60603ec8f6173dd8b8e9a6e3db7ff26b29..24595c9328208a2a1183e1b11baad07ec9f11f17 100644 (file)
@@ -2,13 +2,19 @@
 
 use ty::layout::{Align, HasDataLayout, Size};
 use ty;
+use ty::subst::Substs;
+use hir::def_id::DefId;
 
 use super::{EvalResult, Pointer, PointerArithmetic, Allocation};
 
 /// Represents a constant value in Rust. ByVal and ScalarPair are optimizations which
 /// matches Value's optimizations for easy conversions between these two types
-#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
 pub enum ConstValue<'tcx> {
+    /// Never returned from the `const_eval` query, but the HIR contains these frequently in order
+    /// to allow HIR creation to happen for everything before needing to be able to run constant
+    /// evaluation
+    Unevaluated(DefId, &'tcx Substs<'tcx>),
     /// Used only for types with layout::abi::Scalar ABI and ZSTs which use Scalar::undef()
     Scalar(Scalar),
     /// Used only for types with layout::abi::ScalarPair
@@ -30,6 +36,7 @@ pub fn from_byval_value(val: Value) -> Self {
     #[inline]
     pub fn to_byval_value(&self) -> Option<Value> {
         match *self {
+            ConstValue::Unevaluated(..) |
             ConstValue::ByRef(..) => None,
             ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a, b)),
             ConstValue::Scalar(val) => Some(Value::Scalar(val)),
@@ -44,7 +51,8 @@ pub fn from_scalar(val: Scalar) -> Self {
     #[inline]
     pub fn to_scalar(&self) -> Option<Scalar> {
         match *self {
-            ConstValue::ByRef(..) => None,
+            ConstValue::Unevaluated(..) |
+            ConstValue::ByRef(..) |
             ConstValue::ScalarPair(..) => None,
             ConstValue::Scalar(val) => Some(val),
         }
index ee6cb398acdceecef12b853bc67886eeeb0a61ee..2198e3f6b318ef1c60f2f4af577a818eff50bd41 100644 (file)
@@ -2162,18 +2162,12 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
     }
 }
 
-/// Write a `ConstVal` in a way closer to the original source code than the `Debug` output.
+/// Write a `ConstValue` in a way closer to the original source code than the `Debug` output.
 pub fn fmt_const_val<W: Write>(fmt: &mut W, const_val: &ty::Const) -> fmt::Result {
-    use middle::const_val::ConstVal;
-    match const_val.val {
-        ConstVal::Unevaluated(..) => write!(fmt, "{:?}", const_val),
-        ConstVal::Value(val) => {
-            if let Some(value) = val.to_byval_value() {
-                print_miri_value(value, const_val.ty, fmt)
-            } else {
-                write!(fmt, "{:?}:{}", val, const_val.ty)
-            }
-        }
+    if let Some(value) = const_val.to_byval_value() {
+        print_miri_value(value, const_val.ty, fmt)
+    } else {
+        write!(fmt, "{:?}:{}", const_val.val, const_val.ty)
     }
 }
 
index 40d54885619fabe07477589a645eac60ec8aacf3..acbf5392cf54c0b7f633d2bc4c6e405b44d39ab3 100644 (file)
 use super::{ObligationCause, PredicateObligation};
 
 pub trait TraitEngine<'tcx>: 'tcx {
-    fn normalize_projection_type<'a, 'gcx>(
+    fn normalize_projection_type(
         &mut self,
-        infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
         param_env: ty::ParamEnv<'tcx>,
         projection_ty: ty::ProjectionTy<'tcx>,
         cause: ObligationCause<'tcx>,
     ) -> Ty<'tcx>;
 
-    fn register_bound<'a, 'gcx>(
+    fn register_bound(
         &mut self,
-        infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
         param_env: ty::ParamEnv<'tcx>,
         ty: Ty<'tcx>,
         def_id: DefId,
         cause: ObligationCause<'tcx>,
     );
 
-    fn register_predicate_obligation<'a, 'gcx>(
+    fn register_predicate_obligation(
         &mut self,
-        infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
         obligation: PredicateObligation<'tcx>,
     );
 
-    fn select_all_or_error<'a, 'gcx>(
+    fn select_all_or_error(
         &mut self,
-        infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
     ) -> Result<(), Vec<FulfillmentError<'tcx>>>;
 
-    fn select_where_possible<'a, 'gcx>(
+    fn select_where_possible(
         &mut self,
-        infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
     ) -> Result<(), Vec<FulfillmentError<'tcx>>>;
 
     fn pending_obligations(&self) -> Vec<PredicateObligation<'tcx>>;
 }
 
-impl<'a, 'gcx, 'tcx> dyn TraitEngine<'tcx> {
-    pub fn new(_tcx: TyCtxt<'_, '_, 'tcx>) -> Box<Self> {
-        Box::new(FulfillmentContext::new())
-    }
+pub trait TraitEngineExt<'tcx> {
+    fn register_predicate_obligations(
+        &mut self,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+        obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
+    );
+}
 
-    pub fn register_predicate_obligations<I>(
+impl<T: ?Sized + TraitEngine<'tcx>> TraitEngineExt<'tcx> for T {
+    fn register_predicate_obligations(
         &mut self,
-        infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-        obligations: I,
-    ) where
-        I: IntoIterator<Item = PredicateObligation<'tcx>>,
-    {
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+        obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
+    ) {
         for obligation in obligations {
             self.register_predicate_obligation(infcx, obligation);
         }
     }
 }
+
+impl dyn TraitEngine<'tcx> {
+    pub fn new(_tcx: TyCtxt<'_, '_, 'tcx>) -> Box<Self> {
+        Box::new(FulfillmentContext::new())
+    }
+}
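
The hunk above replaces the inherent `register_predicate_obligations` on `dyn TraitEngine` with an extension trait plus a blanket impl, keeping `TraitEngine` object safe while the generic convenience method remains callable on trait objects. A standalone sketch of that pattern, with made-up names:

trait Engine {
    fn register_one(&mut self, obligation: u32);
}

// Extension trait: the iterator-taking convenience method lives here,
// so `Engine` itself stays object safe.
trait EngineExt {
    fn register_all(&mut self, obligations: impl IntoIterator<Item = u32>);
}

impl<T: ?Sized + Engine> EngineExt for T {
    fn register_all(&mut self, obligations: impl IntoIterator<Item = u32>) {
        for o in obligations {
            self.register_one(o);
        }
    }
}

struct Fulfill(Vec<u32>);

impl Engine for Fulfill {
    fn register_one(&mut self, obligation: u32) {
        self.0.push(obligation);
    }
}

fn main() {
    let mut engine: Box<dyn Engine> = Box::new(Fulfill(Vec::new()));
    // Works on the trait object thanks to the blanket impl.
    engine.register_all(vec![1, 2, 3]);
}
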
index 0d7d39ccf40dfcff2e069e7537c8d5472f5becba..e5559b7012dc978145a233602ed09f59f6dbd79b 100644 (file)
@@ -963,7 +963,7 @@ pub fn get_fn_like_arguments(&self, node: hir::map::Node) -> (Span, Vec<ArgKind>
                 ..
             }) => {
                 (self.tcx.sess.codemap().def_span(span), decl.inputs.iter()
-                        .map(|arg| match arg.clone().into_inner().node {
+                        .map(|arg| match arg.clone().node {
                     hir::TyTup(ref tys) => ArgKind::Tuple(
                         Some(arg.span),
                         tys.iter()
index 04396d73df6a2b8a79a7a792b91423f8155703e7..e97171c481f1dd25c04fea9efdb7280be14eb2f1 100644 (file)
 use rustc_data_structures::obligation_forest::{ObligationProcessor, ProcessResult};
 use std::marker::PhantomData;
 use hir::def_id::DefId;
-use middle::const_val::{ConstEvalErr, ErrKind};
+use mir::interpret::ConstEvalErr;
+use mir::interpret::EvalErrorKind;
 
 use super::CodeAmbiguity;
 use super::CodeProjectionError;
 use super::CodeSelectionError;
-use super::engine::TraitEngine;
+use super::engine::{TraitEngine, TraitEngineExt};
 use super::{FulfillmentError, FulfillmentErrorCode};
 use super::{ObligationCause, PredicateObligation, Obligation};
 use super::project;
@@ -86,16 +87,6 @@ pub fn new_ignoring_regions() -> FulfillmentContext<'tcx> {
         }
     }
 
-    pub fn register_predicate_obligations<I>(&mut self,
-                                             infcx: &InferCtxt<'a, 'gcx, 'tcx>,
-                                             obligations: I)
-        where I: IntoIterator<Item = PredicateObligation<'tcx>>
-    {
-        for obligation in obligations {
-            self.register_predicate_obligation(infcx, obligation);
-        }
-    }
-
     /// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it
     /// only attempts to select obligations that haven't been seen before.
     fn select(&mut self, selcx: &mut SelectionContext<'a, 'gcx, 'tcx>)
@@ -511,8 +502,9 @@ fn process_obligation(&mut self,
                                     ProcessResult::Error(
                                         CodeSelectionError(ConstEvalFailure(ConstEvalErr {
                                             span: obligation.cause.span,
-                                            kind: ErrKind::CouldNotResolve.into(),
-                                        }))
+                                            error: EvalErrorKind::TooGeneric.into(),
+                                            stacktrace: vec![],
+                                        }.into()))
                                     )
                                 }
                             },
index 15f0b8eebc1db92b0d9a6a8271369f4cf20f0cd6..0290f2e3b13f08e335a1319ca22b64108e9d9517 100644 (file)
 use hir::def_id::DefId;
 use infer::outlives::env::OutlivesEnvironment;
 use middle::region;
-use middle::const_val::ConstEvalErr;
+use mir::interpret::ConstEvalErr;
 use ty::subst::Substs;
 use ty::{self, AdtKind, Slice, Ty, TyCtxt, GenericParamDefKind, ToPredicate};
 use ty::error::{ExpectedFound, TypeError};
 use ty::fold::{TypeFolder, TypeFoldable, TypeVisitor};
-use infer::canonical::{Canonical, Canonicalize};
 use infer::{InferCtxt};
 
 use rustc_data_structures::sync::Lrc;
@@ -48,7 +47,7 @@
 pub use self::select::{EvaluationResult, IntercrateAmbiguityCause, OverflowError};
 pub use self::specialize::{OverlapError, specialization_graph, translate_substs};
 pub use self::specialize::{SpecializesCache, find_associated_item};
-pub use self::engine::TraitEngine;
+pub use self::engine::{TraitEngine, TraitEngineExt};
 pub use self::util::elaborate_predicates;
 pub use self::util::supertraits;
 pub use self::util::Supertraits;
@@ -382,7 +381,7 @@ pub enum SelectionError<'tcx> {
                                 ty::PolyTraitRef<'tcx>,
                                 ty::error::TypeError<'tcx>),
     TraitNotObjectSafe(DefId),
-    ConstEvalFailure(ConstEvalErr<'tcx>),
+    ConstEvalFailure(Lrc<ConstEvalErr<'tcx>>),
     Overflow,
 }
 
@@ -1015,18 +1014,6 @@ pub fn provide(providers: &mut ty::query::Providers) {
     };
 }
 
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, Goal<'tcx>> {
-    // we ought to intern this, but I'm too lazy just now
-    type Canonicalized = Canonical<'gcx, ty::ParamEnvAnd<'gcx, Goal<'gcx>>>;
-
-    fn intern(
-        _gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, Self::Lifted>,
-    ) -> Self::Canonicalized {
-        value
-    }
-}
-
 pub trait ExClauseFold<'tcx>
 where
     Self: chalk_engine::context::Context + Clone,
@@ -1053,20 +1040,3 @@ fn lift_ex_clause_to_tcx<'a, 'gcx>(
         tcx: TyCtxt<'a, 'gcx, 'tcx>,
     ) -> Option<Self::LiftedExClause>;
 }
-
-impl<'gcx: 'tcx, 'tcx, C> Canonicalize<'gcx, 'tcx> for chalk_engine::ExClause<C>
-where
-    C: chalk_engine::context::Context + Clone,
-    C: ExClauseLift<'gcx> + ExClauseFold<'tcx>,
-    C::Substitution: Clone,
-    C::RegionConstraint: Clone,
-{
-    type Canonicalized = Canonical<'gcx, C::LiftedExClause>;
-
-    fn intern(
-        _gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, Self::Lifted>,
-    ) -> Self::Canonicalized {
-        value
-    }
-}
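
`ConstEvalFailure` now stores the error behind `Lrc` (rustc's Rc/Arc alias), so cloning a `SelectionError` no longer deep-copies the error and its stacktrace. A rough std-only analogue of that design choice (illustrative names):

use std::rc::Rc;

#[derive(Clone, Debug)]
struct EvalErr {
    message: String,
    stacktrace: Vec<String>,
}

#[derive(Clone, Debug)]
enum SelectionError {
    // Shared ownership: cloning the variant only bumps a refcount
    // instead of copying the whole error (and its stacktrace).
    ConstEvalFailure(Rc<EvalErr>),
    Overflow,
}

fn main() {
    let err = Rc::new(EvalErr {
        message: "too generic".to_string(),
        stacktrace: vec!["frame 0".to_string()],
    });
    let a = SelectionError::ConstEvalFailure(err);
    let b = a.clone(); // cheap: no deep copy of the stacktrace
    println!("{:?}", b);
}
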
index 85bd5853d18a74c8b6420ccbc1c0194d635fa9c1..fe2965146cb7f80be1762f2657dedfa2aae4f4a1 100644 (file)
@@ -130,7 +130,7 @@ fn object_safety_violations_for_trait(self, trait_def_id: DefId)
             .filter(|item| item.kind == ty::AssociatedKind::Method)
             .filter_map(|item| {
                 self.object_safety_violation_for_method(trait_def_id, &item)
-                    .map(|code| ObjectSafetyViolation::Method(item.name, code))
+                    .map(|code| ObjectSafetyViolation::Method(item.ident.name, code))
             }).filter(|violation| {
                 if let ObjectSafetyViolation::Method(_,
                                 MethodViolationCode::WhereClauseReferencesSelf(span)) = violation {
@@ -159,7 +159,7 @@ fn object_safety_violations_for_trait(self, trait_def_id: DefId)
 
         violations.extend(self.associated_items(trait_def_id)
             .filter(|item| item.kind == ty::AssociatedKind::Const)
-            .map(|item| ObjectSafetyViolation::AssociatedConst(item.name)));
+            .map(|item| ObjectSafetyViolation::AssociatedConst(item.ident.name)));
 
         debug!("object_safety_violations_for_trait(trait_def_id={:?}) = {:?}",
                trait_def_id,
index 82f351782bbcdd71a91f981399fd905bfecf6e91..1052d029e0d648e5a0f11f2ffbf51c7bebf01f79 100644 (file)
 use hir::def_id::DefId;
 use infer::{InferCtxt, InferOk};
 use infer::type_variable::TypeVariableOrigin;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
 use mir::interpret::{GlobalId};
 use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap};
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
 use ty::subst::{Subst, Substs};
 use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt};
 use ty::fold::{TypeFoldable, TypeFolder};
@@ -426,7 +426,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
     }
 
     fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
-        if let ConstVal::Unevaluated(def_id, substs) = constant.val {
+        if let ConstValue::Unevaluated(def_id, substs) = constant.val {
             let tcx = self.selcx.tcx().global_tcx();
             if let Some(param_env) = self.tcx().lift_to_global(&self.param_env) {
                 if substs.needs_infer() || substs.has_skol() {
@@ -1349,10 +1349,10 @@ fn confirm_generator_candidate<'cx, 'gcx, 'tcx>(
                                             obligation.predicate.self_ty(),
                                             gen_sig)
         .map_bound(|(trait_ref, yield_ty, return_ty)| {
-            let name = tcx.associated_item(obligation.predicate.item_def_id).name;
-            let ty = if name == Symbol::intern("Return") {
+            let name = tcx.associated_item(obligation.predicate.item_def_id).ident.name;
+            let ty = if name == "Return" {
                 return_ty
-            } else if name == Symbol::intern("Yield") {
+            } else if name == "Yield" {
                 yield_ty
             } else {
                 bug!()
@@ -1452,7 +1452,7 @@ fn confirm_callable_candidate<'cx, 'gcx, 'tcx>(
                 projection_ty: ty::ProjectionTy::from_ref_and_name(
                     tcx,
                     trait_ref,
-                    Symbol::intern(FN_OUTPUT_NAME),
+                    Ident::from_str(FN_OUTPUT_NAME),
                 ),
                 ty: ret_type
             }
@@ -1508,7 +1508,7 @@ fn confirm_impl_candidate<'cx, 'gcx, 'tcx>(
         // checker method `check_impl_items_against_trait`, so here we
         // just return TyError.
         debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
-               assoc_ty.item.name,
+               assoc_ty.item.ident,
                obligation.predicate);
         tcx.types.err
     } else {
@@ -1533,7 +1533,7 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>(
     -> specialization_graph::NodeItem<ty::AssociatedItem>
 {
     let tcx = selcx.tcx();
-    let assoc_ty_name = tcx.associated_item(assoc_ty_def_id).name;
+    let assoc_ty_name = tcx.associated_item(assoc_ty_def_id).ident;
     let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id;
     let trait_def = tcx.trait_def(trait_def_id);
 
@@ -1546,7 +1546,7 @@ fn assoc_ty_def<'cx, 'gcx, 'tcx>(
     let impl_node = specialization_graph::Node::Impl(impl_def_id);
     for item in impl_node.items(tcx) {
         if item.kind == ty::AssociatedKind::Type &&
-                tcx.hygienic_eq(item.name, assoc_ty_name, trait_def_id) {
+                tcx.hygienic_eq(item.ident, assoc_ty_name, trait_def_id) {
             return specialization_graph::NodeItem {
                 node: specialization_graph::Node::Impl(impl_def_id),
                 item,
index af1d2c77c28a867bf40460843ebb964cffef6772..2aaa32aa032025f0d3fa0601b065c8b21014abbb 100644 (file)
@@ -9,13 +9,11 @@
 // except according to those terms.
 
 use infer::at::At;
-use infer::canonical::{Canonical, Canonicalize, QueryResult};
 use infer::InferOk;
 use std::iter::FromIterator;
-use traits::query::CanonicalTyGoal;
-use ty::{self, Ty, TyCtxt};
+use syntax::codemap::Span;
 use ty::subst::Kind;
-use rustc_data_structures::sync::Lrc;
+use ty::{self, Ty, TyCtxt};
 
 impl<'cx, 'gcx, 'tcx> At<'cx, 'gcx, 'tcx> {
     /// Given a type `ty` of some value being dropped, computes a set
@@ -45,7 +43,10 @@ pub fn dropck_outlives(&self, ty: Ty<'tcx>) -> InferOk<'tcx, Vec<Kind<'tcx>>> {
         // any destructor.
         let tcx = self.infcx.tcx;
         if trivial_dropck_outlives(tcx, ty) {
-            return InferOk { value: vec![], obligations: vec![] };
+            return InferOk {
+                value: vec![],
+                obligations: vec![],
+            };
         }
 
         let gcx = tcx.global_tcx();
@@ -54,28 +55,15 @@ pub fn dropck_outlives(&self, ty: Ty<'tcx>) -> InferOk<'tcx, Vec<Kind<'tcx>>> {
         debug!("c_ty = {:?}", c_ty);
         match &gcx.dropck_outlives(c_ty) {
             Ok(result) if result.is_proven() => {
-                match self.infcx.instantiate_query_result(
+                match self.infcx.instantiate_query_result_and_region_obligations(
                     self.cause,
                     self.param_env,
                     &orig_values,
                     result,
                 ) {
-                    Ok(InferOk {
-                        value: DropckOutlivesResult { kinds, overflows },
-                        obligations,
-                    }) => {
-                        for overflow_ty in overflows.into_iter().take(1) {
-                            let mut err = struct_span_err!(
-                                tcx.sess,
-                                span,
-                                E0320,
-                                "overflow while adding drop-check rules for {}",
-                                self.infcx.resolve_type_vars_if_possible(&ty),
-                            );
-                            err.note(&format!("overflowed on {}", overflow_ty));
-                            err.emit();
-                        }
-
+                    Ok(InferOk { value, obligations }) => {
+                        let ty = self.infcx.resolve_type_vars_if_possible(&ty);
+                        let kinds = value.into_kinds_reporting_overflows(tcx, span, ty);
                         return InferOk {
                             value: kinds,
                             obligations,
@@ -102,12 +90,44 @@ pub fn dropck_outlives(&self, ty: Ty<'tcx>) -> InferOk<'tcx, Vec<Kind<'tcx>>> {
     }
 }
 
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct DropckOutlivesResult<'tcx> {
     pub kinds: Vec<Kind<'tcx>>,
     pub overflows: Vec<Ty<'tcx>>,
 }
 
+impl<'tcx> DropckOutlivesResult<'tcx> {
+    pub fn report_overflows(
+        &self,
+        tcx: TyCtxt<'_, '_, 'tcx>,
+        span: Span,
+        ty: Ty<'tcx>,
+    ) {
+        for overflow_ty in self.overflows.iter().take(1) {
+            let mut err = struct_span_err!(
+                tcx.sess,
+                span,
+                E0320,
+                "overflow while adding drop-check rules for {}",
+                ty,
+            );
+            err.note(&format!("overflowed on {}", overflow_ty));
+            err.emit();
+        }
+    }
+
+    pub fn into_kinds_reporting_overflows(
+        self,
+        tcx: TyCtxt<'_, '_, 'tcx>,
+        span: Span,
+        ty: Ty<'tcx>,
+    ) -> Vec<Kind<'tcx>> {
+        self.report_overflows(tcx, span, ty);
+        let DropckOutlivesResult { kinds, overflows: _ } = self;
+        kinds
+    }
+}
+
 /// A set of constraints that need to be satisfied in order for
 /// a type to be valid for destruction.
 #[derive(Clone, Debug)]
@@ -153,17 +173,6 @@ fn from_iter<I: IntoIterator<Item = DtorckConstraint<'tcx>>>(iter: I) -> Self {
         result
     }
 }
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, Ty<'tcx>> {
-    type Canonicalized = CanonicalTyGoal<'gcx>;
-
-    fn intern(
-        _gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, Self::Lifted>,
-    ) -> Self::Canonicalized {
-        value
-    }
-}
-
 BraceStructTypeFoldableImpl! {
     impl<'tcx> TypeFoldable<'tcx> for DropckOutlivesResult<'tcx> {
         kinds, overflows
@@ -181,18 +190,6 @@ impl<'a, 'tcx> Lift<'tcx> for DropckOutlivesResult<'a> {
     kinds, overflows
 });
 
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for QueryResult<'tcx, DropckOutlivesResult<'tcx>> {
-    // we ought to intern this, but I'm too lazy just now
-    type Canonicalized = Lrc<Canonical<'gcx, QueryResult<'gcx, DropckOutlivesResult<'gcx>>>>;
-
-    fn intern(
-        _gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, Self::Lifted>,
-    ) -> Self::Canonicalized {
-        Lrc::new(value)
-    }
-}
-
 impl_stable_hash_for!(struct DtorckConstraint<'tcx> {
     outlives,
     dtorck_types,
@@ -210,7 +207,7 @@ fn intern(
 ///
 /// Note also that `needs_drop` requires a "global" type (i.e., one
 /// with erased regions), but this function does not.
-fn trivial_dropck_outlives<'cx, 'tcx>(tcx: TyCtxt<'cx, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
+pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
     match ty.sty {
         // None of these types have a destructor and hence they do not
         // require anything in particular to outlive the dtor's
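
The overflow diagnostics that used to be emitted inline are now `report_overflows` / `into_kinds_reporting_overflows` on the result type, so other callers such as the new NLL type-op path can reuse them. A loose, self-contained sketch of that shape, with diagnostics reduced to `eprintln!` and illustrative field types:

#[derive(Clone, Debug, Default)]
struct OutlivesResult {
    kinds: Vec<String>,
    overflows: Vec<String>,
}

impl OutlivesResult {
    fn report_overflows(&self, dropped_ty: &str) {
        // Mirror the real code: report at most one overflow.
        for overflow_ty in self.overflows.iter().take(1) {
            eprintln!(
                "overflow while adding drop-check rules for {} (overflowed on {})",
                dropped_ty, overflow_ty
            );
        }
    }

    fn into_kinds_reporting_overflows(self, dropped_ty: &str) -> Vec<String> {
        self.report_overflows(dropped_ty);
        self.kinds
    }
}

fn main() {
    let result = OutlivesResult {
        kinds: vec!["'a".to_string()],
        overflows: vec!["Box<Recursive>".to_string()],
    };
    let kinds = result.into_kinds_reporting_overflows("MyType");
    assert_eq!(kinds.len(), 1);
}
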
index 4e028cac49abe18474503d907447d865a88038f5..c81d1123d42af045f435f5fb9f5ca3da430e070c 100644 (file)
@@ -9,11 +9,8 @@
 // except according to those terms.
 
 use infer::InferCtxt;
-use infer::canonical::{Canonical, Canonicalize};
 use traits::{EvaluationResult, PredicateObligation, SelectionContext,
              TraitQueryMode, OverflowError};
-use traits::query::CanonicalPredicateGoal;
-use ty::{ParamEnvAnd, Predicate, TyCtxt};
 
 impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
     /// Evaluates whether the predicate can be satisfied (by any means)
@@ -57,14 +54,3 @@ fn evaluate_obligation(
         }
     }
 }
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ParamEnvAnd<'tcx, Predicate<'tcx>> {
-    type Canonicalized = CanonicalPredicateGoal<'gcx>;
-
-    fn intern(
-        _gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, Self::Lifted>,
-    ) -> Self::Canonicalized {
-        value
-    }
-}
index 096633ddab2f7893cb6961a27b4e357c0ae7bad3..54b67edb1360b91d1d1fe3dab4433b6f4ef5c667 100644 (file)
 //! `librustc_traits`.
 
 use infer::canonical::Canonical;
+use ty::error::TypeError;
 use ty::{self, Ty};
 
 pub mod dropck_outlives;
 pub mod evaluate_obligation;
 pub mod normalize;
 pub mod normalize_erasing_regions;
+pub mod type_op;
 
 pub type CanonicalProjectionGoal<'tcx> =
     Canonical<'tcx, ty::ParamEnvAnd<'tcx, ty::ProjectionTy<'tcx>>>;
 pub type CanonicalPredicateGoal<'tcx> =
     Canonical<'tcx, ty::ParamEnvAnd<'tcx, ty::Predicate<'tcx>>>;
 
+pub type CanonicalTypeOpEqGoal<'tcx> =
+    Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::eq::Eq<'tcx>>>;
+
+pub type CanonicalTypeOpSubtypeGoal<'tcx> =
+    Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::subtype::Subtype<'tcx>>>;
+
+pub type CanonicalTypeOpProvePredicateGoal<'tcx> =
+    Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::prove_predicate::ProvePredicate<'tcx>>>;
+
+pub type CanonicalTypeOpNormalizeGoal<'tcx, T> =
+    Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::normalize::Normalize<T>>>;
+
 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 pub struct NoSolution;
 
 pub type Fallible<T> = Result<T, NoSolution>;
 
+impl<'tcx> From<TypeError<'tcx>> for NoSolution {
+    fn from(_: TypeError<'tcx>) -> NoSolution {
+        NoSolution
+    }
+}
+
 impl_stable_hash_for!(struct NoSolution { });
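
The new `From<TypeError> for NoSolution` impl is what lets the type-op code added below use `?` on unification results and collapse any detailed type error into `Err(NoSolution)`. The mechanism in a standalone sketch (made-up helper names):

#[derive(Debug)]
struct TypeError(String);

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct NoSolution;

impl From<TypeError> for NoSolution {
    fn from(_: TypeError) -> NoSolution {
        NoSolution
    }
}

type Fallible<T> = Result<T, NoSolution>;

fn unify(a: &str, b: &str) -> Result<(), TypeError> {
    if a == b {
        Ok(())
    } else {
        Err(TypeError(format!("{} != {}", a, b)))
    }
}

fn type_op_eq(a: &str, b: &str) -> Fallible<()> {
    // `?` applies the `From` impl, turning a detailed TypeError
    // into the bare NoSolution the query layer expects.
    unify(a, b)?;
    Ok(())
}

fn main() {
    assert_eq!(type_op_eq("u32", "u32"), Ok(()));
    assert_eq!(type_op_eq("u32", "i32"), Err(NoSolution));
}
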
index d0ae0bdac8c095327f5d6b621f4115bdeb1e528e..a67383fb79aa06e2267e39ed5958bf4eb51487f8 100644 (file)
 
 use infer::{InferCtxt, InferOk};
 use infer::at::At;
-use infer::canonical::{Canonical, Canonicalize, QueryResult};
-use middle::const_val::ConstVal;
-use mir::interpret::GlobalId;
-use rustc_data_structures::sync::Lrc;
+use mir::interpret::{GlobalId, ConstValue};
 use traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
-use traits::query::CanonicalProjectionGoal;
 use traits::project::Normalized;
 use ty::{self, Ty, TyCtxt};
 use ty::fold::{TypeFoldable, TypeFolder};
@@ -124,10 +120,10 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
                         let concrete_ty = generic_ty.subst(self.tcx(), substs);
                         self.anon_depth += 1;
                         if concrete_ty == ty {
-                            println!("generic_ty: {:#?}", generic_ty);
-                            println!("substs {:#?}", substs);
+                            bug!("infinite recursion generic_ty: {:#?}, substs: {:#?}, \
+                                  concrete_ty: {:#?}, ty: {:#?}", generic_ty, substs, concrete_ty,
+                                  ty);
                         }
-                        assert_ne!(concrete_ty, ty, "infinite recursion");
                         let folded_ty = self.fold_ty(concrete_ty);
                         self.anon_depth -= 1;
                         folded_ty
@@ -163,7 +159,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
                             return ty;
                         }
 
-                        match self.infcx.instantiate_query_result(
+                        match self.infcx.instantiate_query_result_and_region_obligations(
                             self.cause,
                             self.param_env,
                             &orig_values,
@@ -198,7 +194,7 @@ fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
     }
 
     fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
-        if let ConstVal::Unevaluated(def_id, substs) = constant.val {
+        if let ConstValue::Unevaluated(def_id, substs) = constant.val {
             let tcx = self.infcx.tcx.global_tcx();
             if let Some(param_env) = self.tcx().lift_to_global(&self.param_env) {
                 if substs.needs_infer() || substs.has_skol() {
@@ -251,29 +247,6 @@ impl<'a, 'tcx> Lift<'tcx> for NormalizationResult<'a> {
     }
 }
 
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, ty::ProjectionTy<'tcx>> {
-    type Canonicalized = CanonicalProjectionGoal<'gcx>;
-
-    fn intern(
-        _gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, Self::Lifted>,
-    ) -> Self::Canonicalized {
-        value
-    }
-}
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for QueryResult<'tcx, NormalizationResult<'tcx>> {
-    // we ought to intern this, but I'm too lazy just now
-    type Canonicalized = Lrc<Canonical<'gcx, QueryResult<'gcx, NormalizationResult<'gcx>>>>;
-
-    fn intern(
-        _gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, Self::Lifted>,
-    ) -> Self::Canonicalized {
-        Lrc::new(value)
-    }
-}
-
 impl_stable_hash_for!(struct NormalizationResult<'tcx> {
     normalized_ty
 });
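
The `println!` debugging plus `assert_ne!` above is folded into one `bug!` that carries all the relevant context when anonymous-type expansion stops making progress. The underlying guard is just a no-progress check inside a recursive expansion; a trimmed-down, self-contained illustration:

use std::collections::HashMap;

// Expand an alias one step at a time; panic with full context if expansion
// makes no progress (which would otherwise recurse forever).
fn expand(ty: &str, aliases: &HashMap<&str, &str>) -> String {
    match aliases.get(ty) {
        Some(&concrete) => {
            if concrete == ty {
                panic!(
                    "infinite recursion: alias {:?} expands to itself (aliases: {:?})",
                    ty, aliases
                );
            }
            expand(concrete, aliases)
        }
        None => ty.to_string(),
    }
}

fn main() {
    let mut aliases = HashMap::new();
    aliases.insert("Meters", "f64");
    assert_eq!(expand("Meters", &aliases), "f64");
}
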
diff --git a/src/librustc/traits/query/type_op/custom.rs b/src/librustc/traits/query/type_op/custom.rs
new file mode 100644 (file)
index 0000000..3d10ce8
--- /dev/null
@@ -0,0 +1,100 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::{InferCtxt, InferOk};
+use std::fmt;
+use traits::query::Fallible;
+
+use infer::canonical::query_result;
+use infer::canonical::QueryRegionConstraint;
+use std::rc::Rc;
+use syntax::codemap::DUMMY_SP;
+use traits::{ObligationCause, TraitEngine, TraitEngineExt};
+
+pub struct CustomTypeOp<F, G> {
+    closure: F,
+    description: G,
+}
+
+impl<F, G> CustomTypeOp<F, G> {
+    pub fn new<'gcx, 'tcx, R>(closure: F, description: G) -> Self
+    where
+        F: FnOnce(&InferCtxt<'_, 'gcx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
+        G: Fn() -> String,
+    {
+        CustomTypeOp {
+            closure,
+            description,
+        }
+    }
+}
+
+impl<'gcx, 'tcx, F, R, G> super::TypeOp<'gcx, 'tcx> for CustomTypeOp<F, G>
+where
+    F: for<'a, 'cx> FnOnce(&'a InferCtxt<'cx, 'gcx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
+    G: Fn() -> String,
+{
+    type Output = R;
+
+    /// Processes the operation and all resulting obligations,
+    /// returning the final result along with any region constraints
+    /// (they will be given over to the NLL region solver).
+    fn fully_perform(
+        self,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+        if cfg!(debug_assertions) {
+            info!("fully_perform({:?})", self);
+        }
+
+        scrape_region_constraints(infcx, || Ok((self.closure)(infcx)?))
+    }
+}
+
+impl<F, G> fmt::Debug for CustomTypeOp<F, G>
+where
+    G: Fn() -> String,
+{
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{}", (self.description)())
+    }
+}
+
+/// Executes `op` and then scrapes out all the "old style" region
+/// constraints that result, creating query-region-constraints.
+fn scrape_region_constraints<'gcx, 'tcx, R>(
+    infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    op: impl FnOnce() -> Fallible<InferOk<'tcx, R>>,
+) -> Fallible<(R, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+    let mut fulfill_cx = TraitEngine::new(infcx.tcx);
+    let dummy_body_id = ObligationCause::dummy().body_id;
+    let InferOk { value, obligations } = infcx.commit_if_ok(|_| op())?;
+    debug_assert!(obligations.iter().all(|o| o.cause.body_id == dummy_body_id));
+    fulfill_cx.register_predicate_obligations(infcx, obligations);
+    if let Err(e) = fulfill_cx.select_all_or_error(infcx) {
+        infcx.tcx.sess.diagnostic().delay_span_bug(
+            DUMMY_SP,
+            &format!("errors selecting obligation during MIR typeck: {:?}", e),
+        );
+    }
+
+    let region_obligations = infcx.take_registered_region_obligations();
+
+    let region_constraint_data = infcx.take_and_reset_region_constraints();
+
+    let outlives =
+        query_result::make_query_outlives(infcx.tcx, region_obligations, &region_constraint_data);
+
+    if outlives.is_empty() {
+        Ok((value, None))
+    } else {
+        Ok((value, Some(Rc::new(outlives))))
+    }
+}
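
`CustomTypeOp` pairs an arbitrary closure with a second closure that produces its `Debug` description, so building the description string is deferred until something actually formats the op. The same shape in miniature (hypothetical names, std only):

use std::fmt;

struct CustomOp<F, G> {
    closure: F,
    description: G,
}

impl<F, G> CustomOp<F, G>
where
    F: FnOnce() -> Result<u32, ()>,
    G: Fn() -> String,
{
    fn new(closure: F, description: G) -> Self {
        CustomOp { closure, description }
    }

    fn fully_perform(self) -> Result<u32, ()> {
        (self.closure)()
    }
}

impl<F, G> fmt::Debug for CustomOp<F, G>
where
    G: Fn() -> String,
{
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // The description is only rendered when someone formats the op.
        write!(f, "{}", (self.description)())
    }
}

fn main() {
    let op = CustomOp::new(|| Ok(42), || "the answer op".to_string());
    println!("{:?}", op);
    assert_eq!(op.fully_perform(), Ok(42));
}
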
diff --git a/src/librustc/traits/query/type_op/eq.rs b/src/librustc/traits/query/type_op/eq.rs
new file mode 100644 (file)
index 0000000..52a087c
--- /dev/null
@@ -0,0 +1,72 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Eq<'tcx> {
+    pub a: Ty<'tcx>,
+    pub b: Ty<'tcx>,
+}
+
+impl<'tcx> Eq<'tcx> {
+    pub fn new(a: Ty<'tcx>, b: Ty<'tcx>) -> Self {
+        Self { a, b }
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Eq<'tcx> {
+    type QueryResult = ();
+
+    fn try_fast_path(
+        _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        key: &ParamEnvAnd<'tcx, Eq<'tcx>>,
+    ) -> Option<Self::QueryResult> {
+        if key.value.a == key.value.b {
+            Some(())
+        } else {
+            None
+        }
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+        tcx.type_op_eq(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, ()>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+        v
+    }
+}
+
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for Eq<'tcx> {
+        a,
+        b,
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for Eq<'a> {
+        type Lifted = Eq<'tcx>;
+        a,
+        b,
+    }
+}
+
+impl_stable_hash_for! {
+    struct Eq<'tcx> { a, b }
+}
diff --git a/src/librustc/traits/query/type_op/mod.rs b/src/librustc/traits/query/type_op/mod.rs
new file mode 100644 (file)
index 0000000..3dfa66c
--- /dev/null
@@ -0,0 +1,163 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryRegionConstraint,
+                       QueryResult};
+use infer::{InferCtxt, InferOk};
+use std::fmt;
+use std::rc::Rc;
+use traits::query::Fallible;
+use traits::ObligationCause;
+use ty::fold::TypeFoldable;
+use ty::{Lift, ParamEnvAnd, TyCtxt};
+
+pub mod custom;
+pub mod eq;
+pub mod normalize;
+pub mod outlives;
+pub mod prove_predicate;
+use self::prove_predicate::ProvePredicate;
+pub mod subtype;
+
+/// "Type ops" are used in NLL to perform some particular action and
+/// extract out the resulting region constraints (or an error if it
+/// cannot be completed).
+pub trait TypeOp<'gcx, 'tcx>: Sized + fmt::Debug {
+    type Output;
+
+    /// Processes the operation and all resulting obligations,
+    /// returning the final result along with any region constraints
+    /// (they will be given over to the NLL region solver).
+    fn fully_perform(
+        self,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)>;
+}
+
+/// "Query type ops" are type ops that are implemented using a
+/// [canonical query][c]. The `Self` type here contains the kernel of
+/// information needed to do the operation -- `TypeOp` is actually
+/// implemented for `ParamEnvAnd<Self>`, since we always need to bring
+/// along a parameter environment as well. For query type-ops, we will
+/// first canonicalize the key and then invoke the query on the tcx,
+/// which produces the resulting query region constraints.
+///
+/// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+pub trait QueryTypeOp<'gcx: 'tcx, 'tcx>:
+    fmt::Debug + Sized + TypeFoldable<'tcx> + Lift<'gcx>
+{
+    type QueryResult: TypeFoldable<'tcx> + Lift<'gcx>;
+
+    /// Gives the query the option of a simple fast path that never
+    /// actually hits the tcx cache lookup etc. Returns `Some(r)` with
+    /// a final result or `None` to take the full path.
+    fn try_fast_path(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        key: &ParamEnvAnd<'tcx, Self>,
+    ) -> Option<Self::QueryResult>;
+
+    /// Performs the actual query with the canonicalized key -- the
+    /// real work happens here. This method is not given an `infcx`
+    /// because it shouldn't need one -- and if it had access to one,
+    /// it might do things like invoke `sub_regions`, which would be
+    /// bad, because it would create subregion relationships that are
+    /// not captured in the return value.
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>>;
+
+    /// Casts a lifted query result (which is in the gcx lifetime)
+    /// into the tcx lifetime. This is always just an identity cast,
+    /// but the generic code doesn't realize it -- put another way, in
+    /// the generic code, we have a `Lifted<'gcx, Self::QueryResult>`
+    /// and we want to convert that to a `Self::QueryResult`. This is
+    /// not a priori valid, so we can't do it -- but in practice, it
+    /// is always a no-op (e.g., the lifted form of a type,
+    /// `Ty<'gcx>`, is a subtype of `Ty<'tcx>`). So we have to push
+    /// the operation into the impls that know more specifically what
+    /// `QueryResult` is. This operation would (maybe) be nicer with
+    /// something like HKTs or GATs, since then we could make
+    /// `QueryResult` parametric in `'gcx` and `'tcx`, etc.
+    fn shrink_to_tcx_lifetime(
+        lifted_query_result: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>>;
+
+    fn fully_perform_into(
+        query_key: ParamEnvAnd<'tcx, Self>,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+        output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
+    ) -> Fallible<Self::QueryResult> {
+        if let Some(result) = QueryTypeOp::try_fast_path(infcx.tcx, &query_key) {
+            return Ok(result);
+        }
+
+        // FIXME(#33684) -- We need to use
+        // `canonicalize_hr_query_hack` here because of things
+        // like the subtype query, which go awry around
+        // `'static` otherwise.
+        let (canonical_self, canonical_var_values) = infcx.canonicalize_hr_query_hack(&query_key);
+        let canonical_result = Self::perform_query(infcx.tcx, canonical_self)?;
+        let canonical_result = Self::shrink_to_tcx_lifetime(&canonical_result);
+
+        let param_env = query_key.param_env;
+
+        let InferOk { value, obligations } = infcx
+            .instantiate_nll_query_result_and_region_obligations(
+                &ObligationCause::dummy(),
+                param_env,
+                &canonical_var_values,
+                canonical_result,
+                output_query_region_constraints,
+            )?;
+
+        // Typically, instantiating NLL query results does not
+        // create obligations. However, in some cases there
+        // are unresolved type variables, and unifying them *can*
+        // create obligations. In that case, we have to go
+        // fulfill them. We do this via a (recursive) query.
+        for obligation in obligations {
+            let () = ProvePredicate::fully_perform_into(
+                obligation
+                    .param_env
+                    .and(ProvePredicate::new(obligation.predicate)),
+                infcx,
+                output_query_region_constraints,
+            )?;
+        }
+
+        Ok(value)
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx, Q> TypeOp<'gcx, 'tcx> for ParamEnvAnd<'tcx, Q>
+where
+    Q: QueryTypeOp<'gcx, 'tcx>,
+{
+    type Output = Q::QueryResult;
+
+    fn fully_perform(
+        self,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+        let mut qrc = vec![];
+        let r = Q::fully_perform_into(self, infcx, &mut qrc)?;
+
+        // Promote the final query-region-constraints into a
+        // (optional) ref-counted vector:
+        let opt_qrc = if qrc.is_empty() {
+            None
+        } else {
+            Some(Rc::new(qrc))
+        };
+
+        Ok((r, opt_qrc))
+    }
+}
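
Stripped of canonicalization, the control flow of `fully_perform_into` is: try the fast path, otherwise run the query and append any region constraints to a caller-supplied buffer. A toy skeleton of that flow — every name below is a stand-in, not the rustc API:

type Constraint = String;

trait QueryTypeOp: Sized {
    type Output;

    // Cheap check that may answer without running the query.
    fn try_fast_path(&self) -> Option<Self::Output>;

    // The "real" query; may emit region constraints.
    fn perform_query(self, constraints: &mut Vec<Constraint>) -> Result<Self::Output, ()>;

    fn fully_perform_into(
        self,
        constraints: &mut Vec<Constraint>,
    ) -> Result<Self::Output, ()> {
        if let Some(result) = self.try_fast_path() {
            return Ok(result);
        }
        self.perform_query(constraints)
    }
}

struct SubtypeOp {
    sub: &'static str,
    sup: &'static str,
}

impl QueryTypeOp for SubtypeOp {
    type Output = ();

    fn try_fast_path(&self) -> Option<()> {
        if self.sub == self.sup { Some(()) } else { None }
    }

    fn perform_query(self, constraints: &mut Vec<Constraint>) -> Result<(), ()> {
        constraints.push(format!("{} <: {}", self.sub, self.sup));
        Ok(())
    }
}

fn main() {
    let mut constraints = vec![];
    SubtypeOp { sub: "&'a u32", sup: "&'b u32" }
        .fully_perform_into(&mut constraints)
        .unwrap();
    assert_eq!(constraints.len(), 1);
}
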
diff --git a/src/librustc/traits/query/type_op/normalize.rs b/src/librustc/traits/query/type_op/normalize.rs
new file mode 100644 (file)
index 0000000..0c393fa
--- /dev/null
@@ -0,0 +1,161 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use std::fmt;
+use traits::query::Fallible;
+use ty::fold::TypeFoldable;
+use ty::{self, Lift, ParamEnvAnd, Ty, TyCtxt};
+
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Normalize<T> {
+    pub value: T,
+}
+
+impl<'tcx, T> Normalize<T>
+where
+    T: fmt::Debug + TypeFoldable<'tcx>,
+{
+    pub fn new(value: T) -> Self {
+        Self { value }
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx, T> super::QueryTypeOp<'gcx, 'tcx> for Normalize<T>
+where
+    T: Normalizable<'gcx, 'tcx>,
+{
+    type QueryResult = T;
+
+    fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<T> {
+        if !key.value.value.has_projections() {
+            Some(key.value.value)
+        } else {
+            None
+        }
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+        T::type_op_method(tcx, canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, T>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, T>> {
+        T::shrink_to_tcx_lifetime(v)
+    }
+}
+
+pub trait Normalizable<'gcx, 'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx> + Copy {
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>>;
+
+    /// Convert from the `'gcx` (lifted) form of `Self` into the `tcx`
+    /// form of `Self`.
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>>;
+}
+
+impl Normalizable<'gcx, 'tcx> for Ty<'tcx>
+where
+    'gcx: 'tcx,
+{
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+        tcx.type_op_normalize_ty(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+        v
+    }
+}
+
+impl Normalizable<'gcx, 'tcx> for ty::Predicate<'tcx>
+where
+    'gcx: 'tcx,
+{
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+        tcx.type_op_normalize_predicate(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+        v
+    }
+}
+
+impl Normalizable<'gcx, 'tcx> for ty::PolyFnSig<'tcx>
+where
+    'gcx: 'tcx,
+{
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+        tcx.type_op_normalize_poly_fn_sig(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+        v
+    }
+}
+
+impl Normalizable<'gcx, 'tcx> for ty::FnSig<'tcx>
+where
+    'gcx: 'tcx,
+{
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+        tcx.type_op_normalize_fn_sig(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+        v
+    }
+}
+
+BraceStructTypeFoldableImpl! {
+    impl<'tcx, T> TypeFoldable<'tcx> for Normalize<T> {
+        value,
+    } where T: TypeFoldable<'tcx>,
+}
+
+BraceStructLiftImpl! {
+    impl<'tcx, T> Lift<'tcx> for Normalize<T> {
+        type Lifted = Normalize<T::Lifted>;
+        value,
+    } where T: Lift<'tcx>,
+}
+
+impl_stable_hash_for! {
+    impl<'tcx, T> for struct Normalize<T> {
+        value
+    }
+}
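
`Normalize<T>` is one generic wrapper, but each normalizable `T` must call a different tcx query, so the per-type dispatch is pushed into the `Normalizable` trait with one impl per type. The dispatch pattern in isolation (standalone, illustrative impls):

// One generic wrapper, with the per-type behaviour supplied by a trait.
struct Normalize<T> {
    value: T,
}

trait Normalizable: Sized {
    fn type_op_method(self) -> Self;
}

impl Normalizable for String {
    fn type_op_method(self) -> Self {
        self.to_uppercase() // stand-in for one per-type query
    }
}

impl Normalizable for Vec<u8> {
    fn type_op_method(mut self) -> Self {
        self.sort(); // stand-in for a different per-type query
        self
    }
}

impl<T: Normalizable> Normalize<T> {
    fn perform(self) -> T {
        T::type_op_method(self.value)
    }
}

fn main() {
    assert_eq!(Normalize { value: "ty".to_string() }.perform(), "TY");
    assert_eq!(Normalize { value: vec![3u8, 1, 2] }.perform(), vec![1, 2, 3]);
}
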
diff --git a/src/librustc/traits/query/type_op/outlives.rs b/src/librustc/traits/query/type_op/outlives.rs
new file mode 100644 (file)
index 0000000..e41ae7a
--- /dev/null
@@ -0,0 +1,100 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::dropck_outlives::trivial_dropck_outlives;
+use traits::query::dropck_outlives::DropckOutlivesResult;
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+#[derive(Copy, Clone, Debug)]
+pub struct DropckOutlives<'tcx> {
+    dropped_ty: Ty<'tcx>,
+}
+
+impl<'tcx> DropckOutlives<'tcx> {
+    pub fn new(dropped_ty: Ty<'tcx>) -> Self {
+        DropckOutlives { dropped_ty }
+    }
+}
+
+impl super::QueryTypeOp<'gcx, 'tcx> for DropckOutlives<'tcx>
+where
+    'gcx: 'tcx,
+{
+    type QueryResult = DropckOutlivesResult<'tcx>;
+
+    fn try_fast_path(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        key: &ParamEnvAnd<'tcx, Self>,
+    ) -> Option<Self::QueryResult> {
+        if trivial_dropck_outlives(tcx, key.value.dropped_ty) {
+            Some(DropckOutlivesResult::default())
+        } else {
+            None
+        }
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+        // Subtle: note that we are not invoking
+        // `infcx.at(...).dropck_outlives(...)` here, but rather the
+        // underlying `dropck_outlives` query. This same underlying
+        // query is also used by the
+        // `infcx.at(...).dropck_outlives(...)` fn. Avoiding the
+        // wrapper means we don't need an infcx in this code, which is
+        // good because the interface doesn't give us one (so that we
+        // know we are not registering any subregion relations or
+        // other things).
+
+        // FIXME convert to the type expected by the `dropck_outlives`
+        // query. This should eventually be fixed by changing the
+        // *underlying query*.
+        let Canonical {
+            variables,
+            value:
+                ParamEnvAnd {
+                    param_env,
+                    value: DropckOutlives { dropped_ty },
+                },
+        } = canonicalized;
+        let canonicalized = Canonical {
+            variables,
+            value: param_env.and(dropped_ty),
+        };
+
+        tcx.dropck_outlives(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        lifted_query_result: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>> {
+        lifted_query_result
+    }
+}
+
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for DropckOutlives<'tcx> {
+        dropped_ty
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for DropckOutlives<'a> {
+        type Lifted = DropckOutlives<'tcx>;
+        dropped_ty
+    }
+}
+
+impl_stable_hash_for! {
+    struct DropckOutlives<'tcx> { dropped_ty }
+}
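
`perform_query` above adapts the canonicalized `DropckOutlives` key to the shape the pre-existing `dropck_outlives` query expects by destructuring the wrapper and rebuilding it around the inner type. That move in isolation, with generic stand-ins rather than the real `Canonical` type:

// A generic wrapper standing in for `Canonical<_, _>`.
struct Wrapped<T> {
    variables: Vec<String>,
    value: T,
}

struct NewKey {
    dropped_ty: &'static str,
}

// Re-wrap the payload without touching the captured variables,
// so an adapter can feed a new-style key to an old-style query.
fn adapt(key: Wrapped<NewKey>) -> Wrapped<&'static str> {
    let Wrapped { variables, value: NewKey { dropped_ty } } = key;
    Wrapped { variables, value: dropped_ty }
}

fn old_style_query(key: Wrapped<&'static str>) -> usize {
    key.variables.len() + key.value.len()
}

fn main() {
    let key = Wrapped {
        variables: vec!["?T".to_string()],
        value: NewKey { dropped_ty: "Vec<?T>" },
    };
    let n = old_style_query(adapt(key));
    assert_eq!(n, 1 + "Vec<?T>".len());
}
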
diff --git a/src/librustc/traits/query/type_op/prove_predicate.rs b/src/librustc/traits/query/type_op/prove_predicate.rs
new file mode 100644 (file)
index 0000000..33dc321
--- /dev/null
@@ -0,0 +1,65 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Predicate, TyCtxt};
+
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct ProvePredicate<'tcx> {
+    pub predicate: Predicate<'tcx>,
+}
+
+impl<'tcx> ProvePredicate<'tcx> {
+    pub fn new(predicate: Predicate<'tcx>) -> Self {
+        ProvePredicate { predicate }
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for ProvePredicate<'tcx> {
+    type QueryResult = ();
+
+    fn try_fast_path(
+        _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        _key: &ParamEnvAnd<'tcx, Self>,
+    ) -> Option<Self::QueryResult> {
+        None
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+        tcx.type_op_prove_predicate(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, ()>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+        v
+    }
+}
+
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for ProvePredicate<'tcx> {
+        predicate,
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for ProvePredicate<'a> {
+        type Lifted = ProvePredicate<'tcx>;
+        predicate,
+    }
+}
+
+impl_stable_hash_for! {
+    struct ProvePredicate<'tcx> { predicate }
+}
diff --git a/src/librustc/traits/query/type_op/subtype.rs b/src/librustc/traits/query/type_op/subtype.rs
new file mode 100644 (file)
index 0000000..dc41bb1
--- /dev/null
@@ -0,0 +1,72 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Subtype<'tcx> {
+    pub sub: Ty<'tcx>,
+    pub sup: Ty<'tcx>,
+}
+
+impl<'tcx> Subtype<'tcx> {
+    pub fn new(sub: Ty<'tcx>, sup: Ty<'tcx>) -> Self {
+        Self {
+            sub,
+            sup,
+        }
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Subtype<'tcx> {
+    type QueryResult = ();
+
+    fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> {
+        if key.value.sub == key.value.sup {
+            Some(())
+        } else {
+            None
+        }
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+        tcx.type_op_subtype(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, ()>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+        v
+    }
+}
+
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for Subtype<'tcx> {
+        sub,
+        sup,
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for Subtype<'a> {
+        type Lifted = Subtype<'tcx>;
+        sub,
+        sup,
+    }
+}
+
+impl_stable_hash_for! {
+    struct Subtype<'tcx> { sub, sup }
+}
index f6ae5113cb2a5505e3ed72b0e659f9bacfd02605..f151f3b2531a682a65c6ea42a15d6b19de77cdf1 100644 (file)
@@ -129,7 +129,7 @@ pub fn find_associated_item<'a, 'tcx>(
     let trait_def = tcx.trait_def(trait_def_id);
 
     let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
-    match ancestors.defs(tcx, item.name, item.kind, trait_def_id).next() {
+    match ancestors.defs(tcx, item.ident, item.kind, trait_def_id).next() {
         Some(node_item) => {
             let substs = tcx.infer_ctxt().enter(|infcx| {
                 let param_env = ty::ParamEnv::reveal_all();
index e56a8662f3eb4346747c3ae36e083d49422c6a8d..b64e4228be9d21abae26c245aca4e7ff23bd30fc 100644 (file)
@@ -18,7 +18,7 @@
 use ty::{self, TyCtxt, TypeFoldable};
 use ty::fast_reject::{self, SimplifiedType};
 use rustc_data_structures::sync::Lrc;
-use syntax::ast::Name;
+use syntax::ast::Ident;
 use util::captures::Captures;
 use util::nodemap::{DefIdMap, FxHashMap};
 
@@ -372,14 +372,14 @@ impl<'a, 'gcx, 'tcx> Ancestors {
     pub fn defs(
         self,
         tcx: TyCtxt<'a, 'gcx, 'tcx>,
-        trait_item_name: Name,
+        trait_item_name: Ident,
         trait_item_kind: ty::AssociatedKind,
         trait_def_id: DefId,
     ) -> impl Iterator<Item = NodeItem<ty::AssociatedItem>> + Captures<'gcx> + Captures<'tcx> + 'a {
         self.flat_map(move |node| {
             node.items(tcx).filter(move |impl_item| {
                 impl_item.kind == trait_item_kind &&
-                tcx.hygienic_eq(impl_item.name, trait_item_name, trait_def_id)
+                tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id)
             }).map(move |item| NodeItem { node: node, item: item })
         })
     }
index d24c84b2556f860653902fb8aace1aa896b6940c..39e358803cbe84f7a148de58137875b746ba8af8 100644 (file)
@@ -172,7 +172,9 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lif
                 })
             }
             super::TraitNotObjectSafe(def_id) => Some(super::TraitNotObjectSafe(def_id)),
-            super::ConstEvalFailure(ref err) => tcx.lift(err).map(super::ConstEvalFailure),
+            super::ConstEvalFailure(ref err) => tcx.lift(&**err).map(|err| super::ConstEvalFailure(
+                err.into(),
+            )),
             super::Overflow => bug!(), // FIXME: ape ConstEvalFailure?
         }
     }
index d911f32ed3f1f7bc227d5f08ab4854de38011233..967a3324cfb2a92113d202133085d0b80df2d9bb 100644 (file)
@@ -54,7 +54,7 @@ pub trait TyEncoder: Encoder {
     fn position(&self) -> usize;
 }
 
-impl<'buf> TyEncoder for opaque::Encoder<'buf> {
+impl TyEncoder for opaque::Encoder {
     #[inline]
     fn position(&self) -> usize {
         self.position()
index 6cbf4fad02cb9e6a9fe97f0a2234bca6373537a7..5142a30ae574f52064cf3ef1bd960348c5d3124d 100644 (file)
@@ -64,6 +64,7 @@
 use std::cmp::Ordering;
 use std::collections::hash_map::{self, Entry};
 use std::hash::{Hash, Hasher};
+use std::fmt;
 use std::mem;
 use std::ops::Deref;
 use std::iter;
@@ -1503,8 +1504,8 @@ pub fn enter_local<F, R>(
 /// contain the TypeVariants key or if the address of the interned
 /// pointer differs. The latter case is possible if a primitive type,
 /// e.g. `()` or `u8`, was interned in a different context.
-pub trait Lift<'tcx> {
-    type Lifted: 'tcx;
+pub trait Lift<'tcx>: fmt::Debug {
+    type Lifted: fmt::Debug + 'tcx;
     fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
 }
 
index e3fbadc271206e1cbb1a2224441f32c6203a0cf1..3718c436b3a0019fa5d42321449c0b1debabf39c 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
 use ty::subst::Substs;
 use ty::{self, Ty, TypeFlags, TypeFoldable};
 
@@ -233,12 +233,9 @@ fn add_region(&mut self, r: ty::Region) {
 
     fn add_const(&mut self, constant: &ty::Const) {
         self.add_ty(constant.ty);
-        match constant.val {
-            ConstVal::Value(_) => {}
-            ConstVal::Unevaluated(_, substs) => {
-                self.add_flags(TypeFlags::HAS_PROJECTION);
-                self.add_substs(substs);
-            }
+        if let ConstValue::Unevaluated(_, substs) = constant.val {
+            self.add_flags(TypeFlags::HAS_PROJECTION);
+            self.add_substs(substs);
         }
     }
 
index 307e1b238386c0e912675295010249613e24e73a..f55a512908499a4b7817bea0b576ec1414d8dabc 100644 (file)
@@ -39,7 +39,7 @@
 //! These methods return true to indicate that the visitor has found what it is looking for
 //! and does not need to visit anything else.
 
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
 use hir::def_id::DefId;
 use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
 
@@ -685,7 +685,7 @@ fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
     }
 
     fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> bool {
-        if let ConstVal::Unevaluated(..) = c.val {
+        if let ConstValue::Unevaluated(..) = c.val {
             let projection_flags = TypeFlags::HAS_NORMALIZABLE_PROJECTION |
                 TypeFlags::HAS_PROJECTION;
             if projection_flags.intersects(self.flags) {
index ce7098314557939fc59623815997c138ba2b01e7..1f647d811b08931145801d1e2d974ca7a34ff68b 100644 (file)
@@ -21,7 +21,7 @@
 use hir::svh::Svh;
 use ich::Fingerprint;
 use ich::StableHashingContext;
-use infer::canonical::{Canonical, Canonicalize};
+use infer::canonical::Canonical;
 use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
 use middle::privacy::AccessLevels;
 use middle::resolve_lifetime::ObjectLifetimeDefault;
@@ -177,7 +177,7 @@ pub struct ImplHeader<'tcx> {
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub struct AssociatedItem {
     pub def_id: DefId,
-    pub name: Name,
+    pub ident: Ident,
     pub kind: AssociatedKind,
     pub vis: Visibility,
     pub defaultness: hir::Defaultness,
@@ -224,9 +224,9 @@ pub fn signature<'a, 'tcx>(&self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) -> String {
                 // regions just fine, showing `fn(&MyType)`.
                 format!("{}", tcx.fn_sig(self.def_id).skip_binder())
             }
-            ty::AssociatedKind::Type => format!("type {};", self.name.to_string()),
+            ty::AssociatedKind::Type => format!("type {};", self.ident),
             ty::AssociatedKind::Const => {
-                format!("const {}: {:?};", self.name.to_string(), tcx.type_of(self.def_id))
+                format!("const {}: {:?};", self.ident, tcx.type_of(self.def_id))
             }
         }
     }
@@ -591,15 +591,6 @@ impl<'tcx> serialize::UseSpecializedDecodable for Ty<'tcx> {}
 
 pub type CanonicalTy<'gcx> = Canonical<'gcx, Ty<'gcx>>;
 
-impl <'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for Ty<'tcx> {
-    type Canonicalized = CanonicalTy<'gcx>;
-
-    fn intern(_gcx: TyCtxt<'_, 'gcx, 'gcx>,
-              value: Canonical<'gcx, Self::Lifted>) -> Self::Canonicalized {
-        value
-    }
-}
-
 extern {
     /// A dummy type used to force Slice to be unsized without requiring fat pointers
     type OpaqueSliceContents;
@@ -2074,7 +2065,7 @@ pub fn eval_explicit_discr(
                     })
                 } else {
                     info!("invalid enum discriminant: {:#?}", val);
-                    ::middle::const_val::struct_error(
+                    ::mir::interpret::struct_error(
                         tcx.at(tcx.def_span(expr_did)),
                         "constant evaluation of enum discriminant resulted in non-integer",
                     ).emit();
@@ -2503,7 +2494,7 @@ fn associated_item_from_trait_item_ref(self,
         };
 
         AssociatedItem {
-            name: trait_item_ref.name,
+            ident: trait_item_ref.ident,
             kind,
             // Visibility of trait items is inherited from their traits.
             vis: Visibility::from_hir(parent_vis, trait_item_ref.id.node_id, self),
@@ -2527,8 +2518,8 @@ fn associated_item_from_impl_item_ref(self,
             hir::AssociatedItemKind::Type => (ty::AssociatedKind::Type, false),
         };
 
-        ty::AssociatedItem {
-            name: impl_item_ref.name,
+        AssociatedItem {
+            ident: impl_item_ref.ident,
             kind,
             // Visibility of trait impl items doesn't matter.
             vis: ty::Visibility::from_hir(&impl_item_ref.vis, impl_item_ref.id.node_id, self),
@@ -2553,10 +2544,10 @@ pub fn find_field_index(self, ident: Ident, variant: &VariantDef) -> Option<usiz
     pub fn associated_items(
         self,
         def_id: DefId,
-    ) -> impl Iterator<Item = ty::AssociatedItem> + 'a {
+    ) -> impl Iterator<Item = AssociatedItem> + 'a {
         let def_ids = self.associated_item_def_ids(def_id);
         Box::new((0..def_ids.len()).map(move |i| self.associated_item(def_ids[i])))
-            as Box<dyn Iterator<Item = ty::AssociatedItem> + 'a>
+            as Box<dyn Iterator<Item = AssociatedItem> + 'a>
     }
 
     /// Returns true if the impls are the same polarity and are implementing
@@ -2726,9 +2717,8 @@ pub fn span_of_impl(self, impl_did: DefId) -> Result<Span, Symbol> {
     // Hygienically compare a use-site name (`use_name`) for a field or an associated item with its
     // supposed definition name (`def_name`). The method also needs the `DefId` of the supposed
     // definition's parent/scope to perform the comparison.
-    pub fn hygienic_eq(self, use_name: Name, def_name: Name, def_parent_def_id: DefId) -> bool {
-        let (use_ident, def_ident) = (use_name.to_ident(), def_name.to_ident());
-        self.adjust_ident(use_ident, def_parent_def_id, DUMMY_NODE_ID).0 == def_ident
+    pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool {
+        self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID).0 == def_name.modern()
     }
 
     pub fn adjust_ident(self, mut ident: Ident, scope: DefId, block: NodeId) -> (Ident, DefId) {
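The hunks above move `AssociatedItem` and `hygienic_eq` from plain `Name`s to `Ident`s, so lookups can respect macro hygiene instead of comparing raw strings. A minimal standalone sketch of the idea, using toy `Ident`/`Ctxt` types and an assumed context-adjustment closure (not the compiler's real `Ident`, `SyntaxContext`, or `adjust_ident`):

```rust
// Toy model: an identifier is a name plus the syntax context it was written in.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Ctxt(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Ident<'a> {
    name: &'a str,
    ctxt: Ctxt,
}

// Hygienic equality: adjust the use-site context toward the definition site,
// then compare, rather than comparing names alone.
fn hygienic_eq(use_site: Ident, def_site: Ident, adjust: impl Fn(Ctxt) -> Ctxt) -> bool {
    use_site.name == def_site.name && adjust(use_site.ctxt) == def_site.ctxt
}

fn main() {
    let def = Ident { name: "len", ctxt: Ctxt(0) };
    // Assume context 3 was introduced by a macro whose expansion resolves back to context 0.
    let adjust = |c: Ctxt| if c == Ctxt(3) { Ctxt(0) } else { c };
    assert!(hygienic_eq(Ident { name: "len", ctxt: Ctxt(3) }, def, &adjust));
    assert!(!hygienic_eq(Ident { name: "len", ctxt: Ctxt(7) }, def, &adjust));
}
```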
index cc00e9a00abb32d2d6382c5a71336cd5bfb17274..229caeb95d62105d77a15051c6edacfeac18ef2e 100644 (file)
 use dep_graph::SerializedDepNodeIndex;
 use dep_graph::DepNode;
 use hir::def_id::{CrateNum, DefId, DefIndex};
-use mir::interpret::{GlobalId, ConstValue};
-use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal};
+use mir::interpret::GlobalId;
+use traits::query::{
+    CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal, CanonicalTypeOpEqGoal,
+    CanonicalTypeOpNormalizeGoal, CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal,
+};
 use ty::{self, ParamEnvAnd, Ty, TyCtxt};
 use ty::subst::Substs;
 use ty::query::queries;
@@ -102,6 +105,54 @@ fn describe(_tcx: TyCtxt, goal: CanonicalPredicateGoal<'tcx>) -> String {
     }
 }
 
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_eq<'tcx> {
+    fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpEqGoal<'tcx>) -> String {
+        format!("evaluating `type_op_eq` `{:?}`", goal)
+    }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_subtype<'tcx> {
+    fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpSubtypeGoal<'tcx>) -> String {
+        format!("evaluating `type_op_subtype` `{:?}`", goal)
+    }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_prove_predicate<'tcx> {
+    fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpProvePredicateGoal<'tcx>) -> String {
+        format!("evaluating `type_op_prove_predicate` `{:?}`", goal)
+    }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_ty<'tcx> {
+    fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>) -> String {
+        format!("normalizing `{:?}`", goal)
+    }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_predicate<'tcx> {
+    fn describe(
+        _tcx: TyCtxt,
+        goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::Predicate<'tcx>>,
+    ) -> String {
+        format!("normalizing `{:?}`", goal)
+    }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_poly_fn_sig<'tcx> {
+    fn describe(
+        _tcx: TyCtxt,
+        goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::PolyFnSig<'tcx>>,
+    ) -> String {
+        format!("normalizing `{:?}`", goal)
+    }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_fn_sig<'tcx> {
+    fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>) -> String {
+        format!("normalizing `{:?}`", goal)
+    }
+}
+
 impl<'tcx> QueryDescription<'tcx> for queries::is_copy_raw<'tcx> {
     fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
         format!("computing whether `{}` is `Copy`", env.value)
@@ -140,8 +191,8 @@ fn describe(tcx: TyCtxt, def_id: DefId) -> String {
 }
 
 impl<'tcx> QueryDescription<'tcx> for queries::const_value_to_allocation<'tcx> {
-    fn describe(_tcx: TyCtxt, (val, ty): (ConstValue<'tcx>, Ty<'tcx>)) -> String {
-        format!("converting value `{:?}` ({}) to an allocation", val, ty)
+    fn describe(_tcx: TyCtxt, val: &'tcx ty::Const<'tcx>) -> String {
+        format!("converting value `{:?}` to an allocation", val)
     }
 }
 
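In the describe-impl above, the `const_value_to_allocation` query key changes from a `(ConstValue<'tcx>, Ty<'tcx>)` pair to a single `&'tcx ty::Const<'tcx>`, which already carries its type. A toy sketch of that signature change (stand-in types, not the real `ty::Const`):

```rust
// Stand-ins: `ty` and `val` model Ty<'tcx> and ConstValue<'tcx>.
#[derive(Debug)]
struct Const<'tcx> {
    ty: &'tcx str,
    val: u128,
}

// Before: fn const_value_to_allocation((val, ty): (u128, &str)) -> Vec<u8>
// After: the type travels with the constant, so callers pass one reference.
fn const_value_to_allocation(c: &Const) -> Vec<u8> {
    format!("{}: {}", c.ty, c.val).into_bytes()
}

fn main() {
    let c = Const { ty: "u32", val: 7 };
    println!("{:?}", const_value_to_allocation(&c));
}
```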
index 279d5ebb9901e11fbc1e6b849e4bd99d342f87ce..8423b02ee7582d2e73f74f30ba285592aeffba3b 100644 (file)
@@ -10,8 +10,8 @@
 
 //! Defines the set of legal keys that can be used in queries.
 
+use infer::canonical::Canonical;
 use hir::def_id::{CrateNum, DefId, LOCAL_CRATE, DefIndex};
-use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal};
 use ty::{self, Ty, TyCtxt};
 use ty::subst::Substs;
 use ty::fast_reject::SimplifiedType;
@@ -145,7 +145,7 @@ fn default_span(&self, tcx: TyCtxt) -> Span {
     }
 }
 
-impl<'tcx> Key for (mir::interpret::ConstValue<'tcx>, Ty<'tcx>) {
+impl<'tcx> Key for &'tcx ty::Const<'tcx> {
     fn query_crate(&self) -> CrateNum {
         LOCAL_CRATE
     }
@@ -190,27 +190,12 @@ fn default_span(&self, _tcx: TyCtxt) -> Span {
     }
 }
 
-impl<'tcx> Key for CanonicalProjectionGoal<'tcx> {
-    fn query_crate(&self) -> CrateNum {
-        LOCAL_CRATE
-    }
-
-    fn default_span(&self, _tcx: TyCtxt) -> Span {
-        DUMMY_SP
-    }
-}
-
-impl<'tcx> Key for CanonicalTyGoal<'tcx> {
-    fn query_crate(&self) -> CrateNum {
-        LOCAL_CRATE
-    }
-
-    fn default_span(&self, _tcx: TyCtxt) -> Span {
-        DUMMY_SP
-    }
-}
-
-impl<'tcx> Key for CanonicalPredicateGoal<'tcx> {
+/// Canonical query goals correspond to abstract trait operations that
+/// are not tied to any crate in particular.
+impl<'tcx, T> Key for Canonical<'tcx, T>
+where
+    T: Debug + Hash + Clone + Eq,
+{
     fn query_crate(&self) -> CrateNum {
         LOCAL_CRATE
     }
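The hunk above deletes three near-identical `Key` impls (for the projection, type, and predicate goals) and replaces them with one blanket impl over any `Canonical<'tcx, T>`. A self-contained sketch of that pattern with a toy `Key` trait; the real trait also has a `default_span` method, omitted here:

```rust
use std::fmt::Debug;
use std::hash::Hash;

// Toy stand-in for the query-key trait.
trait Key {
    fn query_crate(&self) -> &'static str;
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Canonical<T>(T);

// One blanket impl replaces a separate impl per goal type: any canonicalized
// value that is Debug + Hash + Clone + Eq can serve as a query key.
impl<T: Debug + Hash + Clone + Eq> Key for Canonical<T> {
    fn query_crate(&self) -> &'static str {
        "LOCAL_CRATE"
    }
}

fn main() {
    println!("{}", Canonical(42u32).query_crate());
    println!("{}", Canonical(String::from("projection goal")).query_crate());
}
```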
index f19bc01e198577272611abe88ce8f9f7f3fb8222..77644cdf02b116d63e173a9d5c9f97916da3941c 100644 (file)
 use middle::stability::{self, DeprecationEntry};
 use middle::lang_items::{LanguageItems, LangItem};
 use middle::exported_symbols::{SymbolExportLevel, ExportedSymbol};
-use middle::const_val::EvalResult;
+use mir::interpret::ConstEvalResult;
 use mir::mono::{CodegenUnit, Stats};
 use mir;
-use mir::interpret::{GlobalId, Allocation, ConstValue};
+use mir::interpret::{GlobalId, Allocation};
 use session::{CompileResult, CrateDisambiguator};
 use session::config::OutputFilenames;
 use traits::{self, Vtable};
 use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal,
-                    CanonicalTyGoal, NoSolution};
+                    CanonicalTyGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpSubtypeGoal,
+                    CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpNormalizeGoal, NoSolution};
 use traits::query::dropck_outlives::{DtorckConstraint, DropckOutlivesResult};
 use traits::query::normalize::NormalizationResult;
 use traits::specialization_graph;
     /// Results of evaluating const items or constants embedded in
     /// other items (such as enum variant explicit discriminants).
     [] fn const_eval: const_eval_dep_node(ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>)
-        -> EvalResult<'tcx>,
+        -> ConstEvalResult<'tcx>,
 
     /// Converts a constant value to a constant allocation
     [] fn const_value_to_allocation: const_value_to_allocation(
-        (ConstValue<'tcx>, Ty<'tcx>)
+        &'tcx ty::Const<'tcx>
     ) -> &'tcx Allocation,
 
     [] fn check_match: CheckMatch(DefId)
         CanonicalPredicateGoal<'tcx>
     ) -> Result<traits::EvaluationResult, traits::OverflowError>,
 
+    /// Do not call this query directly: part of the `Eq` type-op
+    [] fn type_op_eq: TypeOpEq(
+        CanonicalTypeOpEqGoal<'tcx>
+    ) -> Result<
+        Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+        NoSolution,
+    >,
+
+    /// Do not call this query directly: part of the `Subtype` type-op
+    [] fn type_op_subtype: TypeOpSubtype(
+        CanonicalTypeOpSubtypeGoal<'tcx>
+    ) -> Result<
+        Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+        NoSolution,
+    >,
+
+    /// Do not call this query directly: part of the `ProvePredicate` type-op
+    [] fn type_op_prove_predicate: TypeOpProvePredicate(
+        CanonicalTypeOpProvePredicateGoal<'tcx>
+    ) -> Result<
+        Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+        NoSolution,
+    >,
+
+    /// Do not call this query directly: part of the `Normalize` type-op
+    [] fn type_op_normalize_ty: TypeOpNormalizeTy(
+        CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>
+    ) -> Result<
+        Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, Ty<'tcx>>>>,
+        NoSolution,
+    >,
+
+    /// Do not call this query directly: part of the `Normalize` type-op
+    [] fn type_op_normalize_predicate: TypeOpNormalizePredicate(
+        CanonicalTypeOpNormalizeGoal<'tcx, ty::Predicate<'tcx>>
+    ) -> Result<
+        Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::Predicate<'tcx>>>>,
+        NoSolution,
+    >,
+
+    /// Do not call this query directly: part of the `Normalize` type-op
+    [] fn type_op_normalize_poly_fn_sig: TypeOpNormalizePolyFnSig(
+        CanonicalTypeOpNormalizeGoal<'tcx, ty::PolyFnSig<'tcx>>
+    ) -> Result<
+        Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::PolyFnSig<'tcx>>>>,
+        NoSolution,
+    >,
+
+    /// Do not call this query directly: part of the `Normalize` type-op
+    [] fn type_op_normalize_fn_sig: TypeOpNormalizeFnSig(
+        CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>
+    ) -> Result<
+        Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::FnSig<'tcx>>>>,
+        NoSolution,
+    >,
+
     [] fn substitute_normalize_and_test_predicates:
         substitute_normalize_and_test_predicates_node((DefId, &'tcx Substs<'tcx>)) -> bool,
 
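All of the new `type_op_*` queries above share one return shape: a canonicalized query result behind an `Lrc` (a shared reference-counted pointer from rustc's data-structures crate), or `NoSolution` on failure. A hedged, standalone sketch of that shape using `std::rc::Rc` and toy `Canonical`/`QueryResult`/`NoSolution` types:

```rust
use std::rc::Rc;

// Toy stand-ins for the canonicalized query plumbing, not the compiler's types.
#[derive(Debug)]
struct Canonical<T>(T);

#[derive(Debug)]
struct QueryResult<T> {
    value: T, // region constraints, obligations, etc. would also live here
}

#[derive(Debug)]
struct NoSolution;

// Shared shape of a type-op query: either a canonicalized result that can be
// cheaply shared between callers, or a report that no solution exists.
fn type_op_normalize_ty(goal: &str) -> Result<Rc<Canonical<QueryResult<String>>>, NoSolution> {
    if goal.is_empty() {
        Err(NoSolution)
    } else {
        Ok(Rc::new(Canonical(QueryResult { value: goal.to_owned() })))
    }
}

fn main() {
    println!("{:?}", type_op_normalize_ty("Vec<u8>"));
    println!("{:?}", type_op_normalize_ty(""));
}
```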
@@ -513,9 +570,9 @@ fn erase_regions_ty<'tcx>(ty: Ty<'tcx>) -> DepConstructor<'tcx> {
 }
 
 fn const_value_to_allocation<'tcx>(
-    (val, ty): (ConstValue<'tcx>, Ty<'tcx>)
+    val: &'tcx ty::Const<'tcx>,
 ) -> DepConstructor<'tcx> {
-    DepConstructor::ConstValueToAllocation { val, ty }
+    DepConstructor::ConstValueToAllocation { val }
 }
 
 fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> {
index 3285380c823922b2cf74ccc8b602c0f7be21644c..aa42b4072bd8a62a98c3b4420593b92b207bf248 100644 (file)
@@ -623,7 +623,7 @@ fn specialized_decode(&mut self) -> Result<Span, Self::Error> {
         let len = BytePos::decode(self)?;
 
         let file_lo = self.file_index_to_file(file_lo_index);
-        let lo = file_lo.lines.borrow()[line_lo - 1] + col_lo;
+        let lo = file_lo.lines[line_lo - 1] + col_lo;
         let hi = lo + len;
 
         let expn_info_tag = u8::decode(self)?;
@@ -979,7 +979,7 @@ fn specialized_encode(&mut self, node_id: &NodeId) -> Result<(), Self::Error> {
 }
 
 impl<'enc, 'a, 'tcx> SpecializedEncoder<Fingerprint>
-for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder<'enc>>
+for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder>
 {
     fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
         f.encode_opaque(&mut self.encoder)
@@ -1057,7 +1057,7 @@ impl IntEncodedWithFixedSize {
 impl UseSpecializedEncodable for IntEncodedWithFixedSize {}
 impl UseSpecializedDecodable for IntEncodedWithFixedSize {}
 
-impl<'enc> SpecializedEncoder<IntEncodedWithFixedSize> for opaque::Encoder<'enc> {
+impl SpecializedEncoder<IntEncodedWithFixedSize> for opaque::Encoder {
     fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> {
         let start_pos = self.position();
         for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE {
index 4679c265d5805611572b86db8fa9627e4ec03c22..e17c6fba74c6ed2fc0c406e19287bef25ad5d891 100644 (file)
@@ -1028,6 +1028,13 @@ macro_rules! force {
         DepKind::NormalizeTyAfterErasingRegions |
         DepKind::DropckOutlives |
         DepKind::EvaluateObligation |
+        DepKind::TypeOpEq |
+        DepKind::TypeOpSubtype |
+        DepKind::TypeOpProvePredicate |
+        DepKind::TypeOpNormalizeTy |
+        DepKind::TypeOpNormalizePredicate |
+        DepKind::TypeOpNormalizePolyFnSig |
+        DepKind::TypeOpNormalizeFnSig |
         DepKind::SubstituteNormalizeAndTestPredicates |
         DepKind::InstanceDefSizeEstimate |
         DepKind::ProgramClausesForEnv |
index 7603ed7102327e1b4bd9b03426a0cd3d1c761416..265c6aee397266a298e134c19063f4b7719db234 100644 (file)
@@ -14,7 +14,7 @@
 //! type equality, etc.
 
 use hir::def_id::DefId;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
 use ty::subst::{Kind, UnpackedKind, Substs};
 use ty::{self, Ty, TyCtxt, TypeFoldable};
 use ty::error::{ExpectedFound, TypeError};
@@ -474,7 +474,7 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R,
                     return Ok(s);
                 }
                 match x.val {
-                    ConstVal::Unevaluated(def_id, substs) => {
+                    ConstValue::Unevaluated(def_id, substs) => {
                         // FIXME(eddyb) get the right param_env.
                         let param_env = ty::ParamEnv::empty();
                         match tcx.lift_to_global(&substs) {
index 79a6311185076a0b5366661722871d48ba463b25..a648dc6e7e7880a166b002760c3b6d4604c2ee2b 100644 (file)
 //! hand, though we've recently added some macros (e.g.,
 //! `BraceStructLiftImpl!`) to help with the tedium.
 
-use middle::const_val::{self, ConstVal, ConstEvalErr};
+use mir::interpret::{ConstValue, ConstEvalErr};
 use ty::{self, Lift, Ty, TyCtxt};
 use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
 use rustc_data_structures::accumulate_vec::AccumulateVec;
 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use rustc_data_structures::sync::Lrc;
 use mir::interpret;
 
 use std::rc::Rc;
@@ -462,10 +461,11 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lif
 impl<'a, 'tcx> Lift<'tcx> for ConstEvalErr<'a> {
     type Lifted = ConstEvalErr<'tcx>;
     fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
-        tcx.lift(&*self.kind).map(|kind| {
+        tcx.lift(&self.error).map(|error| {
             ConstEvalErr {
                 span: self.span,
-                kind: Lrc::new(kind),
+                stacktrace: self.stacktrace.clone(),
+                error,
             }
         })
     }
@@ -577,7 +577,9 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lif
             PathNotFound(ref v) => PathNotFound(v.clone()),
             UnimplementedTraitSelection => UnimplementedTraitSelection,
             TypeckError => TypeckError,
-            ReferencedConstant(ref err) => ReferencedConstant(tcx.lift(err)?),
+            TooGeneric => TooGeneric,
+            CheckMatchError => CheckMatchError,
+            ReferencedConstant(ref err) => ReferencedConstant(tcx.lift(&**err)?.into()),
             OverflowNeg => OverflowNeg,
             Overflow(op) => Overflow(op),
             DivisionByZero => DivisionByZero,
@@ -588,20 +590,6 @@ fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lif
     }
 }
 
-impl<'a, 'tcx> Lift<'tcx> for const_val::ErrKind<'a> {
-    type Lifted = const_val::ErrKind<'tcx>;
-    fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
-        use middle::const_val::ErrKind::*;
-
-        Some(match *self {
-            CouldNotResolve => CouldNotResolve,
-            TypeckError => TypeckError,
-            CheckMatchError => CheckMatchError,
-            Miri(ref e, ref frames) => return tcx.lift(e).map(|e| Miri(e, frames.clone())),
-        })
-    }
-}
-
 impl<'a, 'tcx> Lift<'tcx> for ty::layout::LayoutError<'a> {
     type Lifted = ty::layout::LayoutError<'tcx>;
     fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
@@ -1139,20 +1127,24 @@ impl<'tcx> TypeFoldable<'tcx> for ty::error::TypeError<'tcx> {
     }
 }
 
-impl<'tcx> TypeFoldable<'tcx> for ConstVal<'tcx> {
+impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
     fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
         match *self {
-            ConstVal::Value(v) => ConstVal::Value(v),
-            ConstVal::Unevaluated(def_id, substs) => {
-                ConstVal::Unevaluated(def_id, substs.fold_with(folder))
+            ConstValue::Scalar(v) => ConstValue::Scalar(v),
+            ConstValue::ScalarPair(a, b) => ConstValue::ScalarPair(a, b),
+            ConstValue::ByRef(alloc, offset) => ConstValue::ByRef(alloc, offset),
+            ConstValue::Unevaluated(def_id, substs) => {
+                ConstValue::Unevaluated(def_id, substs.fold_with(folder))
             }
         }
     }
 
     fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
         match *self {
-            ConstVal::Value(_) => false,
-            ConstVal::Unevaluated(_, substs) => substs.visit_with(visitor),
+            ConstValue::Scalar(_) |
+            ConstValue::ScalarPair(_, _) |
+            ConstValue::ByRef(_, _) => false,
+            ConstValue::Unevaluated(_, substs) => substs.visit_with(visitor),
         }
     }
 }
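The `TypeFoldable` impl above is the structural heart of the `ConstVal` to `ConstValue` migration: only `Unevaluated` carries substitutions that folding or visiting needs to walk, while the evaluated variants pass through untouched. A toy mirror of that fold/visit split (stand-in enum, not the compiler's `ConstValue`):

```rust
#[derive(Debug)]
enum ConstValue {
    Scalar(u64),
    ByRef(usize),
    Unevaluated(&'static str, Vec<String>), // (definition path, substitutions)
}

// Folding rewrites only the substitutions of an unevaluated constant.
fn fold_substs(v: &ConstValue, f: impl Fn(&str) -> String) -> ConstValue {
    match v {
        ConstValue::Scalar(x) => ConstValue::Scalar(*x),
        ConstValue::ByRef(x) => ConstValue::ByRef(*x),
        ConstValue::Unevaluated(def, substs) => {
            ConstValue::Unevaluated(*def, substs.iter().map(|s| f(s.as_str())).collect())
        }
    }
}

// Visiting has nothing to inspect in the already-evaluated variants.
fn visit_substs(v: &ConstValue, pred: impl Fn(&str) -> bool) -> bool {
    match v {
        ConstValue::Scalar(_) | ConstValue::ByRef(_) => false,
        ConstValue::Unevaluated(_, substs) => substs.iter().any(|s| pred(s.as_str())),
    }
}

fn main() {
    let c = ConstValue::Unevaluated("foo::BAR", vec!["T".to_string(), "'a".to_string()]);
    println!("{:?}", fold_substs(&c, |s| s.to_lowercase()));
    println!("{}", visit_substs(&c, |s| s.starts_with('\'')));
}
```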
index d8ca817ff2bd0314021f627f4f6869d4862f8d39..996ebd722fd4d3bf4393a72b1bf7442a45e1dab9 100644 (file)
@@ -12,7 +12,7 @@
 
 use hir::def_id::DefId;
 
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
 use middle::region;
 use polonius_engine::Atom;
 use rustc_data_structures::indexed_vec::Idx;
 use ty::{self, AdtDef, TypeFlags, Ty, TyCtxt, TypeFoldable};
 use ty::{Slice, TyS, ParamEnvAnd, ParamEnv};
 use util::captures::Captures;
-use mir::interpret::{Scalar, Pointer, Value, ConstValue};
+use mir::interpret::{Scalar, Pointer, Value};
 
 use std::iter;
 use std::cmp::Ordering;
 use rustc_target::spec::abi;
-use syntax::ast::{self, Name};
+use syntax::ast::{self, Ident};
 use syntax::symbol::{keywords, InternedString};
 
 use serialize;
@@ -853,11 +853,11 @@ impl<'a, 'tcx> ProjectionTy<'tcx> {
     /// Construct a ProjectionTy by searching the trait from trait_ref for the
     /// associated item named item_name.
     pub fn from_ref_and_name(
-        tcx: TyCtxt, trait_ref: ty::TraitRef<'tcx>, item_name: Name
+        tcx: TyCtxt, trait_ref: ty::TraitRef<'tcx>, item_name: Ident
     ) -> ProjectionTy<'tcx> {
         let item_def_id = tcx.associated_items(trait_ref.def_id).find(|item| {
             item.kind == ty::AssociatedKind::Type &&
-            tcx.hygienic_eq(item_name, item.name, trait_ref.def_id)
+            tcx.hygienic_eq(item_name, item.ident, trait_ref.def_id)
         }).unwrap().def_id;
 
         ProjectionTy {
@@ -1751,6 +1751,13 @@ pub fn is_fn(&self) -> bool {
         }
     }
 
+    pub fn is_impl_trait(&self) -> bool {
+        match self.sty {
+            TyAnon(..) => true,
+            _ => false,
+        }
+    }
+
     pub fn ty_to_def_id(&self) -> Option<DefId> {
         match self.sty {
             TyDynamic(ref tt, ..) => tt.principal().map(|p| p.def_id()),
@@ -1852,7 +1859,7 @@ pub fn to_opt_closure_kind(&self) -> Option<ty::ClosureKind> {
 pub struct Const<'tcx> {
     pub ty: Ty<'tcx>,
 
-    pub val: ConstVal<'tcx>,
+    pub val: ConstValue<'tcx>,
 }
 
 impl<'tcx> Const<'tcx> {
@@ -1863,15 +1870,15 @@ pub fn unevaluated(
         ty: Ty<'tcx>,
     ) -> &'tcx Self {
         tcx.mk_const(Const {
-            val: ConstVal::Unevaluated(def_id, substs),
+            val: ConstValue::Unevaluated(def_id, substs),
             ty,
         })
     }
 
     #[inline]
-    pub fn from_const_val(
+    pub fn from_const_value(
         tcx: TyCtxt<'_, '_, 'tcx>,
-        val: ConstVal<'tcx>,
+        val: ConstValue<'tcx>,
         ty: Ty<'tcx>,
     ) -> &'tcx Self {
         tcx.mk_const(Const {
@@ -1880,15 +1887,6 @@ pub fn from_const_val(
         })
     }
 
-    #[inline]
-    pub fn from_const_value(
-        tcx: TyCtxt<'_, '_, 'tcx>,
-        val: ConstValue<'tcx>,
-        ty: Ty<'tcx>,
-    ) -> &'tcx Self {
-        Self::from_const_val(tcx, ConstVal::Value(val), ty)
-    }
-
     #[inline]
     pub fn from_byval_value(
         tcx: TyCtxt<'_, '_, 'tcx>,
@@ -1949,34 +1947,22 @@ pub fn to_bits(
         }
         let ty = tcx.lift_to_global(&ty).unwrap();
         let size = tcx.layout_of(ty).ok()?.size;
-        match self.val {
-            ConstVal::Value(val) => val.to_bits(size),
-            _ => None,
-        }
+        self.val.to_bits(size)
     }
 
     #[inline]
     pub fn to_ptr(&self) -> Option<Pointer> {
-        match self.val {
-            ConstVal::Value(val) => val.to_ptr(),
-            _ => None,
-        }
+        self.val.to_ptr()
     }
 
     #[inline]
     pub fn to_byval_value(&self) -> Option<Value> {
-        match self.val {
-            ConstVal::Value(val) => val.to_byval_value(),
-            _ => None,
-        }
+        self.val.to_byval_value()
     }
 
     #[inline]
     pub fn to_scalar(&self) -> Option<Scalar> {
-        match self.val {
-            ConstVal::Value(val) => val.to_scalar(),
-            _ => None,
-        }
+        self.val.to_scalar()
     }
 
     #[inline]
@@ -1988,10 +1974,7 @@ pub fn assert_bits(
         assert_eq!(self.ty, ty.value);
         let ty = tcx.lift_to_global(&ty).unwrap();
         let size = tcx.layout_of(ty).ok()?.size;
-        match self.val {
-            ConstVal::Value(val) => val.to_bits(size),
-            _ => None,
-        }
+        self.val.to_bits(size)
     }
 
     #[inline]
index 024885420f619f01ceff91ed9e6bfb8efedfc418..4e281231a4105ef258fbf451b68856e5e6967bef 100644 (file)
@@ -522,6 +522,12 @@ pub fn is_closure(self, def_id: DefId) -> bool {
         self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr
     }
 
+    /// True if this def-id refers to the implicit constructor for
+    /// a tuple struct like `struct Foo(u32)`.
+    pub fn is_struct_constructor(self, def_id: DefId) -> bool {
+        self.def_key(def_id).disambiguated_data.data == DefPathData::StructCtor
+    }
+
     /// Given the `DefId` of a fn or closure, returns the `DefId` of
     /// the innermost fn item that the closure is contained within.
     /// This is a significant def-id because, when we do
index ebe88d60ed12f0ec9b624d4744d10603b67e2f78..d12f73144269bf7e87676fb038526149b34d2008 100644 (file)
@@ -11,7 +11,7 @@
 //! An iterator over the type substructure.
 //! WARNING: this does not keep track of the region depth.
 
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
 use ty::{self, Ty};
 use rustc_data_structures::small_vec::SmallVec;
 use rustc_data_structures::accumulate_vec::IntoIter as AccIntoIter;
@@ -141,11 +141,8 @@ fn push_subtypes<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent_ty: Ty<'tcx>) {
 }
 
 fn push_const<'tcx>(stack: &mut TypeWalkerStack<'tcx>, constant: &'tcx ty::Const<'tcx>) {
-    match constant.val {
-        ConstVal::Value(_) => {}
-        ConstVal::Unevaluated(_, substs) => {
-            stack.extend(substs.types().rev());
-        }
+    if let ConstValue::Unevaluated(_, substs) = constant.val {
+        stack.extend(substs.types().rev());
     }
     stack.push(constant.ty);
 }
index 62fed1ecb668aa5402809263a630db9d6733c01b..5376acca0d8cdabf5402fc13631a2731f63abfb9 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use hir::def_id::DefId;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
 use infer::InferCtxt;
 use ty::subst::Substs;
 use traits;
@@ -216,18 +216,15 @@ fn compute_projection(&mut self, data: ty::ProjectionTy<'tcx>) {
     /// into `self.out`.
     fn compute_const(&mut self, constant: &'tcx ty::Const<'tcx>) {
         self.require_sized(constant.ty, traits::ConstSized);
-        match constant.val {
-            ConstVal::Value(_) => {}
-            ConstVal::Unevaluated(def_id, substs) => {
-                let obligations = self.nominal_obligations(def_id, substs);
-                self.out.extend(obligations);
-
-                let predicate = ty::Predicate::ConstEvaluatable(def_id, substs);
-                let cause = self.cause(traits::MiscObligation);
-                self.out.push(traits::Obligation::new(cause,
-                                                      self.param_env,
-                                                      predicate));
-            }
+        if let ConstValue::Unevaluated(def_id, substs) = constant.val {
+            let obligations = self.nominal_obligations(def_id, substs);
+            self.out.extend(obligations);
+
+            let predicate = ty::Predicate::ConstEvaluatable(def_id, substs);
+            let cause = self.cause(traits::MiscObligation);
+            self.out.push(traits::Obligation::new(cause,
+                                                    self.param_env,
+                                                    predicate));
         }
     }
 
index 4fe9c34c140cbdf8136258728b33931570fdffa0..deb8ac691fb3b2d772d6b7b17256c0bcc2d4bc8e 100644 (file)
@@ -10,7 +10,7 @@
 
 use hir::def_id::DefId;
 use hir::map::definitions::DefPathData;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
 use middle::region::{self, BlockRemainder};
 use ty::subst::{self, Subst};
 use ty::{BrAnon, BrEnv, BrFresh, BrNamed};
@@ -429,7 +429,7 @@ fn parameterized<F: fmt::Write>(&mut self,
             ty::tls::with(|tcx|
                 print!(f, self,
                        write("{}=",
-                             tcx.associated_item(projection.projection_ty.item_def_id).name),
+                             tcx.associated_item(projection.projection_ty.item_def_id).ident),
                        print_display(projection.ty))
             )?;
         }
@@ -1195,12 +1195,12 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                 TyArray(ty, sz) => {
                     print!(f, cx, write("["), print(ty), write("; "))?;
                     match sz.val {
-                        ConstVal::Value(..) => ty::tls::with(|tcx| {
-                            write!(f, "{}", sz.unwrap_usize(tcx))
-                        })?,
-                        ConstVal::Unevaluated(_def_id, _substs) => {
+                        ConstValue::Unevaluated(_def_id, _substs) => {
                             write!(f, "_")?;
                         }
+                        _ => ty::tls::with(|tcx| {
+                            write!(f, "{}", sz.unwrap_usize(tcx))
+                        })?,
                     }
                     write!(f, "]")
                 }
@@ -1286,7 +1286,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
             //   parameterized(f, self.substs, self.item_def_id, &[])
             // (which currently ICEs).
             let (trait_ref, item_name) = ty::tls::with(|tcx|
-                (self.trait_ref(tcx), tcx.associated_item(self.item_def_id).name)
+                (self.trait_ref(tcx), tcx.associated_item(self.item_def_id).ident)
             );
             print!(f, cx, print_debug(trait_ref), write("::{}", item_name))
         }
index 60d28d8098b4ca656b7b86cbb6617c63adf3dba7..373ab04de4b825bbd1ea31ee512cea24a6b9feba 100644 (file)
@@ -103,6 +103,7 @@ fn fold_item(&mut self, item: P<Item>) -> SmallVector<P<Item>> {
             format: MacroAttribute(Symbol::intern(name)),
             allow_internal_unstable: true,
             allow_internal_unsafe: false,
+            local_inner_macros: false,
             edition: hygiene::default_edition(),
         });
 
index fa9bb57275abdadd9164729bf7e082ee27790b61..241950fb6bff069b7e2d5d0a8489e440ec96c31b 100644 (file)
@@ -109,10 +109,10 @@ pub fn gather_move_from_pat<'a, 'c, 'tcx: 'c>(bccx: &BorrowckCtxt<'a, 'tcx>,
                                               cmt: &'c mc::cmt_<'tcx>) {
     let source = get_pattern_source(bccx.tcx,move_pat);
     let pat_span_path_opt = match move_pat.node {
-        PatKind::Binding(_, _, ref path1, _) => {
+        PatKind::Binding(_, _, ident, _) => {
             Some(MovePlace {
                      span: move_pat.span,
-                     name: path1.node,
+                     name: ident.name,
                      pat_source: source,
                  })
         }
index b2b4c7d777eabdc20136e9d1a7ca20326d43c165..294ae1e63a9ee6f922b836ee6d2d46faa18c4d4e 100644 (file)
@@ -46,11 +46,9 @@ fn check_unused_mut_pat(&self, pats: &[P<hir::Pat>]) {
         let tcx = self.bccx.tcx;
         let mut mutables = FxHashMap();
         for p in pats {
-            p.each_binding(|_, hir_id, span, path1| {
-                let name = path1.node;
-
+            p.each_binding(|_, hir_id, span, ident| {
                 // Skip anything that looks like `_foo`
-                if name.as_str().starts_with("_") {
+                if ident.as_str().starts_with("_") {
                     return;
                 }
 
@@ -65,7 +63,7 @@ fn check_unused_mut_pat(&self, pats: &[P<hir::Pat>]) {
                     _ => return,
                 }
 
-                mutables.entry(name).or_insert(Vec::new()).push((hir_id, span));
+                mutables.entry(ident.name).or_insert(Vec::new()).push((hir_id, span));
             });
         }
 
index d6d386c9fbe77d74cd3468c1aed99fdf985c1704..c553eca08e6b4979a929d9dafbf49c3c40e6727d 100644 (file)
@@ -230,8 +230,7 @@ fn new() -> WasmEncoder {
     }
 
     fn u32(&mut self, val: u32) {
-        let at = self.data.len();
-        leb128::write_u32_leb128(&mut self.data, at, val);
+        leb128::write_u32_leb128(&mut self.data, val);
     }
 
     fn byte(&mut self, val: u8) {
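The wasm encoder's `u32` helper now appends LEB128 bytes straight onto its buffer; there is no longer an explicit write position to track. As a reference point, here is a minimal unsigned LEB128 append routine with the same two-argument shape (an illustrative sketch, not `rustc_serialize`'s actual `write_u32_leb128`):

```rust
// Unsigned LEB128: emit 7 bits per byte, setting the high bit while more bytes follow.
fn write_u32_leb128(out: &mut Vec<u8>, mut val: u32) {
    loop {
        let mut byte = (val & 0x7f) as u8;
        val >>= 7;
        if val != 0 {
            byte |= 0x80; // continuation bit
        }
        out.push(byte);
        if val == 0 {
            break;
        }
    }
}

fn main() {
    let mut buf = Vec::new();
    write_u32_leb128(&mut buf, 624_485);
    assert_eq!(buf, [0xe5, 0x8e, 0x26]); // the textbook LEB128 example value
}
```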
index 322924535d1f72baeb5588eecad1f606043e09e3..a4709739a23ddabcbb587813975a407d82fe9af3 100644 (file)
@@ -1381,7 +1381,6 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 fn fetch_wasm_section(tcx: TyCtxt, id: DefId) -> (String, Vec<u8>) {
     use rustc::mir::interpret::GlobalId;
-    use rustc::middle::const_val::ConstVal;
 
     info!("loading wasm section {:?}", id);
 
@@ -1399,12 +1398,6 @@ fn fetch_wasm_section(tcx: TyCtxt, id: DefId) -> (String, Vec<u8>) {
     };
     let param_env = ty::ParamEnv::reveal_all();
     let val = tcx.const_eval(param_env.and(cid)).unwrap();
-
-    let const_val = match val.val {
-        ConstVal::Value(val) => val,
-        ConstVal::Unevaluated(..) => bug!("should be evaluated"),
-    };
-
-    let alloc = tcx.const_value_to_allocation((const_val, val.ty));
+    let alloc = tcx.const_value_to_allocation(val);
     (section.to_string(), alloc.bytes.clone())
 }
index 7c1035e2fcb8882f190d3e5e2f274bbcf834e98d..bbe0e34b48f2642bb1754c9b801b7c17de0250d2 100644 (file)
@@ -9,11 +9,12 @@
 // except according to those terms.
 
 use llvm::{self, ValueRef};
-use rustc::middle::const_val::{ConstVal, ConstEvalErr};
+use rustc::mir::interpret::ConstEvalErr;
 use rustc_mir::interpret::{read_target_uint, const_val_field};
 use rustc::hir::def_id::DefId;
 use rustc::mir;
 use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::sync::Lrc;
 use rustc::mir::interpret::{GlobalId, Pointer, Scalar, Allocation, ConstValue, AllocType};
 use rustc::ty::{self, Ty};
 use rustc::ty::layout::{self, HasDataLayout, LayoutOf, Size};
@@ -117,7 +118,7 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx, alloc: &Allocation) -> ValueRef {
 pub fn codegen_static_initializer<'a, 'tcx>(
     cx: &CodegenCx<'a, 'tcx>,
     def_id: DefId)
-    -> Result<ValueRef, ConstEvalErr<'tcx>>
+    -> Result<ValueRef, Lrc<ConstEvalErr<'tcx>>>
 {
     let instance = ty::Instance::mono(cx.tcx, def_id);
     let cid = GlobalId {
@@ -128,20 +129,20 @@ pub fn codegen_static_initializer<'a, 'tcx>(
     let static_ = cx.tcx.const_eval(param_env.and(cid))?;
 
     let alloc = match static_.val {
-        ConstVal::Value(ConstValue::ByRef(alloc, n)) if n.bytes() == 0 => alloc,
+        ConstValue::ByRef(alloc, n) if n.bytes() == 0 => alloc,
         _ => bug!("static const eval returned {:#?}", static_),
     };
     Ok(const_alloc_to_llvm(cx, alloc))
 }
 
 impl<'a, 'tcx> FunctionCx<'a, 'tcx> {
-    fn const_to_const_value(
+    fn fully_evaluate(
         &mut self,
         bx: &Builder<'a, 'tcx>,
         constant: &'tcx ty::Const<'tcx>,
-    ) -> Result<ConstValue<'tcx>, ConstEvalErr<'tcx>> {
+    ) -> Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
         match constant.val {
-            ConstVal::Unevaluated(def_id, ref substs) => {
+            ConstValue::Unevaluated(def_id, ref substs) => {
                 let tcx = bx.tcx();
                 let param_env = ty::ParamEnv::reveal_all();
                 let instance = ty::Instance::resolve(tcx, param_env, def_id, substs).unwrap();
@@ -149,18 +150,17 @@ fn const_to_const_value(
                     instance,
                     promoted: None,
                 };
-                let c = tcx.const_eval(param_env.and(cid))?;
-                self.const_to_const_value(bx, c)
+                tcx.const_eval(param_env.and(cid))
             },
-            ConstVal::Value(val) => Ok(val),
+            _ => Ok(constant),
         }
     }
 
-    pub fn mir_constant_to_const_value(
+    pub fn eval_mir_constant(
         &mut self,
         bx: &Builder<'a, 'tcx>,
         constant: &mir::Constant<'tcx>,
-    ) -> Result<ConstValue<'tcx>, ConstEvalErr<'tcx>> {
+    ) -> Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
         match constant.literal {
             mir::Literal::Promoted { index } => {
                 let param_env = ty::ParamEnv::reveal_all();
@@ -173,7 +173,7 @@ pub fn mir_constant_to_const_value(
             mir::Literal::Value { value } => {
                 Ok(self.monomorphize(&value))
             }
-        }.and_then(|c| self.const_to_const_value(bx, c))
+        }.and_then(|c| self.fully_evaluate(bx, c))
     }
 
     /// process constant containing SIMD shuffle indices
@@ -182,14 +182,14 @@ pub fn simd_shuffle_indices(
         bx: &Builder<'a, 'tcx>,
         constant: &mir::Constant<'tcx>,
     ) -> (ValueRef, Ty<'tcx>) {
-        self.mir_constant_to_const_value(bx, constant)
+        self.eval_mir_constant(bx, constant)
             .and_then(|c| {
-                let field_ty = constant.ty.builtin_index().unwrap();
-                let fields = match constant.ty.sty {
+                let field_ty = c.ty.builtin_index().unwrap();
+                let fields = match c.ty.sty {
                     ty::TyArray(_, n) => n.unwrap_usize(bx.tcx()),
                     ref other => bug!("invalid simd shuffle type: {}", other),
                 };
-                let values: Result<Vec<ValueRef>, _> = (0..fields).map(|field| {
+                let values: Result<Vec<ValueRef>, Lrc<_>> = (0..fields).map(|field| {
                     let field = const_val_field(
                         bx.tcx(),
                         ty::ParamEnv::reveal_all(),
@@ -197,7 +197,6 @@ pub fn simd_shuffle_indices(
                         None,
                         mir::Field::new(field as usize),
                         c,
-                        constant.ty,
                     )?;
                     if let Some(prim) = field.to_scalar() {
                         let layout = bx.cx.layout_of(field_ty);
@@ -214,7 +213,7 @@ pub fn simd_shuffle_indices(
                     }
                 }).collect();
                 let llval = C_struct(bx.cx, &values?, false);
-                Ok((llval, constant.ty))
+                Ok((llval, c.ty))
             })
             .unwrap_or_else(|e| {
                 e.report_as_error(
index 9f32b41cb13e7913b9f8d6e5d7f0fc6a84d5e091..3d3a4400bd8108abca11973e5693f839dbc5fc32 100644 (file)
@@ -9,12 +9,13 @@
 // except according to those terms.
 
 use llvm::{ValueRef, LLVMConstInBoundsGEP};
-use rustc::middle::const_val::ConstEvalErr;
+use rustc::mir::interpret::ConstEvalErr;
 use rustc::mir;
 use rustc::mir::interpret::ConstValue;
 use rustc::ty;
 use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
 use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::sync::Lrc;
 
 use base;
 use common::{self, CodegenCx, C_null, C_undef, C_usize};
@@ -95,16 +96,16 @@ pub fn new_zst(cx: &CodegenCx<'a, 'tcx>,
     }
 
     pub fn from_const(bx: &Builder<'a, 'tcx>,
-                      val: ConstValue<'tcx>,
-                      ty: ty::Ty<'tcx>)
-                      -> Result<OperandRef<'tcx>, ConstEvalErr<'tcx>> {
-        let layout = bx.cx.layout_of(ty);
+                      val: &'tcx ty::Const<'tcx>)
+                      -> Result<OperandRef<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
+        let layout = bx.cx.layout_of(val.ty);
 
         if layout.is_zst() {
             return Ok(OperandRef::new_zst(bx.cx, layout));
         }
 
-        let val = match val {
+        let val = match val.val {
+            ConstValue::Unevaluated(..) => bug!(),
             ConstValue::Scalar(x) => {
                 let scalar = match layout.abi {
                     layout::Abi::Scalar(ref x) => x,
@@ -408,8 +409,8 @@ pub fn codegen_operand(&mut self,
 
             mir::Operand::Constant(ref constant) => {
                 let ty = self.monomorphize(&constant.ty);
-                self.mir_constant_to_const_value(bx, constant)
-                    .and_then(|c| OperandRef::from_const(bx, c, ty))
+                self.eval_mir_constant(bx, constant)
+                    .and_then(|c| OperandRef::from_const(bx, c))
                     .unwrap_or_else(|err| {
                         match constant.literal {
                             mir::Literal::Promoted { .. } => {
index 67fd5da8c92d9e69e5154cd9d28061ee84075b3f..84f7b35d21f33c542ba2d01dd37d44bebb7c2557 100644 (file)
@@ -1075,7 +1075,7 @@ fn print_crate_info(codegen_backend: &CodegenBackend,
                     let mut cfgs = Vec::new();
                     for &(name, ref value) in sess.parse_sess.config.iter() {
                         let gated_cfg = GatedCfg::gate(&ast::MetaItem {
-                            ident: ast::Path::from_ident(name.to_ident()),
+                            ident: ast::Path::from_ident(ast::Ident::with_empty_ctxt(name)),
                             node: ast::MetaItemKind::Word,
                             span: DUMMY_SP,
                         });
index d45994adeb67bcf2da10fcbfed2c587ff9632c90..98f7873fda0e41da5c569fcb237117881aaed3f0 100644 (file)
@@ -25,6 +25,7 @@
 use std::env;
 
 use rustc::session::config::nightly_options;
+use rustc_serialize::opaque::Encoder;
 
 /// The first few bytes of files generated by incremental compilation
 const FILE_MAGIC: &'static [u8] = b"RSIC";
 /// the git commit hash.
 const RUSTC_VERSION: Option<&'static str> = option_env!("CFG_VERSION");
 
-pub fn write_file_header<W: io::Write>(stream: &mut W) -> io::Result<()> {
-    stream.write_all(FILE_MAGIC)?;
-    stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8,
-                       (HEADER_FORMAT_VERSION >> 8) as u8])?;
+pub fn write_file_header(stream: &mut Encoder) {
+    stream.emit_raw_bytes(FILE_MAGIC);
+    stream.emit_raw_bytes(&[(HEADER_FORMAT_VERSION >> 0) as u8,
+                       (HEADER_FORMAT_VERSION >> 8) as u8]);
 
     let rustc_version = rustc_version();
     assert_eq!(rustc_version.len(), (rustc_version.len() as u8) as usize);
-    stream.write_all(&[rustc_version.len() as u8])?;
-    stream.write_all(rustc_version.as_bytes())?;
-
-    Ok(())
+    stream.emit_raw_bytes(&[rustc_version.len() as u8]);
+    stream.emit_raw_bytes(rustc_version.as_bytes());
 }
 
 /// Reads the contents of a file with a file header as defined in this module.
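`write_file_header` now writes through `opaque::Encoder::emit_raw_bytes`, which appends to an owned `Vec<u8>` and cannot fail, so the `io::Result` return type disappears. A self-contained sketch of that change with a toy encoder; the real `HEADER_FORMAT_VERSION` value is not shown in this diff, so a placeholder is used:

```rust
// Toy append-only encoder standing in for rustc_serialize::opaque::Encoder.
struct Encoder {
    data: Vec<u8>,
}

impl Encoder {
    fn new(data: Vec<u8>) -> Encoder {
        Encoder { data }
    }
    fn emit_raw_bytes(&mut self, bytes: &[u8]) {
        self.data.extend_from_slice(bytes); // appending to a Vec cannot fail
    }
    fn into_inner(self) -> Vec<u8> {
        self.data
    }
}

const FILE_MAGIC: &[u8] = b"RSIC";
const HEADER_FORMAT_VERSION: u16 = 5; // placeholder; the real constant is defined elsewhere

fn write_file_header(stream: &mut Encoder) {
    stream.emit_raw_bytes(FILE_MAGIC);
    stream.emit_raw_bytes(&[(HEADER_FORMAT_VERSION >> 0) as u8,
                            (HEADER_FORMAT_VERSION >> 8) as u8]);
}

fn main() {
    let mut enc = Encoder::new(Vec::new());
    write_file_header(&mut enc);
    assert_eq!(&enc.into_inner()[..4], b"RSIC");
}
```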
index dcef0c662c3a1164ca53bbf87b24fa9f07d68400..06b0ea946d778fc99847092fbb00a60a054923dd 100644 (file)
@@ -16,7 +16,6 @@
 use rustc_data_structures::sync::join;
 use rustc_serialize::Encodable as RustcEncodable;
 use rustc_serialize::opaque::Encoder;
-use std::io::{self, Cursor};
 use std::fs;
 use std::path::PathBuf;
 
@@ -98,7 +97,7 @@ pub fn save_work_product_index(sess: &Session,
 }
 
 fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
-    where F: FnOnce(&mut Encoder) -> io::Result<()>
+    where F: FnOnce(&mut Encoder)
 {
     debug!("save: storing data in {}", path_buf.display());
 
@@ -121,20 +120,12 @@ fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
     }
 
     // generate the data in a memory buffer
-    let mut wr = Cursor::new(Vec::new());
-    file_format::write_file_header(&mut wr).unwrap();
-    match encode(&mut Encoder::new(&mut wr)) {
-        Ok(()) => {}
-        Err(err) => {
-            sess.err(&format!("could not encode dep-graph to `{}`: {}",
-                              path_buf.display(),
-                              err));
-            return;
-        }
-    }
+    let mut encoder = Encoder::new(Vec::new());
+    file_format::write_file_header(&mut encoder);
+    encode(&mut encoder);
 
     // write the data out
-    let data = wr.into_inner();
+    let data = encoder.into_inner();
     match fs::write(&path_buf, data) {
         Ok(_) => {
             debug!("save: data written to disk successfully");
@@ -149,10 +140,9 @@ fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
 }
 
 fn encode_dep_graph(tcx: TyCtxt,
-                    encoder: &mut Encoder)
-                    -> io::Result<()> {
+                    encoder: &mut Encoder) {
     // First encode the commandline arguments hash
-    tcx.sess.opts.dep_tracking_hash().encode(encoder)?;
+    tcx.sess.opts.dep_tracking_hash().encode(encoder).unwrap();
 
     // Encode the graph data.
     let serialized_graph = time(tcx.sess, "getting serialized graph", || {
@@ -234,14 +224,12 @@ struct Stat {
     }
 
     time(tcx.sess, "encoding serialized graph", || {
-        serialized_graph.encode(encoder)
-    })?;
-
-    Ok(())
+        serialized_graph.encode(encoder).unwrap();
+    });
 }
 
 fn encode_work_product_index(work_products: &FxHashMap<WorkProductId, WorkProduct>,
-                             encoder: &mut Encoder) -> io::Result<()> {
+                             encoder: &mut Encoder) {
     let serialized_products: Vec<_> = work_products
         .iter()
         .map(|(id, work_product)| {
@@ -252,13 +240,12 @@ fn encode_work_product_index(work_products: &FxHashMap<WorkProductId, WorkProduc
         })
         .collect();
 
-    serialized_products.encode(encoder)
+    serialized_products.encode(encoder).unwrap();
 }
 
 fn encode_query_cache(tcx: TyCtxt,
-                      encoder: &mut Encoder)
-                      -> io::Result<()> {
+                      encoder: &mut Encoder) {
     time(tcx.sess, "serialize query result cache", || {
-        tcx.serialize_query_result_cache(encoder)
+        tcx.serialize_query_result_cache(encoder).unwrap();
     })
 }
index 1b5361187f36a4b016903512588ebb0a0f7f3152..6bc364b72618f9b5be97986c08baace2f534f599 100644 (file)
@@ -151,7 +151,7 @@ fn check_generic_param(&mut self, cx: &LateContext, param: &hir::GenericParam) {
             GenericParamKind::Lifetime { .. } => {}
             GenericParamKind::Type { synthetic, .. } => {
                 if synthetic.is_none() {
-                    self.check_case(cx, "type parameter", param.name.name(), param.span);
+                    self.check_case(cx, "type parameter", param.name.ident().name, param.span);
                 }
             }
         }
@@ -258,7 +258,7 @@ fn check_crate(&mut self, cx: &LateContext, cr: &hir::Crate) {
     fn check_generic_param(&mut self, cx: &LateContext, param: &hir::GenericParam) {
         match param.kind {
             GenericParamKind::Lifetime { .. } => {
-                let name = param.name.name().as_str();
+                let name = param.name.ident().as_str();
                 self.check_snake_case(cx, "lifetime", &name, Some(param.span));
             }
             GenericParamKind::Type { .. } => {}
@@ -302,20 +302,20 @@ fn check_item(&mut self, cx: &LateContext, it: &hir::Item) {
     }
 
     fn check_trait_item(&mut self, cx: &LateContext, item: &hir::TraitItem) {
-        if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(ref names)) = item.node {
+        if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(ref pnames)) = item.node {
             self.check_snake_case(cx,
                                   "trait method",
-                                  &item.name.as_str(),
+                                  &item.ident.as_str(),
                                   Some(item.span));
-            for name in names {
-                self.check_snake_case(cx, "variable", &name.node.as_str(), Some(name.span));
+            for param_name in pnames {
+                self.check_snake_case(cx, "variable", &param_name.as_str(), Some(param_name.span));
             }
         }
     }
 
     fn check_pat(&mut self, cx: &LateContext, p: &hir::Pat) {
-        if let &PatKind::Binding(_, _, ref path1, _) = &p.node {
-            self.check_snake_case(cx, "variable", &path1.node.as_str(), Some(p.span));
+        if let &PatKind::Binding(_, _, ref ident, _) = &p.node {
+            self.check_snake_case(cx, "variable", &ident.as_str(), Some(p.span));
         }
     }
 
@@ -385,7 +385,8 @@ fn check_item(&mut self, cx: &LateContext, it: &hir::Item) {
     fn check_trait_item(&mut self, cx: &LateContext, ti: &hir::TraitItem) {
         match ti.node {
             hir::TraitItemKind::Const(..) => {
-                NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ti.name, ti.span);
+                NonUpperCaseGlobals::check_upper_case(cx, "associated constant",
+                                                      ti.ident.name, ti.span);
             }
             _ => {}
         }
@@ -394,7 +395,8 @@ fn check_trait_item(&mut self, cx: &LateContext, ti: &hir::TraitItem) {
     fn check_impl_item(&mut self, cx: &LateContext, ii: &hir::ImplItem) {
         match ii.node {
             hir::ImplItemKind::Const(..) => {
-                NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ii.name, ii.span);
+                NonUpperCaseGlobals::check_upper_case(cx, "associated constant",
+                                                      ii.ident.name, ii.span);
             }
             _ => {}
         }
@@ -407,7 +409,7 @@ fn check_pat(&mut self, cx: &LateContext, p: &hir::Pat) {
                 if path.segments.len() == 1 {
                     NonUpperCaseGlobals::check_upper_case(cx,
                                                           "constant in pattern",
-                                                          path.segments[0].name,
+                                                          path.segments[0].ident.name,
                                                           path.span);
                 }
             }
index dfbfcfccf7c896f9bb6bc97598c4dcad75c644e5..b4dc5f9c85b0b78f6bd760daf8c7fe1b1c9973f9 100644 (file)
@@ -182,18 +182,14 @@ fn check_pat(&mut self, cx: &LateContext, pat: &hir::Pat) {
                     // (Issue #49588)
                     continue;
                 }
-                if let PatKind::Binding(_, _, name, None) = fieldpat.node.pat.node {
-                    let binding_ident = ast::Ident::new(name.node, name.span);
-                    if cx.tcx.find_field_index(binding_ident, &variant) ==
+                if let PatKind::Binding(_, _, ident, None) = fieldpat.node.pat.node {
+                    if cx.tcx.find_field_index(ident, &variant) ==
                        Some(cx.tcx.field_index(fieldpat.node.id, cx.tables)) {
                         let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS,
                                      fieldpat.span,
-                                     &format!("the `{}:` in this pattern is redundant",
-                                              name.node));
+                                     &format!("the `{}:` in this pattern is redundant", ident));
                         let subspan = cx.tcx.sess.codemap().span_through_char(fieldpat.span, ':');
-                        err.span_suggestion_short(subspan,
-                                                  "remove this",
-                                                  format!("{}", name.node));
+                        err.span_suggestion_short(subspan, "remove this", format!("{}", ident));
                         err.emit();
                     }
                 }
@@ -1082,7 +1078,8 @@ fn method_call_refers_to_method<'a, 'tcx>(cx: &LateContext<'a, 'tcx>,
                                 let container = ty::ImplContainer(vtable_impl.impl_def_id);
                                 // It matches if it comes from the same impl,
                                 // and has the same method name.
-                                container == method.container && callee_item.name == method.name
+                                container == method.container &&
+                                callee_item.ident.name == method.ident.name
                             }
 
                             // There's no way to know if this call is
index 9e4f695d28fd482b776398388bf47eec2eab2328..1985900b3e1170b05e03e4ae8bf48207997807a2 100644 (file)
@@ -817,7 +817,7 @@ pub fn get_associated_item(&self, id: DefIndex) -> ty::AssociatedItem {
         };
 
         ty::AssociatedItem {
-            name: name.as_symbol(),
+            ident: Ident::from_interned_str(name),
             kind,
             vis: item.visibility.decode(self),
             defaultness: container.defaultness(),
@@ -1138,9 +1138,9 @@ pub fn imported_filemaps(&'a self,
                                       src_hash,
                                       start_pos,
                                       end_pos,
-                                      lines,
-                                      multibyte_chars,
-                                      non_narrow_chars,
+                                      mut lines,
+                                      mut multibyte_chars,
+                                      mut non_narrow_chars,
                                       name_hash,
                                       .. } = filemap_to_import;
 
@@ -1151,15 +1151,12 @@ pub fn imported_filemaps(&'a self,
             // `CodeMap::new_imported_filemap()` will then translate those
             // coordinates to their new global frame of reference when the
             // offset of the FileMap is known.
-            let mut lines = lines.into_inner();
             for pos in &mut lines {
                 *pos = *pos - start_pos;
             }
-            let mut multibyte_chars = multibyte_chars.into_inner();
             for mbc in &mut multibyte_chars {
                 mbc.pos = mbc.pos - start_pos;
             }
-            let mut non_narrow_chars = non_narrow_chars.into_inner();
             for swc in &mut non_narrow_chars {
                 *swc = *swc - start_pos;
             }
index ce270006a9d0ad187d1a1e213363238804dbc545..d8a224d3badd8692390396d37d31e30bcc501e76 100644 (file)
 use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
 
 use std::hash::Hash;
-use std::io::prelude::*;
-use std::io::Cursor;
 use std::path::Path;
 use rustc_data_structures::sync::Lrc;
 use std::u32;
 use syntax::ast::{self, CRATE_NODE_ID};
-use syntax::codemap::Spanned;
 use syntax::attr;
-use syntax::symbol::Symbol;
+use syntax::symbol::keywords;
 use syntax_pos::{self, hygiene, FileName, FileMap, Span, DUMMY_SP};
 
 use rustc::hir::{self, PatKind};
@@ -52,7 +49,7 @@
 use rustc::hir::intravisit;
 
 pub struct EncodeContext<'a, 'tcx: 'a> {
-    opaque: opaque::Encoder<'a>,
+    opaque: opaque::Encoder,
     pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
     link_meta: &'a LinkMeta,
 
@@ -76,7 +73,7 @@ macro_rules! encoder_methods {
 }
 
 impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
-    type Error = <opaque::Encoder<'a> as Encoder>::Error;
+    type Error = <opaque::Encoder as Encoder>::Error;
 
     fn emit_nil(&mut self) -> Result<(), Self::Error> {
         Ok(())
@@ -480,7 +477,7 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
 
         // Index the items
         i = self.position();
-        let index = items.write_index(&mut self.opaque.cursor);
+        let index = items.write_index(&mut self.opaque);
         let index_bytes = self.position() - i;
 
         let attrs = tcx.hir.krate_attrs();
@@ -537,7 +534,7 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
 
         if self.tcx.sess.meta_stats() {
             let mut zero_bytes = 0;
-            for e in self.opaque.cursor.get_ref() {
+            for e in self.opaque.data.iter() {
                 if *e == 0 {
                     zero_bytes += 1;
                 }
@@ -977,16 +974,15 @@ fn encode_fn_arg_names_for_body(&mut self, body_id: hir::BodyId)
             let body = self.tcx.hir.body(body_id);
             self.lazy_seq(body.arguments.iter().map(|arg| {
                 match arg.pat.node {
-                    PatKind::Binding(_, _, name, _) => name.node,
-                    _ => Symbol::intern("")
+                    PatKind::Binding(_, _, ident, _) => ident.name,
+                    _ => keywords::Invalid.name(),
                 }
             }))
         })
     }
 
-    fn encode_fn_arg_names(&mut self, names: &[Spanned<ast::Name>])
-                           -> LazySeq<ast::Name> {
-        self.lazy_seq(names.iter().map(|name| name.node))
+    fn encode_fn_arg_names(&mut self, param_names: &[ast::Ident]) -> LazySeq<ast::Name> {
+        self.lazy_seq(param_names.iter().map(|ident| ident.name))
     }
 
     fn encode_optimized_mir(&mut self, def_id: DefId) -> Option<Lazy<mir::Mir<'tcx>>> {
@@ -1797,15 +1793,15 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                  link_meta: &LinkMeta)
                                  -> EncodedMetadata
 {
-    let mut cursor = Cursor::new(vec![]);
-    cursor.write_all(METADATA_HEADER).unwrap();
+    let mut encoder = opaque::Encoder::new(vec![]);
+    encoder.emit_raw_bytes(METADATA_HEADER);
 
     // Will be filled with the root position after encoding everything.
-    cursor.write_all(&[0, 0, 0, 0]).unwrap();
+    encoder.emit_raw_bytes(&[0, 0, 0, 0]);
 
-    let root = {
+    let (root, mut result) = {
         let mut ecx = EncodeContext {
-            opaque: opaque::Encoder::new(&mut cursor),
+            opaque: encoder,
             tcx,
             link_meta,
             lazy_state: LazyState::NoNode,
@@ -1821,9 +1817,9 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
         // Encode all the entries and extra information in the crate,
         // culminating in the `CrateRoot` which points to all of it.
-        ecx.encode_crate_root()
+        let root = ecx.encode_crate_root();
+        (root, ecx.opaque.into_inner())
     };
-    let mut result = cursor.into_inner();
 
     // Encode the root position.
     let header = METADATA_HEADER.len();
index 157b8385a6922e9d3e77e44b4bf0ab238ea89f24..0b4f7e579acb91fb133ee069fd7ab637458fae7c 100644 (file)
@@ -11,7 +11,7 @@
 use schema::*;
 
 use rustc::hir::def_id::{DefId, DefIndex, DefIndexAddressSpace};
-use std::io::{Cursor, Write};
+use rustc_serialize::opaque::Encoder;
 use std::slice;
 use std::u32;
 
@@ -54,15 +54,15 @@ pub fn record_index(&mut self, item: DefIndex, entry: Lazy<Entry>) {
         self.positions[space_index][array_index] = position.to_le();
     }
 
-    pub fn write_index(&self, buf: &mut Cursor<Vec<u8>>) -> LazySeq<Index> {
+    pub fn write_index(&self, buf: &mut Encoder) -> LazySeq<Index> {
         let pos = buf.position();
 
         // First we write the length of the lower range ...
-        buf.write_all(words_to_bytes(&[(self.positions[0].len() as u32).to_le()])).unwrap();
+        buf.emit_raw_bytes(words_to_bytes(&[(self.positions[0].len() as u32).to_le()]));
         // ... then the values in the lower range ...
-        buf.write_all(words_to_bytes(&self.positions[0][..])).unwrap();
+        buf.emit_raw_bytes(words_to_bytes(&self.positions[0][..]));
         // ... then the values in the higher range.
-        buf.write_all(words_to_bytes(&self.positions[1][..])).unwrap();
+        buf.emit_raw_bytes(words_to_bytes(&self.positions[1][..]));
         LazySeq::with_position_and_length(pos as usize,
             self.positions[0].len() + self.positions[1].len() + 1)
     }
index d76ca5bdf27100bb12aec617deb2b100155fc598..0fbedcaff6ee89df79564eaeb6bc9e4211d2dcec 100644 (file)
@@ -15,6 +15,7 @@
 #![feature(box_patterns)]
 #![feature(fs_read_write)]
 #![feature(libc)]
+#![feature(macro_at_most_once_rep)]
 #![cfg_attr(stage0, feature(macro_lifetime_matcher))]
 #![feature(proc_macro_internals)]
 #![feature(quote)]
index 6d77364aae02a6b783beceef0339cf6aec44e611..7b7568cfcfc39509a4c056fd4553f7da293b883b 100644 (file)
@@ -76,7 +76,37 @@ fn mir_borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> BorrowC
     let input_mir = tcx.mir_validated(def_id);
     debug!("run query mir_borrowck: {}", tcx.item_path_str(def_id));
 
-    if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir_borrowck() {
+    let mut return_early;
+
+    // Return early if we are not supposed to use the MIR borrow checker for this function.
+    return_early = !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir_borrowck();
+
+    if tcx.is_struct_constructor(def_id) {
+        // We are not borrow checking the automatically generated struct constructors
+        // because we want to accept structs such as this (taken from the `linked-hash-map`
+        // crate):
+        // ```rust
+        // struct Qey<Q: ?Sized>(Q);
+        // ```
+        // MIR of this struct constructor looks something like this:
+        // ```rust
+        // fn Qey(_1: Q) -> Qey<Q>{
+        //     let mut _0: Qey<Q>;                  // return place
+        //
+        //     bb0: {
+        //         (_0.0: Q) = move _1;             // bb0[0]: scope 0 at src/main.rs:1:1: 1:26
+        //         return;                          // bb0[1]: scope 0 at src/main.rs:1:1: 1:26
+        //     }
+        // }
+        // ```
+        // The problem here is that `(_0.0: Q) = move _1;` is valid only if `Q` is
+        // of statically known size, which is not known to be true because of the
+        // `Q: ?Sized` constraint. However, it is true because the constructor can be
+        // called only when `Q` is of statically known size.
+        return_early = true;
+    }
+
+    if return_early {
         return BorrowCheckResult {
             closure_requirements: None,
             used_mut_upvars: SmallVec::new(),
@@ -245,6 +275,7 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>(
         mir_def_id: def_id,
         move_data: &mdpe.move_data,
         param_env: param_env,
+        location_table,
         movable_generator,
         locals_are_invalidated_at_exit: match tcx.hir.body_owner_kind(id) {
             hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => false,
@@ -332,6 +363,11 @@ pub struct MirBorrowckCtxt<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
     mir: &'cx Mir<'tcx>,
     mir_def_id: DefId,
     move_data: &'cx MoveData<'tcx>,
+
+    /// Map from MIR `Location` to `LocationIndex`; created
+    /// when MIR borrowck begins.
+    location_table: &'cx LocationTable,
+
     param_env: ParamEnv<'gcx>,
     movable_generator: bool,
     /// This keeps track of whether local variables are free-ed when the function
@@ -946,8 +982,7 @@ fn check_access_for_conflict(
         let mut error_reported = false;
         let tcx = self.tcx;
         let mir = self.mir;
-        let location_table = &LocationTable::new(mir);
-        let location = location_table.start_index(context.loc);
+        let location = self.location_table.start_index(context.loc);
         let borrow_set = self.borrow_set.clone();
         each_borrow_involving_path(
             self,
index 6cb8e64b9f5b46d0226b27c2158abe524f75156d..2523711f936e1bc80578105de2bb234062dc192c 100644 (file)
@@ -12,7 +12,7 @@
 use dataflow::indexes::BorrowIndex;
 use polonius_engine::AllFacts as PoloniusAllFacts;
 use polonius_engine::Atom;
-use rustc::ty::RegionVid;
+use rustc::ty::{RegionVid, TyCtxt};
 use rustc_data_structures::indexed_vec::Idx;
 use std::error::Error;
 use std::fmt::Debug;
 crate type AllFacts = PoloniusAllFacts<RegionVid, BorrowIndex, LocationIndex>;
 
 crate trait AllFactsExt {
+    /// Returns true if there is a need to gather `AllFacts` given the
+    /// current `-Z` flags.
+    fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool;
+
     fn write_to_dir(
         &self,
         dir: impl AsRef<Path>,
@@ -31,6 +35,12 @@ fn write_to_dir(
 }
 
 impl AllFactsExt for AllFacts {
+    /// Returns true if `-Znll-facts` or `-Zpolonius` is enabled.
+    fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool {
+        tcx.sess.opts.debugging_opts.nll_facts
+            || tcx.sess.opts.debugging_opts.polonius
+    }
+
     fn write_to_dir(
         &self,
         dir: impl AsRef<Path>,
index dcb52a3b18a724cf2829b9a5653471815ea2625d..e26665e8291bf9bf825434cbdf8fc6a84bf1a17f 100644 (file)
@@ -91,9 +91,7 @@ pub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
     Option<Rc<Output<RegionVid, BorrowIndex, LocationIndex>>>,
     Option<ClosureRegionRequirements<'gcx>>,
 ) {
-    let mut all_facts = if infcx.tcx.sess.opts.debugging_opts.nll_facts
-        || infcx.tcx.sess.opts.debugging_opts.polonius
-    {
+    let mut all_facts = if AllFacts::enabled(infcx.tcx) {
         Some(AllFacts::default())
     } else {
         None
index 0eeacda467e03336ceac4d643d01beeae507ba7d..2e1f7fc9e70076103522dee7d0374ca0dd61a8c6 100644 (file)
 use super::universal_regions::UniversalRegions;
 use borrow_check::nll::region_infer::values::ToElementIndex;
 use rustc::hir::def_id::DefId;
+use rustc::infer::canonical::QueryRegionConstraint;
 use rustc::infer::error_reporting::nice_region_error::NiceRegionError;
 use rustc::infer::region_constraints::{GenericKind, VarInfos};
 use rustc::infer::InferCtxt;
 use rustc::infer::NLLRegionVariableOrigin;
-use rustc::infer::RegionObligation;
 use rustc::infer::RegionVariableOrigin;
-use rustc::infer::SubregionOrigin;
 use rustc::mir::{
     ClosureOutlivesRequirement, ClosureOutlivesSubject, ClosureRegionRequirements, Local, Location,
     Mir,
 };
-use rustc::traits::ObligationCause;
-use rustc::ty::{self, RegionVid, Ty, TypeFoldable};
+use rustc::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable};
 use rustc::util::common::{self, ErrorReported};
 use rustc_data_structures::bitvec::BitVector;
 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
 use std::fmt;
 use std::rc::Rc;
-use syntax::ast;
 use syntax_pos::Span;
 
 mod annotation;
@@ -1162,16 +1159,15 @@ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
 pub trait ClosureRegionRequirementsExt<'gcx, 'tcx> {
     fn apply_requirements(
         &self,
-        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        body_id: ast::NodeId,
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
         location: Location,
         closure_def_id: DefId,
         closure_substs: ty::ClosureSubsts<'tcx>,
-    );
+    ) -> Vec<QueryRegionConstraint<'tcx>>;
 
     fn subst_closure_mapping<T>(
         &self,
-        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
         closure_mapping: &IndexVec<RegionVid, ty::Region<'tcx>>,
         value: &T,
     ) -> T
@@ -1194,14 +1190,11 @@ impl<'gcx, 'tcx> ClosureRegionRequirementsExt<'gcx, 'tcx> for ClosureRegionRequi
     /// requirements.
     fn apply_requirements(
         &self,
-        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-        body_id: ast::NodeId,
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
         location: Location,
         closure_def_id: DefId,
         closure_substs: ty::ClosureSubsts<'tcx>,
-    ) {
-        let tcx = infcx.tcx;
-
+    ) -> Vec<QueryRegionConstraint<'tcx>> {
         debug!(
             "apply_requirements(location={:?}, closure_def_id={:?}, closure_substs={:?})",
             location, closure_def_id, closure_substs
@@ -1215,59 +1208,52 @@ fn apply_requirements(
         // into a vector.  These are the regions that we will be
         // relating to one another.
         let closure_mapping =
-            &UniversalRegions::closure_mapping(infcx, user_closure_ty, self.num_external_vids);
+            &UniversalRegions::closure_mapping(tcx, user_closure_ty, self.num_external_vids);
         debug!("apply_requirements: closure_mapping={:?}", closure_mapping);
 
         // Create the predicates.
-        for outlives_requirement in &self.outlives_requirements {
-            let outlived_region = closure_mapping[outlives_requirement.outlived_free_region];
-
-            // FIXME, this origin is not entirely suitable.
-            let origin = SubregionOrigin::CallRcvr(outlives_requirement.blame_span);
-
-            match outlives_requirement.subject {
-                ClosureOutlivesSubject::Region(region) => {
-                    let region = closure_mapping[region];
-                    debug!(
-                        "apply_requirements: region={:?} \
-                         outlived_region={:?} \
-                         outlives_requirement={:?}",
-                        region, outlived_region, outlives_requirement,
-                    );
-                    infcx.sub_regions(origin, outlived_region, region);
-                }
+        self.outlives_requirements
+            .iter()
+            .map(|outlives_requirement| {
+                let outlived_region = closure_mapping[outlives_requirement.outlived_free_region];
+
+                match outlives_requirement.subject {
+                    ClosureOutlivesSubject::Region(region) => {
+                        let region = closure_mapping[region];
+                        debug!(
+                            "apply_requirements: region={:?} \
+                             outlived_region={:?} \
+                             outlives_requirement={:?}",
+                            region, outlived_region, outlives_requirement,
+                        );
+                        ty::Binder::dummy(ty::OutlivesPredicate(region.into(), outlived_region))
+                    }
 
-                ClosureOutlivesSubject::Ty(ty) => {
-                    let ty = self.subst_closure_mapping(infcx, closure_mapping, &ty);
-                    debug!(
-                        "apply_requirements: ty={:?} \
-                         outlived_region={:?} \
-                         outlives_requirement={:?}",
-                        ty, outlived_region, outlives_requirement,
-                    );
-                    infcx.register_region_obligation(
-                        body_id,
-                        RegionObligation {
-                            sup_type: ty,
-                            sub_region: outlived_region,
-                            cause: ObligationCause::misc(outlives_requirement.blame_span, body_id),
-                        },
-                    );
+                    ClosureOutlivesSubject::Ty(ty) => {
+                        let ty = self.subst_closure_mapping(tcx, closure_mapping, &ty);
+                        debug!(
+                            "apply_requirements: ty={:?} \
+                             outlived_region={:?} \
+                             outlives_requirement={:?}",
+                            ty, outlived_region, outlives_requirement,
+                        );
+                        ty::Binder::dummy(ty::OutlivesPredicate(ty.into(), outlived_region))
+                    }
                 }
-            }
-        }
+            })
+            .collect()
     }
 
     fn subst_closure_mapping<T>(
         &self,
-        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
         closure_mapping: &IndexVec<RegionVid, ty::Region<'tcx>>,
         value: &T,
     ) -> T
     where
         T: TypeFoldable<'tcx>,
     {
-        infcx.tcx.fold_regions(value, &mut false, |r, _depth| {
+        tcx.fold_regions(value, &mut false, |r, _depth| {
             if let ty::ReClosureBound(vid) = r {
                 closure_mapping[*vid]
             } else {
index 06aaf6810faa3662984ba0dab079f57420e69579..900899b9cdebe7a3907a6a48cf3825b30bc3a169 100644 (file)
 use borrow_check::nll::region_infer::{OutlivesConstraint, RegionTest, TypeTest};
 use borrow_check::nll::type_check::Locations;
 use borrow_check::nll::universal_regions::UniversalRegions;
-use rustc::infer::region_constraints::Constraint;
-use rustc::infer::region_constraints::RegionConstraintData;
-use rustc::infer::region_constraints::{Verify, VerifyBound};
+use rustc::infer::canonical::QueryRegionConstraint;
+use rustc::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
+use rustc::infer::region_constraints::{GenericKind, VerifyBound};
+use rustc::infer::{self, SubregionOrigin};
 use rustc::mir::{Location, Mir};
-use rustc::ty;
+use rustc::ty::subst::UnpackedKind;
+use rustc::ty::{self, TyCtxt};
 use syntax::codemap::Span;
 
-crate struct ConstraintConversion<'a, 'tcx: 'a> {
+crate struct ConstraintConversion<'a, 'gcx: 'tcx, 'tcx: 'a> {
+    tcx: TyCtxt<'a, 'gcx, 'tcx>,
     mir: &'a Mir<'tcx>,
     universal_regions: &'a UniversalRegions<'tcx>,
     location_table: &'a LocationTable,
+    region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
+    implicit_region_bound: Option<ty::Region<'tcx>>,
+    param_env: ty::ParamEnv<'tcx>,
+    locations: Locations,
     outlives_constraints: &'a mut Vec<OutlivesConstraint>,
     type_tests: &'a mut Vec<TypeTest<'tcx>>,
     all_facts: &'a mut Option<AllFacts>,
-
 }
 
-impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
+impl<'a, 'gcx, 'tcx> ConstraintConversion<'a, 'gcx, 'tcx> {
     crate fn new(
+        tcx: TyCtxt<'a, 'gcx, 'tcx>,
         mir: &'a Mir<'tcx>,
         universal_regions: &'a UniversalRegions<'tcx>,
         location_table: &'a LocationTable,
+        region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
+        implicit_region_bound: Option<ty::Region<'tcx>>,
+        param_env: ty::ParamEnv<'tcx>,
+        locations: Locations,
         outlives_constraints: &'a mut Vec<OutlivesConstraint>,
         type_tests: &'a mut Vec<TypeTest<'tcx>>,
         all_facts: &'a mut Option<AllFacts>,
     ) -> Self {
         Self {
+            tcx,
             mir,
             universal_regions,
             location_table,
+            region_bound_pairs,
+            implicit_region_bound,
+            param_env,
+            locations,
             outlives_constraints,
             type_tests,
             all_facts,
         }
     }
 
-    crate fn convert(
-        &mut self,
-        locations: Locations,
-        data: &RegionConstraintData<'tcx>,
-    ) {
-        debug!("generate: constraints at: {:#?}", locations);
-        let RegionConstraintData {
-            constraints,
-            verifys,
-            givens,
-        } = data;
-
-        let span = self
-            .mir
-            .source_info(locations.from_location().unwrap_or(Location::START))
-            .span;
-
-        let at_location = locations.at_location().unwrap_or(Location::START);
-
-        for constraint in constraints.keys() {
-            debug!("generate: constraint: {:?}", constraint);
-            let (a_vid, b_vid) = match constraint {
-                Constraint::VarSubVar(a_vid, b_vid) => (*a_vid, *b_vid),
-                Constraint::RegSubVar(a_r, b_vid) => (self.to_region_vid(a_r), *b_vid),
-                Constraint::VarSubReg(a_vid, b_r) => (*a_vid, self.to_region_vid(b_r)),
-                Constraint::RegSubReg(a_r, b_r) => {
-                    (self.to_region_vid(a_r), self.to_region_vid(b_r))
-                }
-            };
-
-            // We have the constraint that `a_vid <= b_vid`. Add
-            // `b_vid: a_vid` to our region checker. Note that we
-            // reverse direction, because `regioncx` talks about
-            // "outlives" (`>=`) whereas the region constraints
-            // talk about `<=`.
-            self.add_outlives(span, b_vid, a_vid, at_location);
-
-            // In the new analysis, all outlives relations etc
-            // "take effect" at the mid point of the statement
-            // that requires them, so ignore the `at_location`.
-            if let Some(all_facts) = &mut self.all_facts {
-                if let Some(from_location) = locations.from_location() {
-                    all_facts.outlives.push((
-                        b_vid,
-                        a_vid,
-                        self.location_table.mid_index(from_location),
-                    ));
-                } else {
-                    for location in self.location_table.all_points() {
-                        all_facts.outlives.push((b_vid, a_vid, location));
+    pub(super) fn convert_all(&mut self, query_constraints: &[QueryRegionConstraint<'tcx>]) {
+        for query_constraint in query_constraints {
+            self.convert(query_constraint);
+        }
+    }
+
+    pub(super) fn convert(&mut self, query_constraint: &QueryRegionConstraint<'tcx>) {
+        debug!("generate: constraints at: {:#?}", self.locations);
+
+        // Extract out various useful fields we'll need below.
+        let ConstraintConversion {
+            tcx,
+            region_bound_pairs,
+            implicit_region_bound,
+            param_env,
+            ..
+        } = *self;
+
+        // At the moment, we never generate any "higher-ranked"
+        // region constraints like `for<'a> 'a: 'b`. At some point
+        // when we move to universes, we will, and this assertion
+        // will start to fail.
+        let ty::OutlivesPredicate(k1, r2) =
+            query_constraint.no_late_bound_regions().unwrap_or_else(|| {
+                span_bug!(
+                    self.span(),
+                    "query_constraint {:?} contained bound regions",
+                    query_constraint,
+                );
+            });
+
+        match k1.unpack() {
+            UnpackedKind::Lifetime(r1) => {
+                let r1_vid = self.to_region_vid(r1);
+                let r2_vid = self.to_region_vid(r2);
+                self.add_outlives(r1_vid, r2_vid);
+
+                // In the new analysis, all outlives relations etc
+                // "take effect" at the mid point of the statement
+                // that requires them, so ignore the `at_location`.
+                if let Some(all_facts) = &mut self.all_facts {
+                    if let Some(from_location) = self.locations.from_location() {
+                        all_facts.outlives.push((
+                            r1_vid,
+                            r2_vid,
+                            self.location_table.mid_index(from_location),
+                        ));
+                    } else {
+                        for location in self.location_table.all_points() {
+                            all_facts.outlives.push((r1_vid, r2_vid, location));
+                        }
                     }
                 }
             }
-        }
 
-        for verify in verifys {
-            let type_test = self.verify_to_type_test(verify, span, locations);
-            self.add_type_test(type_test);
+            UnpackedKind::Type(t1) => {
+                // we don't actually use this for anything, but
+                // the `TypeOutlives` code needs an origin.
+                let origin = infer::RelateParamBound(self.span(), t1);
+
+                TypeOutlives::new(
+                    &mut *self,
+                    tcx,
+                    region_bound_pairs,
+                    implicit_region_bound,
+                    param_env,
+                ).type_must_outlive(origin, t1, r2);
+            }
         }
-
-        assert!(
-            givens.is_empty(),
-            "MIR type-checker does not use givens (thank goodness)"
-        );
     }
 
     fn verify_to_type_test(
         &self,
-        verify: &Verify<'tcx>,
-        span: Span,
-        locations: Locations,
+        generic_kind: GenericKind<'tcx>,
+        region: ty::Region<'tcx>,
+        bound: VerifyBound<'tcx>,
     ) -> TypeTest<'tcx> {
-        let generic_kind = verify.kind;
-
-        let lower_bound = self.to_region_vid(verify.region);
+        let lower_bound = self.to_region_vid(region);
 
-        let point = locations.at_location().unwrap_or(Location::START);
+        let point = self.locations.at_location().unwrap_or(Location::START);
 
-        let test = self.verify_bound_to_region_test(&verify.bound);
+        let test = self.verify_bound_to_region_test(&bound);
 
         TypeTest {
             generic_kind,
             lower_bound,
             point,
-            span,
+            span: self.span(),
             test,
         }
     }
@@ -168,13 +187,16 @@ fn to_region_vid(&self, r: ty::Region<'tcx>) -> ty::RegionVid {
         self.universal_regions.to_region_vid(r)
     }
 
-    fn add_outlives(
-        &mut self,
-        span: Span,
-        sup: ty::RegionVid,
-        sub: ty::RegionVid,
-        point: Location,
-    ) {
+    fn span(&self) -> Span {
+        self.mir
+            .source_info(self.locations.from_location().unwrap_or(Location::START))
+            .span
+    }
+
+    fn add_outlives(&mut self, sup: ty::RegionVid, sub: ty::RegionVid) {
+        let span = self.span();
+        let point = self.locations.at_location().unwrap_or(Location::START);
+
         self.outlives_constraints.push(OutlivesConstraint {
             span,
             sub,
@@ -188,3 +210,29 @@ fn add_type_test(&mut self, type_test: TypeTest<'tcx>) {
         self.type_tests.push(type_test);
     }
 }
+
+impl<'a, 'b, 'gcx, 'tcx> TypeOutlivesDelegate<'tcx>
+    for &'a mut ConstraintConversion<'b, 'gcx, 'tcx>
+{
+    fn push_sub_region_constraint(
+        &mut self,
+        _origin: SubregionOrigin<'tcx>,
+        a: ty::Region<'tcx>,
+        b: ty::Region<'tcx>,
+    ) {
+        let b = self.universal_regions.to_region_vid(b);
+        let a = self.universal_regions.to_region_vid(a);
+        self.add_outlives(b, a);
+    }
+
+    fn push_verify(
+        &mut self,
+        _origin: SubregionOrigin<'tcx>,
+        kind: GenericKind<'tcx>,
+        a: ty::Region<'tcx>,
+        bound: VerifyBound<'tcx>,
+    ) {
+        let type_test = self.verify_to_type_test(kind, a, bound);
+        self.add_type_test(type_test);
+    }
+}
index d44eed65201cd91112c3ed96ffd607a9a6b8875f..770a0614811dc32ddcce753f8cc26e1f95c2bf4b 100644 (file)
@@ -23,6 +23,7 @@
 use rustc::infer::InferOk;
 use rustc::mir::visit::TyContext;
 use rustc::mir::*;
+use rustc::traits::query::type_op::custom::CustomTypeOp;
 use rustc::traits::{ObligationCause, PredicateObligations};
 use rustc::ty::subst::Subst;
 use rustc::ty::Ty;
@@ -50,7 +51,7 @@ pub(super) fn equate_inputs_and_outputs(
         // Equate expected input tys with those in the MIR.
         let argument_locals = (1..).map(Local::new);
         for (&unnormalized_input_ty, local) in unnormalized_input_tys.iter().zip(argument_locals) {
-            let input_ty = self.normalize(&unnormalized_input_ty, Locations::All);
+            let input_ty = self.normalize(unnormalized_input_ty, Locations::All);
             let mir_input_ty = mir.local_decls[local].ty;
             self.equate_normalized_input_or_output(input_ty, mir_input_ty);
         }
@@ -70,72 +71,76 @@ pub(super) fn equate_inputs_and_outputs(
             "equate_inputs_and_outputs: unnormalized_output_ty={:?}",
             unnormalized_output_ty
         );
-        let output_ty = self.normalize(&unnormalized_output_ty, Locations::All);
+        let output_ty = self.normalize(unnormalized_output_ty, Locations::All);
         debug!(
             "equate_inputs_and_outputs: normalized output_ty={:?}",
             output_ty
         );
+        let param_env = self.param_env;
         let mir_output_ty = mir.local_decls[RETURN_PLACE].ty;
         let anon_type_map =
             self.fully_perform_op(
                 Locations::All,
-                || format!("input_output"),
-                |cx| {
-                    let mut obligations = ObligationAccumulator::default();
-
-                    let dummy_body_id = ObligationCause::dummy().body_id;
-                    let (output_ty, anon_type_map) = obligations.add(infcx.instantiate_anon_types(
-                        mir_def_id,
-                        dummy_body_id,
-                        cx.param_env,
-                        &output_ty,
-                    ));
-                    debug!(
-                        "equate_inputs_and_outputs: instantiated output_ty={:?}",
-                        output_ty
-                    );
-                    debug!(
-                        "equate_inputs_and_outputs: anon_type_map={:#?}",
-                        anon_type_map
-                    );
-
-                    debug!(
-                        "equate_inputs_and_outputs: mir_output_ty={:?}",
-                        mir_output_ty
-                    );
-                    obligations.add(
-                        infcx
-                            .at(&ObligationCause::dummy(), cx.param_env)
-                            .eq(output_ty, mir_output_ty)?,
-                    );
-
-                    for (&anon_def_id, anon_decl) in &anon_type_map {
-                        let anon_defn_ty = tcx.type_of(anon_def_id);
-                        let anon_defn_ty = anon_defn_ty.subst(tcx, anon_decl.substs);
-                        let anon_defn_ty = renumber::renumber_regions(
-                            cx.infcx,
-                            TyContext::Location(Location::START),
-                            &anon_defn_ty,
+                CustomTypeOp::new(
+                    |infcx| {
+                        let mut obligations = ObligationAccumulator::default();
+
+                        let dummy_body_id = ObligationCause::dummy().body_id;
+                        let (output_ty, anon_type_map) =
+                            obligations.add(infcx.instantiate_anon_types(
+                                mir_def_id,
+                                dummy_body_id,
+                                param_env,
+                                &output_ty,
+                            ));
+                        debug!(
+                            "equate_inputs_and_outputs: instantiated output_ty={:?}",
+                            output_ty
                         );
                         debug!(
-                            "equate_inputs_and_outputs: concrete_ty={:?}",
-                            anon_decl.concrete_ty
+                            "equate_inputs_and_outputs: anon_type_map={:#?}",
+                            anon_type_map
+                        );
+
+                        debug!(
+                            "equate_inputs_and_outputs: mir_output_ty={:?}",
+                            mir_output_ty
                         );
-                        debug!("equate_inputs_and_outputs: anon_defn_ty={:?}", anon_defn_ty);
                         obligations.add(
                             infcx
-                                .at(&ObligationCause::dummy(), cx.param_env)
-                                .eq(anon_decl.concrete_ty, anon_defn_ty)?,
+                                .at(&ObligationCause::dummy(), param_env)
+                                .eq(output_ty, mir_output_ty)?,
                         );
-                    }
-
-                    debug!("equate_inputs_and_outputs: equated");
 
-                    Ok(InferOk {
-                        value: Some(anon_type_map),
-                        obligations: obligations.into_vec(),
-                    })
-                },
+                        for (&anon_def_id, anon_decl) in &anon_type_map {
+                            let anon_defn_ty = tcx.type_of(anon_def_id);
+                            let anon_defn_ty = anon_defn_ty.subst(tcx, anon_decl.substs);
+                            let anon_defn_ty = renumber::renumber_regions(
+                                infcx,
+                                TyContext::Location(Location::START),
+                                &anon_defn_ty,
+                            );
+                            debug!(
+                                "equate_inputs_and_outputs: concrete_ty={:?}",
+                                anon_decl.concrete_ty
+                            );
+                            debug!("equate_inputs_and_outputs: anon_defn_ty={:?}", anon_defn_ty);
+                            obligations.add(
+                                infcx
+                                    .at(&ObligationCause::dummy(), param_env)
+                                    .eq(anon_decl.concrete_ty, anon_defn_ty)?,
+                            );
+                        }
+
+                        debug!("equate_inputs_and_outputs: equated");
+
+                        Ok(InferOk {
+                            value: Some(anon_type_map),
+                            obligations: obligations.into_vec(),
+                        })
+                    },
+                    || format!("input_output"),
+                ),
             ).unwrap_or_else(|terr| {
                 span_mirbug!(
                     self,
@@ -155,14 +160,16 @@ pub(super) fn equate_inputs_and_outputs(
         if let Some(anon_type_map) = anon_type_map {
             self.fully_perform_op(
                 Locations::All,
-                || format!("anon_type_map"),
-                |_cx| {
-                    infcx.constrain_anon_types(&anon_type_map, universal_regions);
-                    Ok(InferOk {
-                        value: (),
-                        obligations: vec![],
-                    })
-                },
+                CustomTypeOp::new(
+                    |_cx| {
+                        infcx.constrain_anon_types(&anon_type_map, universal_regions);
+                        Ok(InferOk {
+                            value: (),
+                            obligations: vec![],
+                        })
+                    },
+                    || format!("anon_type_map"),
+                ),
             ).unwrap();
         }
     }
index 80f5fe4184f9db508757dd51fd05f566fce0d1f1..f27de92c6215a9eecdb7bfafd7fb07ef7588efac 100644 (file)
 use dataflow::move_paths::{HasMoveData, MoveData};
 use dataflow::MaybeInitializedPlaces;
 use dataflow::{FlowAtLocation, FlowsAtLocation};
-use rustc::infer::region_constraints::RegionConstraintData;
+use rustc::infer::canonical::QueryRegionConstraint;
 use rustc::mir::Local;
 use rustc::mir::{BasicBlock, Location, Mir};
-use rustc::traits::ObligationCause;
-use rustc::ty::subst::Kind;
+use rustc::traits::query::dropck_outlives::DropckOutlivesResult;
+use rustc::traits::query::type_op::outlives::DropckOutlives;
+use rustc::traits::query::type_op::TypeOp;
 use rustc::ty::{Ty, TypeFoldable};
 use rustc_data_structures::fx::FxHashMap;
 use std::rc::Rc;
@@ -70,8 +71,8 @@ struct TypeLivenessGenerator<'gen, 'typeck, 'flow, 'gcx, 'tcx>
 }
 
 struct DropData<'tcx> {
-    dropped_kinds: Vec<Kind<'tcx>>,
-    region_constraint_data: Option<Rc<RegionConstraintData<'tcx>>>,
+    dropck_result: DropckOutlivesResult<'tcx>,
+    region_constraint_data: Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>,
 }
 
 impl<'gen, 'typeck, 'flow, 'gcx, 'tcx> TypeLivenessGenerator<'gen, 'typeck, 'flow, 'gcx, 'tcx> {
@@ -170,8 +171,7 @@ fn push_type_live_constraint<T>(
         );
 
         cx.tcx().for_each_free_region(&value, |live_region| {
-            cx
-                .constraints
+            cx.constraints
                 .liveness_set
                 .push((live_region, location, cause.clone()));
         });
@@ -199,14 +199,19 @@ fn add_drop_live_constraint(
         });
 
         if let Some(data) = &drop_data.region_constraint_data {
-            self.cx
-                .push_region_constraints(location.at_self(), data.clone());
+            self.cx.push_region_constraints(location.at_self(), data);
         }
 
+        drop_data.dropck_result.report_overflows(
+            self.cx.infcx.tcx,
+            self.mir.source_info(location).span,
+            dropped_ty,
+        );
+
         // All things in the `outlives` array may be touched by
         // the destructor and must be live at this point.
         let cause = Cause::DropVar(dropped_local, location);
-        for &kind in &drop_data.dropped_kinds {
+        for &kind in &drop_data.dropck_result.kinds {
             Self::push_type_live_constraint(&mut self.cx, kind, location, cause);
         }
     }
@@ -217,19 +222,14 @@ fn compute_drop_data(
     ) -> DropData<'tcx> {
         debug!("compute_drop_data(dropped_ty={:?})", dropped_ty,);
 
-        let (dropped_kinds, region_constraint_data) =
-            cx.fully_perform_op_and_get_region_constraint_data(
-                || format!("compute_drop_data(dropped_ty={:?})", dropped_ty),
-                |cx| {
-                    Ok(cx
-                        .infcx
-                        .at(&ObligationCause::dummy(), cx.param_env)
-                        .dropck_outlives(dropped_ty))
-                },
-            ).unwrap();
+        let param_env = cx.param_env;
+        let (dropck_result, region_constraint_data) = param_env
+            .and(DropckOutlives::new(dropped_ty))
+            .fully_perform(cx.infcx)
+            .unwrap();
 
         DropData {
-            dropped_kinds,
+            dropck_result,
             region_constraint_data,
         }
     }
index d25cec7979140cdee49791fcfd523fe1c72bdfb6..2da2b10edb8032355f06089d3df72b409ea68e1d 100644 (file)
 use dataflow::FlowAtLocation;
 use dataflow::MaybeInitializedPlaces;
 use rustc::hir::def_id::DefId;
-use rustc::infer::region_constraints::{GenericKind, RegionConstraintData};
-use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
+use rustc::infer::canonical::QueryRegionConstraint;
+use rustc::infer::region_constraints::GenericKind;
+use rustc::infer::{InferCtxt, LateBoundRegionConversionTime};
 use rustc::mir::interpret::EvalErrorKind::BoundsCheck;
 use rustc::mir::tcx::PlaceTy;
 use rustc::mir::visit::{PlaceContext, Visitor};
 use rustc::mir::*;
-use rustc::traits::query::NoSolution;
-use rustc::traits::{self, ObligationCause, Normalized, TraitEngine};
-use rustc::ty::error::TypeError;
+use rustc::traits::query::type_op;
+use rustc::traits::query::{Fallible, NoSolution};
 use rustc::ty::fold::TypeFoldable;
 use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, TypeVariants};
 use std::fmt;
-use std::rc::Rc;
 use syntax_pos::{Span, DUMMY_SP};
 use transform::{MirPass, MirSource};
 use util::liveness::LivenessResults;
@@ -286,9 +285,10 @@ fn sanitize_constant(&mut self, constant: &Constant<'tcx>, location: Location) {
 
                     let instantiated_predicates =
                         tcx.predicates_of(def_id).instantiate(tcx, substs);
-                    let predicates =
-                        type_checker.normalize(&instantiated_predicates.predicates, location);
-                    type_checker.prove_predicates(predicates, location);
+                    type_checker.normalize_and_prove_instantiated_predicates(
+                        instantiated_predicates,
+                        location,
+                    );
                 }
 
                 value.ty
@@ -344,7 +344,7 @@ fn sanitize_place(
             Place::Static(box Static { def_id, ty: sty }) => {
                 let sty = self.sanitize_type(place, sty);
                 let ty = self.tcx().type_of(def_id);
-                let ty = self.cx.normalize(&ty, location);
+                let ty = self.cx.normalize(ty, location);
                 if let Err(terr) = self.cx.eq_types(ty, sty, location.at_self()) {
                     span_mirbug!(
                         self,
@@ -731,15 +731,11 @@ fn new(
     fn fully_perform_op<R>(
         &mut self,
         locations: Locations,
-        describe_op: impl Fn() -> String,
-        op: impl FnOnce(&mut Self) -> InferResult<'tcx, R>,
-    ) -> Result<R, TypeError<'tcx>> {
-        let (r, opt_data) = self.fully_perform_op_and_get_region_constraint_data(
-            || format!("{} at {:?}", describe_op(), locations),
-            op,
-        )?;
-
-        if let Some(data) = opt_data {
+        op: impl type_op::TypeOp<'gcx, 'tcx, Output = R>,
+    ) -> Fallible<R> {
+        let (r, opt_data) = op.fully_perform(self.infcx)?;
+
+        if let Some(data) = &opt_data {
             self.push_region_constraints(locations, data);
         }
 
@@ -749,7 +745,7 @@ fn fully_perform_op<R>(
     fn push_region_constraints(
         &mut self,
         locations: Locations,
-        data: Rc<RegionConstraintData<'tcx>>,
+        data: &[QueryRegionConstraint<'tcx>],
     ) {
         debug!(
             "push_region_constraints: constraints generated at {:?} are {:#?}",
@@ -758,55 +754,18 @@ fn push_region_constraints(
 
         if let Some(borrowck_context) = &mut self.borrowck_context {
             constraint_conversion::ConstraintConversion::new(
+                self.infcx.tcx,
                 self.mir,
                 borrowck_context.universal_regions,
                 borrowck_context.location_table,
+                self.region_bound_pairs,
+                self.implicit_region_bound,
+                self.param_env,
+                locations,
                 &mut self.constraints.outlives_constraints,
                 &mut self.constraints.type_tests,
                 &mut borrowck_context.all_facts,
-            ).convert(locations, &data);
-        }
-    }
-
-    /// Helper for `fully_perform_op`, but also used on its own
-    /// sometimes to enable better caching: executes `op` fully (along
-    /// with resulting obligations) and returns the full set of region
-    /// obligations. If the same `op` were to be performed at some
-    /// other location, then the same set of region obligations would
-    /// be generated there, so this can be useful for caching.
-    fn fully_perform_op_and_get_region_constraint_data<R>(
-        &mut self,
-        describe_op: impl Fn() -> String,
-        op: impl FnOnce(&mut Self) -> InferResult<'tcx, R>,
-    ) -> Result<(R, Option<Rc<RegionConstraintData<'tcx>>>), TypeError<'tcx>> {
-        if cfg!(debug_assertions) {
-            info!(
-                "fully_perform_op_and_get_region_constraint_data({})",
-                describe_op(),
-            );
-        }
-
-        let mut fulfill_cx = TraitEngine::new(self.infcx.tcx);
-        let dummy_body_id = ObligationCause::dummy().body_id;
-        let InferOk { value, obligations } = self.infcx.commit_if_ok(|_| op(self))?;
-        debug_assert!(obligations.iter().all(|o| o.cause.body_id == dummy_body_id));
-        fulfill_cx.register_predicate_obligations(self.infcx, obligations);
-        if let Err(e) = fulfill_cx.select_all_or_error(self.infcx) {
-            span_mirbug!(self, "", "errors selecting obligation: {:?}", e);
-        }
-
-        self.infcx.process_registered_region_obligations(
-            self.region_bound_pairs,
-            self.implicit_region_bound,
-            self.param_env,
-            dummy_body_id,
-        );
-
-        let data = self.infcx.take_and_reset_region_constraints();
-        if data.is_empty() {
-            Ok((value, None))
-        } else {
-            Ok((value, Some(Rc::new(data))))
+            ).convert_all(&data);
         }
     }
 
@@ -815,38 +774,17 @@ fn sub_types(
         sub: Ty<'tcx>,
         sup: Ty<'tcx>,
         locations: Locations,
-    ) -> UnitResult<'tcx> {
-        // Micro-optimization.
-        if sub == sup {
-            return Ok(());
-        }
-
+    ) -> Fallible<()> {
+        let param_env = self.param_env;
         self.fully_perform_op(
             locations,
-            || format!("sub_types({:?} <: {:?})", sub, sup),
-            |this| {
-                this.infcx
-                    .at(&ObligationCause::dummy(), this.param_env)
-                    .sup(sup, sub)
-            },
+            param_env.and(type_op::subtype::Subtype::new(sub, sup)),
         )
     }
 
-    fn eq_types(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, locations: Locations) -> UnitResult<'tcx> {
-        // Micro-optimization.
-        if a == b {
-            return Ok(());
-        }
-
-        self.fully_perform_op(
-            locations,
-            || format!("eq_types({:?} = {:?})", a, b),
-            |this| {
-                this.infcx
-                    .at(&ObligationCause::dummy(), this.param_env)
-                    .eq(b, a)
-            },
-        )
+    fn eq_types(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, locations: Locations) -> Fallible<()> {
+        let param_env = self.param_env;
+        self.fully_perform_op(locations, param_env.and(type_op::eq::Eq::new(b, a)))
     }
 
     fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
@@ -873,6 +811,11 @@ fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Loca
                     );
                 }
                 self.check_rvalue(mir, rv, location);
+                let trait_ref = ty::TraitRef {
+                    def_id: tcx.lang_items().sized_trait().unwrap(),
+                    substs: tcx.mk_substs_trait(place_ty, &[]),
+                };
+                self.prove_trait_ref(trait_ref, location);
             }
             StatementKind::SetDiscriminant {
                 ref place,
@@ -1035,7 +978,7 @@ fn check_terminator(
                     LateBoundRegionConversionTime::FnCall,
                     &sig,
                 );
-                let sig = self.normalize(&sig, term_location);
+                let sig = self.normalize(sig, term_location);
                 self.check_call_dest(mir, term, &sig, destination, term_location);
 
                 self.prove_predicates(
@@ -1323,7 +1266,7 @@ fn aggregate_field_ty(
                 let variant = &def.variants[variant_index];
                 let adj_field_index = active_field_index.unwrap_or(field_index);
                 if let Some(field) = variant.fields.get(adj_field_index) {
-                    Ok(self.normalize(&field.ty(tcx, substs), location))
+                    Ok(self.normalize(field.ty(tcx, substs), location))
                 } else {
                     Err(FieldAccessError::OutOfRange {
                         field_count: variant.fields.len(),
@@ -1397,7 +1340,7 @@ fn check_rvalue(&mut self, mir: &Mir<'tcx>, rvalue: &Rvalue<'tcx>, location: Loc
                     // function definition. When we extract the
                     // signature, it comes from the `fn_sig` query,
                     // and hence may contain unnormalized results.
-                    let fn_sig = self.normalize(&fn_sig, location);
+                    let fn_sig = self.normalize(fn_sig, location);
 
                     let ty_fn_ptr_from = tcx.mk_fn_ptr(fn_sig);
 
@@ -1442,7 +1385,7 @@ fn check_rvalue(&mut self, mir: &Mir<'tcx>, rvalue: &Rvalue<'tcx>, location: Loc
                     // function definition. When we extract the
                     // signature, it comes from the `fn_sig` query,
                     // and hence may contain unnormalized results.
-                    let fn_sig = self.normalize(&fn_sig, location);
+                    let fn_sig = self.normalize(fn_sig, location);
 
                     let ty_fn_ptr_from = tcx.safe_to_unsafe_fn_ty(fn_sig);
 
@@ -1568,14 +1511,17 @@ fn prove_aggregate_predicates(
                 if let Some(closure_region_requirements) =
                     tcx.mir_borrowck(*def_id).closure_requirements
                 {
-                    let dummy_body_id = ObligationCause::dummy().body_id;
-                    closure_region_requirements.apply_requirements(
-                        self.infcx,
-                        dummy_body_id,
+                    let closure_constraints = closure_region_requirements.apply_requirements(
+                        self.infcx.tcx,
                         location,
                         *def_id,
                         *substs,
                     );
+
+                    self.push_region_constraints(
+                        location.at_self(),
+                        &closure_constraints,
+                    );
                 }
 
                 tcx.predicates_of(*def_id).instantiate(tcx, substs.substs)
@@ -1588,9 +1534,7 @@ fn prove_aggregate_predicates(
             AggregateKind::Array(_) | AggregateKind::Tuple => ty::InstantiatedPredicates::empty(),
         };
 
-        let predicates = self.normalize(&instantiated_predicates.predicates, location);
-        debug!("prove_aggregate_predicates: predicates={:?}", predicates);
-        self.prove_predicates(predicates, location);
+        self.normalize_and_prove_instantiated_predicates(instantiated_predicates, location);
     }
 
     fn prove_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>, location: Location) {
@@ -1602,46 +1546,45 @@ fn prove_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>, location: Location)
         );
     }
 
-    fn prove_predicates<T>(&mut self, predicates: T, location: Location)
-    where
-        T: IntoIterator<Item = ty::Predicate<'tcx>> + Clone,
-    {
-        let cause = ObligationCause::dummy();
-        let obligations: Vec<_> = predicates
-            .into_iter()
-            .map(|p| traits::Obligation::new(cause.clone(), self.param_env, p))
-            .collect();
-
-        // Micro-optimization
-        if obligations.is_empty() {
-            return;
+    fn normalize_and_prove_instantiated_predicates(
+        &mut self,
+        instantiated_predicates: ty::InstantiatedPredicates<'tcx>,
+        location: Location,
+    ) {
+        for predicate in instantiated_predicates.predicates {
+            let predicate = self.normalize(predicate, location);
+            self.prove_predicate(predicate, location);
         }
+    }
 
-        // This intermediate vector is mildly unfortunate, in that we
-        // sometimes create it even when logging is disabled, but only
-        // if debug-info is enabled, and I doubt it is actually
-        // expensive. -nmatsakis
-        let predicates_vec: Vec<_> = if cfg!(debug_assertions) {
-            obligations.iter().map(|o| o.predicate).collect()
-        } else {
-            Vec::new()
-        };
+    fn prove_predicates(
+        &mut self,
+        predicates: impl IntoIterator<Item = ty::Predicate<'tcx>>,
+        location: Location,
+    ) {
+        for predicate in predicates {
+            debug!(
+                "prove_predicates(predicate={:?}, location={:?})",
+                predicate, location,
+            );
 
+            self.prove_predicate(predicate, location);
+        }
+    }
+
+    fn prove_predicate(&mut self, predicate: ty::Predicate<'tcx>, location: Location) {
         debug!(
-            "prove_predicates(predicates={:?}, location={:?})",
-            predicates_vec, location,
+            "prove_predicate(predicate={:?}, location={:?})",
+            predicate, location,
         );
 
+        let param_env = self.param_env;
         self.fully_perform_op(
             location.at_self(),
-            || format!("prove_predicates({:?})", predicates_vec),
-            |_this| {
-                Ok(InferOk {
-                    value: (),
-                    obligations,
-                })
-            },
-        ).unwrap()
+            param_env.and(type_op::prove_predicate::ProvePredicate::new(predicate)),
+        ).unwrap_or_else(|NoSolution| {
+            span_mirbug!(self, NoSolution, "could not prove {:?}", predicate);
+        })
     }
 
     fn typeck_mir(&mut self, mir: &Mir<'tcx>) {
@@ -1670,35 +1613,19 @@ fn typeck_mir(&mut self, mir: &Mir<'tcx>) {
         }
     }
 
-    fn normalize<T>(&mut self, value: &T, location: impl ToLocations) -> T
+    fn normalize<T>(&mut self, value: T, location: impl ToLocations) -> T
     where
-        T: fmt::Debug + TypeFoldable<'tcx>,
+        T: type_op::normalize::Normalizable<'gcx, 'tcx> + Copy,
     {
-        // Micro-optimization: avoid work when we don't have to
-        if !value.has_projections() {
-            return value.clone();
-        }
-
         debug!("normalize(value={:?}, location={:?})", value, location);
+        let param_env = self.param_env;
         self.fully_perform_op(
             location.to_locations(),
-            || format!("normalize(value={:?})", value),
-            |this| {
-                let Normalized { value, obligations } = this
-                    .infcx
-                    .at(&ObligationCause::dummy(), this.param_env)
-                    .normalize(value)
-                    .unwrap_or_else(|NoSolution| {
-                        span_bug!(
-                            this.last_span,
-                            "normalization of `{:?}` failed at {:?}",
-                            value,
-                            location,
-                        );
-                    });
-                Ok(InferOk { value, obligations })
-            },
-        ).unwrap()
+            param_env.and(type_op::normalize::Normalize::new(value)),
+        ).unwrap_or_else(|NoSolution| {
+            span_mirbug!(self, NoSolution, "failed to normalize `{:?}`", value);
+            value
+        })
     }
 }
 
@@ -1720,18 +1647,18 @@ fn run_pass<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &
             // broken MIR, so try not to report duplicate errors.
             return;
         }
+
+        if tcx.is_struct_constructor(def_id) {
+            // We just assume that the automatically generated struct constructors are
+            // correct. See the comment in the `mir_borrowck` implementation for an
+            // explanation why we need this.
+            return;
+        }
+
         let param_env = tcx.param_env(def_id);
         tcx.infer_ctxt().enter(|infcx| {
-            let _ = type_check_internal(
-                &infcx,
-                def_id,
-                param_env,
-                mir,
-                &[],
-                None,
-                None,
-                &mut |_| (),
-            );
+            let _ =
+                type_check_internal(&infcx, def_id, param_env, mir, &[], None, None, &mut |_| ());
 
             // For verification purposes, we just ignore the resulting
             // region constraint sets. Not our problem. =)
index 2bb96a856ce6d5a692c28aac5b0c32dc510c9a4e..ec8cd386679c39970592a29f82593069cfe3e189 100644 (file)
@@ -238,13 +238,13 @@ pub fn new(
     /// `'1: '2`, then the caller would impose the constraint that
     /// `V[1]: V[2]`.
     pub fn closure_mapping(
-        infcx: &InferCtxt<'_, '_, 'tcx>,
+        tcx: TyCtxt<'_, '_, 'tcx>,
         closure_ty: Ty<'tcx>,
         expected_num_vars: usize,
     ) -> IndexVec<RegionVid, ty::Region<'tcx>> {
         let mut region_mapping = IndexVec::with_capacity(expected_num_vars);
-        region_mapping.push(infcx.tcx.types.re_static);
-        infcx.tcx.for_each_free_region(&closure_ty, |fr| {
+        region_mapping.push(tcx.types.re_static);
+        tcx.for_each_free_region(&closure_ty, |fr| {
             region_mapping.push(fr);
         });
 
index 85671414618038781c9e4c2f81dcedf885e2112a..ab20b7bfea2b9490f599e4b62a33034d08e82a26 100644 (file)
@@ -538,8 +538,8 @@ fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>,
                 mutability: Mutability::Not,
             };
             if let Some(hir::map::NodeBinding(pat)) = tcx.hir.find(var_id) {
-                if let hir::PatKind::Binding(_, _, ref name, _) = pat.node {
-                    decl.debug_name = name.node;
+                if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
+                    decl.debug_name = ident.name;
 
                     let bm = *hir.tables.pat_binding_modes()
                                         .get(pat.hir_id)
@@ -675,8 +675,8 @@ fn args_and_body(&mut self,
             // If this is a simple binding pattern, give the local a nice name for debuginfo.
             let mut name = None;
             if let Some(pat) = pattern {
-                if let hir::PatKind::Binding(_, _, ref ident, _) = pat.node {
-                    name = Some(ident.node);
+                if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
+                    name = Some(ident.name);
                 }
             }
 
index 8ff1738394e1b12e03737440e47024096f3a8b50..167e031fbeec1dd633ff82357f5c9a1c1c4622d9 100644 (file)
@@ -235,7 +235,7 @@ pub fn trait_method(&mut self,
         let method_name = Symbol::intern(method_name);
         let substs = self.tcx.mk_substs_trait(self_ty, params);
         for item in self.tcx.associated_items(trait_def_id) {
-            if item.kind == ty::AssociatedKind::Method && item.name == method_name {
+            if item.kind == ty::AssociatedKind::Method && item.ident.name == method_name {
                 let method_ty = self.tcx.type_of(item.def_id);
                 let method_ty = method_ty.subst(self.tcx, substs);
                 return (method_ty,
index 41024e60202739161e46548566c4764385f8ac24..83361ea57c371e49aac538747b06130ed0351874 100644 (file)
@@ -12,8 +12,6 @@
 use self::Usefulness::*;
 use self::WitnessPreference::*;
 
-use rustc::middle::const_val::ConstVal;
-
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::indexed_vec::Idx;
 
@@ -544,14 +542,9 @@ fn max_slice_length<'p, 'a: 'p, 'tcx: 'a, I>(
 
     for row in patterns {
         match *row.kind {
-            PatternKind::Constant {
-                value: const_val @ &ty::Const {
-                    val: ConstVal::Value(..),
-                    ..
-                }
-            } => {
-                if let Some(ptr) = const_val.to_ptr() {
-                    let is_array_ptr = const_val.ty
+            PatternKind::Constant { value } => {
+                if let Some(ptr) = value.to_ptr() {
+                    let is_array_ptr = value.ty
                         .builtin_deref(true)
                         .and_then(|t| t.ty.builtin_index())
                         .map_or(false, |t| t == cx.tcx.types.u8);
@@ -933,13 +926,14 @@ fn slice_pat_covered_by_constructor<'tcx>(
     suffix: &[Pattern<'tcx>]
 ) -> Result<bool, ErrorReported> {
     let data: &[u8] = match *ctor {
-        ConstantValue(&ty::Const { val: ConstVal::Value(const_val), ty }) => {
-            let val = match const_val {
-                ConstValue::ByRef(..) => bug!("unexpected ConstValue::ByRef"),
+        ConstantValue(const_val) => {
+            let val = match const_val.val {
+                ConstValue::Unevaluated(..) |
+                ConstValue::ByRef(..) => bug!("unexpected ConstValue: {:?}", const_val),
                 ConstValue::Scalar(val) | ConstValue::ScalarPair(val, _) => val,
             };
             if let Ok(ptr) = val.to_ptr() {
-                let is_array_ptr = ty
+                let is_array_ptr = const_val.ty
                     .builtin_deref(true)
                     .and_then(|t| t.ty.builtin_index())
                     .map_or(false, |t| t == tcx.types.u8);
index 24301e970f506dee709e332988ee24bd85796450..e04cdcfa02773f520d4d0c1c06b78b133f78b775 100644 (file)
@@ -140,13 +140,13 @@ fn report_inlining_errors(&self, pat_span: Span) {
                 }
                 PatternError::FloatBug => {
                     // FIXME(#31407) this is only necessary because float parsing is buggy
-                    ::rustc::middle::const_val::struct_error(
+                    ::rustc::mir::interpret::struct_error(
                         self.tcx.at(pat_span),
                         "could not evaluate float literal (see issue #31407)",
                     ).emit();
                 }
                 PatternError::NonConstPath(span) => {
-                    ::rustc::middle::const_val::struct_error(
+                    ::rustc::mir::interpret::struct_error(
                         self.tcx.at(span),
                         "runtime values cannot be referenced in patterns",
                     ).emit();
@@ -308,7 +308,7 @@ fn check_irrefutable(&self, pat: &'tcx Pat, origin: &str) {
 
 fn check_for_bindings_named_the_same_as_variants(cx: &MatchVisitor, pat: &Pat) {
     pat.walk(|p| {
-        if let PatKind::Binding(_, _, name, None) = p.node {
+        if let PatKind::Binding(_, _, ident, None) = p.node {
             let bm = *cx.tables
                         .pat_binding_modes()
                         .get(p.hir_id)
@@ -321,17 +321,17 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchVisitor, pat: &Pat) {
             let pat_ty = cx.tables.pat_ty(p);
             if let ty::TyAdt(edef, _) = pat_ty.sty {
                 if edef.is_enum() && edef.variants.iter().any(|variant| {
-                    variant.name == name.node && variant.ctor_kind == CtorKind::Const
+                    variant.name == ident.name && variant.ctor_kind == CtorKind::Const
                 }) {
                     let ty_path = cx.tcx.item_path_str(edef.did);
                     let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170,
                         "pattern binding `{}` is named the same as one \
                          of the variants of the type `{}`",
-                        name.node, ty_path);
+                        ident, ty_path);
                     err.span_suggestion_with_applicability(
                         p.span,
                         "to match on the variant, qualify the path",
-                        format!("{}::{}", ty_path, name.node),
+                        format!("{}::{}", ty_path, ident),
                         Applicability::MachineApplicable
                     );
                     err.emit();
index 9b92a8b5e7861edff98293cf2e23de07beee3eb3..4d0e3e826e8789f01efa54b5c2328aea90093929 100644 (file)
@@ -18,7 +18,6 @@
 
 use interpret::{const_val_field, const_variant_index, self};
 
-use rustc::middle::const_val::ConstVal;
 use rustc::mir::{fmt_const_val, Field, BorrowKind, Mutability};
 use rustc::mir::interpret::{Scalar, GlobalId, ConstValue, Value};
 use rustc::ty::{self, TyCtxt, AdtDef, Ty, Region};
@@ -122,13 +121,6 @@ pub enum PatternKind<'tcx> {
     },
 }
 
-fn print_const_val(value: &ty::Const, f: &mut fmt::Formatter) -> fmt::Result {
-    match value.val {
-        ConstVal::Value(..) => fmt_const_val(f, value),
-        ConstVal::Unevaluated(..) => bug!("{:?} not printable in a pattern", value)
-    }
-}
-
 impl<'tcx> fmt::Display for Pattern<'tcx> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self.kind {
@@ -236,15 +228,15 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                 write!(f, "{}", subpattern)
             }
             PatternKind::Constant { value } => {
-                print_const_val(value, f)
+                fmt_const_val(f, value)
             }
             PatternKind::Range { lo, hi, end } => {
-                print_const_val(lo, f)?;
+                fmt_const_val(f, lo)?;
                 match end {
                     RangeEnd::Included => write!(f, "...")?,
                     RangeEnd::Excluded => write!(f, "..")?,
                 }
-                print_const_val(hi, f)
+                fmt_const_val(f, hi)
             }
             PatternKind::Slice { ref prefix, ref slice, ref suffix } |
             PatternKind::Array { ref prefix, ref slice, ref suffix } => {
@@ -461,7 +453,7 @@ fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat) -> Pattern<'tcx> {
                 }
             }
 
-            PatKind::Binding(_, id, ref name, ref sub) => {
+            PatKind::Binding(_, id, ident, ref sub) => {
                 let var_ty = self.tables.node_id_to_type(pat.hir_id);
                 let region = match var_ty.sty {
                     ty::TyRef(r, _, _) => Some(r),
@@ -491,14 +483,14 @@ fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat) -> Pattern<'tcx> {
                     if let ty::TyRef(_, rty, _) = ty.sty {
                         ty = rty;
                     } else {
-                        bug!("`ref {}` has wrong type {}", name.node, ty);
+                        bug!("`ref {}` has wrong type {}", ident, ty);
                     }
                 }
 
                 PatternKind::Binding {
                     mutability,
                     mode,
-                    name: name.node,
+                    name: ident.name,
                     var: id,
                     ty: var_ty,
                     subpattern: self.lower_opt_pattern(sub),
@@ -795,13 +787,10 @@ fn const_to_pat(
         debug!("const_to_pat: cv={:#?}", cv);
         let adt_subpattern = |i, variant_opt| {
             let field = Field::new(i);
-            let val = match cv.val {
-                ConstVal::Value(miri) => const_val_field(
-                    self.tcx, self.param_env, instance,
-                    variant_opt, field, miri, cv.ty,
-                ).expect("field access failed"),
-                _ => bug!("{:#?} is not a valid adt", cv),
-            };
+            let val = const_val_field(
+                self.tcx, self.param_env, instance,
+                variant_opt, field, cv,
+            ).expect("field access failed");
             self.const_to_pat(instance, val, id, span)
         };
         let adt_subpatterns = |n, variant_opt| {
@@ -840,24 +829,18 @@ fn const_to_pat(
                 PatternKind::Wild
             },
             ty::TyAdt(adt_def, substs) if adt_def.is_enum() => {
-                match cv.val {
-                    ConstVal::Value(val) => {
-                        let variant_index = const_variant_index(
-                            self.tcx, self.param_env, instance, val, cv.ty
-                        ).expect("const_variant_index failed");
-                        let subpatterns = adt_subpatterns(
-                            adt_def.variants[variant_index].fields.len(),
-                            Some(variant_index),
-                        );
-                        PatternKind::Variant {
-                            adt_def,
-                            substs,
-                            variant_index,
-                            subpatterns,
-                        }
-                    },
-                    ConstVal::Unevaluated(..) =>
-                        span_bug!(span, "{:#?} is not a valid enum constant", cv),
+                let variant_index = const_variant_index(
+                    self.tcx, self.param_env, instance, cv
+                ).expect("const_variant_index failed");
+                let subpatterns = adt_subpatterns(
+                    adt_def.variants[variant_index].fields.len(),
+                    Some(variant_index),
+                );
+                PatternKind::Variant {
+                    adt_def,
+                    substs,
+                    variant_index,
+                    subpatterns,
                 }
             },
             ty::TyAdt(adt_def, _) => {
index 3fcf1b5c8ed599e84d4b442cbf67ed7e63536f06..35422b11bd733714524b7aa9c24289079b654a0c 100644 (file)
@@ -1,6 +1,5 @@
 use rustc::hir;
-use rustc::middle::const_val::{ConstEvalErr, ErrKind};
-use rustc::middle::const_val::ErrKind::{TypeckError, CheckMatchError};
+use rustc::mir::interpret::{ConstEvalErr};
 use rustc::mir;
 use rustc::ty::{self, TyCtxt, Ty, Instance};
 use rustc::ty::layout::{self, LayoutOf, Primitive};
@@ -18,7 +17,6 @@
 
 use std::fmt;
 use std::error::Error;
-use rustc_data_structures::sync::Lrc;
 
 pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
@@ -76,7 +74,7 @@ pub fn value_to_const_value<'tcx>(
     val: Value,
     ty: Ty<'tcx>,
 ) -> &'tcx ty::Const<'tcx> {
-    let layout = ecx.tcx.layout_of(ty::ParamEnv::reveal_all().and(ty)).unwrap();
+    let layout = ecx.layout_of(ty).unwrap();
     match (val, &layout.abi) {
         (Value::Scalar(Scalar::Bits { defined: 0, ..}), _) if layout.is_zst() => {},
         (Value::ByRef(..), _) |
@@ -106,7 +104,8 @@ pub fn value_to_const_value<'tcx>(
             let (frames, span) = ecx.generate_stacktrace(None);
             let err = ConstEvalErr {
                 span,
-                kind: ErrKind::Miri(err, frames).into(),
+                error: err,
+                stacktrace: frames,
             };
             err.report_as_error(
                 ecx.tcx,
@@ -426,13 +425,13 @@ pub fn const_val_field<'a, 'tcx>(
     instance: ty::Instance<'tcx>,
     variant: Option<usize>,
     field: mir::Field,
-    value: ConstValue<'tcx>,
-    ty: Ty<'tcx>,
-) -> ::rustc::middle::const_val::EvalResult<'tcx> {
-    trace!("const_val_field: {:?}, {:?}, {:?}, {:?}", instance, field, value, ty);
+    value: &'tcx ty::Const<'tcx>,
+) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
+    trace!("const_val_field: {:?}, {:?}, {:?}", instance, field, value);
     let mut ecx = mk_eval_cx(tcx, instance, param_env).unwrap();
     let result = (|| {
-        let value = ecx.const_value_to_value(value, ty)?;
+        let ty = value.ty;
+        let value = ecx.const_to_value(value.val)?;
         let layout = ecx.layout_of(ty)?;
         let (ptr, align) = match value {
             Value::ByRef(ptr, align) => (ptr, align),
@@ -467,11 +466,11 @@ pub fn const_val_field<'a, 'tcx>(
     })();
     result.map_err(|err| {
         let (trace, span) = ecx.generate_stacktrace(None);
-        let err = ErrKind::Miri(err, trace);
         ConstEvalErr {
-            kind: err.into(),
+            error: err,
+            stacktrace: trace,
             span,
-        }
+        }.into()
     })
 }
 
@@ -479,30 +478,29 @@ pub fn const_variant_index<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     param_env: ty::ParamEnv<'tcx>,
     instance: ty::Instance<'tcx>,
-    val: ConstValue<'tcx>,
-    ty: Ty<'tcx>,
+    val: &'tcx ty::Const<'tcx>,
 ) -> EvalResult<'tcx, usize> {
-    trace!("const_variant_index: {:?}, {:?}, {:?}", instance, val, ty);
+    trace!("const_variant_index: {:?}, {:?}", instance, val);
     let mut ecx = mk_eval_cx(tcx, instance, param_env).unwrap();
-    let value = ecx.const_value_to_value(val, ty)?;
+    let value = ecx.const_to_value(val.val)?;
     let (ptr, align) = match value {
         Value::ScalarPair(..) | Value::Scalar(_) => {
-            let layout = ecx.layout_of(ty)?;
+            let layout = ecx.layout_of(val.ty)?;
             let ptr = ecx.memory.allocate(layout.size, layout.align, Some(MemoryKind::Stack))?.into();
-            ecx.write_value_to_ptr(value, ptr, layout.align, ty)?;
+            ecx.write_value_to_ptr(value, ptr, layout.align, val.ty)?;
             (ptr, layout.align)
         },
         Value::ByRef(ptr, align) => (ptr, align),
     };
     let place = Place::from_scalar_ptr(ptr, align);
-    ecx.read_discriminant_as_variant_index(place, ty)
+    ecx.read_discriminant_as_variant_index(place, val.ty)
 }
 
 pub fn const_value_to_allocation_provider<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    (val, ty): (ConstValue<'tcx>, Ty<'tcx>),
+    val: &'tcx ty::Const<'tcx>,
 ) -> &'tcx Allocation {
-    match val {
+    match val.val {
         ConstValue::ByRef(alloc, offset) => {
             assert_eq!(offset.bytes(), 0);
             return alloc;
@@ -515,20 +513,20 @@ pub fn const_value_to_allocation_provider<'a, 'tcx>(
             ty::ParamEnv::reveal_all(),
             CompileTimeEvaluator,
             ());
-        let value = ecx.const_value_to_value(val, ty)?;
-        let layout = ecx.layout_of(ty)?;
+        let value = ecx.const_to_value(val.val)?;
+        let layout = ecx.layout_of(val.ty)?;
         let ptr = ecx.memory.allocate(layout.size, layout.align, Some(MemoryKind::Stack))?;
-        ecx.write_value_to_ptr(value, ptr.into(), layout.align, ty)?;
+        ecx.write_value_to_ptr(value, ptr.into(), layout.align, val.ty)?;
         let alloc = ecx.memory.get(ptr.alloc_id)?;
         Ok(tcx.intern_const_alloc(alloc.clone()))
     };
-    result().expect("unable to convert ConstVal to Allocation")
+    result().expect("unable to convert ConstValue to Allocation")
 }
 
 pub fn const_eval_provider<'a, 'tcx>(
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
-) -> ::rustc::middle::const_val::EvalResult<'tcx> {
+) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
     trace!("const eval: {:?}", key);
     let cid = key.value;
     let def_id = cid.instance.def.def_id();
@@ -540,9 +538,10 @@ pub fn const_eval_provider<'a, 'tcx>(
         // Do match-check before building MIR
         if tcx.check_match(def_id).is_err() {
             return Err(ConstEvalErr {
-                kind: Lrc::new(CheckMatchError),
+                error: EvalErrorKind::CheckMatchError.into(),
+                stacktrace: vec![],
                 span,
-            });
+            }.into());
         }
 
         if let hir::BodyOwnerKind::Const = tcx.hir.body_owner_kind(id) {
@@ -552,9 +551,10 @@ pub fn const_eval_provider<'a, 'tcx>(
         // Do not continue into miri if typeck errors occurred; it will fail horribly
         if tables.tainted_by_errors {
             return Err(ConstEvalErr {
-                kind: Lrc::new(TypeckError),
+                error: EvalErrorKind::TypeckError.into(),
+                stacktrace: vec![],
                 span,
-            });
+            }.into());
         }
     };
 
@@ -566,15 +566,15 @@ pub fn const_eval_provider<'a, 'tcx>(
         Ok(value_to_const_value(&ecx, val, miri_ty))
     }).map_err(|err| {
         let (trace, span) = ecx.generate_stacktrace(None);
-        let err = ErrKind::Miri(err, trace);
         let err = ConstEvalErr {
-            kind: err.into(),
+            error: err,
+            stacktrace: trace,
             span,
         };
         if tcx.is_static(def_id).is_some() {
             err.report_as_error(ecx.tcx, "could not evaluate static initializer");
         }
-        err
+        err.into()
     })
 }
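
The hunks above replace `ConstEvalErr`'s single `kind: ErrKind` field with separate `error` and `stacktrace` fields. Below is a minimal, self-contained sketch of that shape; every type here is a stand-in, not rustc's real `Span`, `FrameInfo`, or `EvalErrorKind`.

```rust
#[derive(Debug)]
struct FrameInfo {
    location: String, // stand-in for the real frame data (span, instance, lint root)
}

#[derive(Debug)]
enum EvalErrorKind {
    TypeckError,
    ReferencedConstant,
}

// New shape: the interpreter error and its stacktrace are plain fields.
// The old shape bundled them inside `kind: ErrKind::Miri(err, frames)`.
#[derive(Debug)]
struct ConstEvalErr {
    span: (usize, usize), // stand-in for a Span
    error: EvalErrorKind,
    stacktrace: Vec<FrameInfo>,
}

fn main() {
    let err = ConstEvalErr {
        span: (10, 20),
        error: EvalErrorKind::TypeckError,
        stacktrace: vec![FrameInfo { location: "const FOO".to_string() }],
    };
    println!("const-eval failure: {:?}", err);
}
```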
 
index ea667273ecead0b3fc066d528d8fa9502055a244..6dc65a2404d11ab9d0c947ffb05181856cbcffe4 100644 (file)
@@ -3,14 +3,13 @@
 use rustc::hir::def_id::DefId;
 use rustc::hir::def::Def;
 use rustc::hir::map::definitions::DefPathData;
-use rustc::middle::const_val::ConstVal;
 use rustc::mir;
 use rustc::ty::layout::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout};
 use rustc::ty::subst::{Subst, Substs};
 use rustc::ty::{self, Ty, TyCtxt, TypeAndMut};
 use rustc::ty::query::TyCtxtAt;
 use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use rustc::middle::const_val::FrameInfo;
+use rustc::mir::interpret::FrameInfo;
 use syntax::codemap::{self, Span};
 use syntax::ast::Mutability;
 use rustc::mir::interpret::{
@@ -233,12 +232,18 @@ pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
         Ok(Scalar::Ptr(ptr).to_value_with_len(s.len() as u64, self.tcx.tcx))
     }
 
-    pub fn const_value_to_value(
+    pub fn const_to_value(
         &mut self,
         val: ConstValue<'tcx>,
-        _ty: Ty<'tcx>,
     ) -> EvalResult<'tcx, Value> {
         match val {
+            ConstValue::Unevaluated(def_id, substs) => {
+                let instance = self.resolve(def_id, substs)?;
+                self.read_global_as_value(GlobalId {
+                    instance,
+                    promoted: None,
+                })
+            }
             ConstValue::ByRef(alloc, offset) => {
                 // FIXME: Allocate new AllocId for all constants inside
                 let id = self.memory.allocate_value(alloc.clone(), Some(MemoryKind::Stack))?;
@@ -249,23 +254,6 @@ pub fn const_value_to_value(
         }
     }
 
-    pub(super) fn const_to_value(
-        &mut self,
-        const_val: &ConstVal<'tcx>,
-        ty: Ty<'tcx>
-    ) -> EvalResult<'tcx, Value> {
-        match *const_val {
-            ConstVal::Unevaluated(def_id, substs) => {
-                let instance = self.resolve(def_id, substs)?;
-                self.read_global_as_value(GlobalId {
-                    instance,
-                    promoted: None,
-                }, ty)
-            }
-            ConstVal::Value(val) => self.const_value_to_value(val, ty)
-        }
-    }
-
     pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
         trace!("resolve: {:?}, {:#?}", def_id, substs);
         trace!("substs: {:#?}", self.substs());
@@ -280,7 +268,7 @@ pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalR
             self.param_env,
             def_id,
             substs,
-        ).ok_or_else(|| EvalErrorKind::TypeckError.into()) // turn error prop into a panic to expose associated type in const issue
+        ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
     }
 
     pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
@@ -739,7 +727,7 @@ pub(super) fn eval_rvalue_into_place(
                                     self.param_env,
                                     def_id,
                                     substs,
-                                ).ok_or_else(|| EvalErrorKind::TypeckError.into());
+                                ).ok_or_else(|| EvalErrorKind::TooGeneric.into());
                                 let fn_ptr = self.memory.create_fn_alloc(instance?);
                                 let valty = ValTy {
                                     value: Value::Scalar(fn_ptr.into()),
@@ -849,14 +837,14 @@ pub fn eval_operand(&mut self, op: &mir::Operand<'tcx>) -> EvalResult<'tcx, ValT
                 use rustc::mir::Literal;
                 let mir::Constant { ref literal, .. } = **constant;
                 let value = match *literal {
-                    Literal::Value { ref value } => self.const_to_value(&value.val, ty)?,
+                    Literal::Value { ref value } => self.const_to_value(value.val)?,
 
                     Literal::Promoted { index } => {
                         let instance = self.frame().instance;
                         self.read_global_as_value(GlobalId {
                             instance,
                             promoted: Some(index),
-                        }, ty)?
+                        })?
                     }
                 };
 
@@ -1036,18 +1024,9 @@ pub fn write_discriminant_value(
         Ok(())
     }
 
-    pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
-        if self.tcx.is_static(gid.instance.def_id()).is_some() {
-            let alloc_id = self
-                .tcx
-                .alloc_map
-                .lock()
-                .intern_static(gid.instance.def_id());
-            let layout = self.layout_of(ty)?;
-            return Ok(Value::ByRef(Scalar::Ptr(alloc_id.into()), layout.align))
-        }
+    pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, Value> {
         let cv = self.const_eval(gid)?;
-        self.const_to_value(&cv.val, ty)
+        self.const_to_value(cv.val)
     }
 
     pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
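
With `Unevaluated` folded into `ConstValue`, the former `const_to_value`/`const_value_to_value` pair above collapses into one method whose match also resolves lazy constants. A self-contained analogue of that flattening, with stand-in types in place of rustc's interpreter API:

```rust
#[derive(Debug)]
enum ConstValue {
    Unevaluated(&'static str), // stand-in for (DefId, Substs)
    Scalar(u64),
    ByRef(Vec<u8>),
}

#[derive(Debug)]
enum Value {
    Scalar(u64),
    ByRef(Vec<u8>),
}

// Stand-in for `read_global_as_value(GlobalId { instance, promoted: None })`.
fn eval_global(name: &str) -> Value {
    match name {
        "ANSWER" => Value::Scalar(42),
        _ => Value::ByRef(Vec::new()),
    }
}

fn const_to_value(val: ConstValue) -> Value {
    match val {
        // This arm used to live behind a separate `ConstVal::Unevaluated` wrapper.
        ConstValue::Unevaluated(def) => eval_global(def),
        ConstValue::Scalar(bits) => Value::Scalar(bits),
        ConstValue::ByRef(bytes) => Value::ByRef(bytes),
    }
}

fn main() {
    println!("{:?}", const_to_value(ConstValue::Unevaluated("ANSWER")));
    println!("{:?}", const_to_value(ConstValue::Scalar(7)));
}
```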
index ad571fbe90d5d0918a585da04bd817a35fa2dfa9..9e5b6be3e9122eff1ff95d39bcf8e458002e7fb6 100644 (file)
@@ -7,7 +7,6 @@
 use rustc::ty::query::TyCtxtAt;
 use rustc::ty::layout::{self, Align, TargetDataLayout, Size};
 use syntax::ast::Mutability;
-use rustc::middle::const_val::ConstVal;
 
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value,
@@ -285,16 +284,12 @@ fn const_eval_static(&self, def_id: DefId) -> EvalResult<'tcx, &'tcx Allocation>
             instance,
             promoted: None,
         };
-        self.tcx.const_eval(ParamEnv::reveal_all().and(gid)).map_err(|_| {
+        self.tcx.const_eval(ParamEnv::reveal_all().and(gid)).map_err(|err| {
             // no need to report anything, the const_eval call takes care of that for statics
             assert!(self.tcx.is_static(def_id).is_some());
-            EvalErrorKind::TypeckError.into()
+            EvalErrorKind::ReferencedConstant(err).into()
         }).map(|val| {
-            let const_val = match val.val {
-                ConstVal::Value(val) => val,
-                ConstVal::Unevaluated(..) => bug!("should be evaluated"),
-            };
-            self.tcx.const_value_to_allocation((const_val, val.ty))
+            self.tcx.const_value_to_allocation(val)
         })
     }
 
index ef69cb574e07e6b70571899b23b92ba1f72dfc16..3a046cd800a3e1db0419999a10f86ed804c40d61 100644 (file)
 
 use rustc::hir::map as hir_map;
 use rustc::hir::def_id::DefId;
-use rustc::middle::const_val::ConstVal;
 use rustc::mir::interpret::{AllocId, ConstValue};
 use rustc::middle::lang_items::{ExchangeMallocFnLangItem, StartFnLangItem};
 use rustc::ty::subst::Substs;
@@ -1031,6 +1030,7 @@ fn is_root(&self, def_id: DefId) -> bool {
             MonoItemCollectionMode::Lazy => {
                 self.entry_fn == Some(def_id) ||
                 self.tcx.is_reachable_non_generic(def_id) ||
+                self.tcx.is_weak_lang_item(def_id) ||
                 self.tcx.codegen_fn_attrs(def_id).flags.contains(
                     CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
             }
@@ -1115,10 +1115,10 @@ fn create_mono_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) {
                 let overridden_methods: FxHashSet<_> =
                     impl_item_refs.iter()
-                                  .map(|iiref| iiref.name)
+                                  .map(|iiref| iiref.ident.modern())
                                   .collect();
                 for method in tcx.provided_trait_methods(trait_ref.def_id) {
-                    if overridden_methods.contains(&method.name) {
+                    if overridden_methods.contains(&method.ident.modern()) {
                         continue;
                     }
 
@@ -1209,15 +1209,12 @@ fn collect_neighbours<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         match tcx.const_eval(param_env.and(cid)) {
             Ok(val) => collect_const(tcx, val, instance.substs, output),
             Err(err) => {
-                use rustc::middle::const_val::ErrKind;
                 use rustc::mir::interpret::EvalErrorKind;
-                if let ErrKind::Miri(ref miri, ..) = *err.kind {
-                    if let EvalErrorKind::ReferencedConstant(_) = miri.kind {
-                        err.report_as_error(
-                            tcx.at(mir.promoted[i].span),
-                            "erroneous constant used",
-                        );
-                    }
+                if let EvalErrorKind::ReferencedConstant(_) = err.error.kind {
+                    err.report_as_error(
+                        tcx.at(mir.promoted[i].span),
+                        "erroneous constant used",
+                    );
                 }
             },
         }
@@ -1242,7 +1239,7 @@ fn collect_const<'a, 'tcx>(
     debug!("visiting const {:?}", *constant);
 
     let val = match constant.val {
-        ConstVal::Unevaluated(def_id, substs) => {
+        ConstValue::Unevaluated(def_id, substs) => {
             let param_env = ty::ParamEnv::reveal_all();
             let substs = tcx.subst_and_normalize_erasing_regions(
                 param_substs,
@@ -1273,16 +1270,16 @@ fn collect_const<'a, 'tcx>(
         _ => constant.val,
     };
     match val {
-        ConstVal::Unevaluated(..) => bug!("const eval yielded unevaluated const"),
-        ConstVal::Value(ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b))) => {
+        ConstValue::Unevaluated(..) => bug!("const eval yielded unevaluated const"),
+        ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b)) => {
             collect_miri(tcx, a.alloc_id, output);
             collect_miri(tcx, b.alloc_id, output);
         }
-        ConstVal::Value(ConstValue::ScalarPair(_, Scalar::Ptr(ptr))) |
-        ConstVal::Value(ConstValue::ScalarPair(Scalar::Ptr(ptr), _)) |
-        ConstVal::Value(ConstValue::Scalar(Scalar::Ptr(ptr))) =>
+        ConstValue::ScalarPair(_, Scalar::Ptr(ptr)) |
+        ConstValue::ScalarPair(Scalar::Ptr(ptr), _) |
+        ConstValue::Scalar(Scalar::Ptr(ptr)) =>
             collect_miri(tcx, ptr.alloc_id, output),
-        ConstVal::Value(ConstValue::ByRef(alloc, _offset)) => {
+        ConstValue::ByRef(alloc, _offset) => {
             for &id in alloc.relocations.values() {
                 collect_miri(tcx, id, output);
             }
index 27f8254bf8a784a7394101615bb9c09acd5e6e9e..0428489fd8d78b06c831d1559a2b17740a8042be 100644 (file)
@@ -441,7 +441,7 @@ fn push_type_params<I>(&self,
 
         for projection in projections {
             let projection = projection.skip_binder();
-            let name = &self.tcx.associated_item(projection.item_def_id).name.as_str();
+            let name = &self.tcx.associated_item(projection.item_def_id).ident.as_str();
             output.push_str(name);
             output.push_str("=");
             self.push_type_name(projection.ty, output);
index 24382815084619dee7a74c01d18979e26b5309a1..3b3c28f6ae2655c2dbd9df61e247ad1be3e33b81 100644 (file)
@@ -17,7 +17,7 @@
 use rustc::mir::{NullOp, StatementKind, Statement, BasicBlock, LocalKind};
 use rustc::mir::{TerminatorKind, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem};
 use rustc::mir::visit::{Visitor, PlaceContext};
-use rustc::middle::const_val::{ConstVal, ConstEvalErr, ErrKind};
+use rustc::mir::interpret::ConstEvalErr;
 use rustc::ty::{TyCtxt, self, Instance};
 use rustc::mir::interpret::{Value, Scalar, GlobalId, EvalResult};
 use interpret::EvalContext;
@@ -45,8 +45,11 @@ fn run_pass<'a, 'tcx>(&self,
             return;
         }
         match tcx.describe_def(source.def_id) {
-            // skip statics because they'll be evaluated by miri anyway
+            // skip statics/consts because they'll be evaluated by miri anyway
+            Some(Def::Const(..)) |
             Some(Def::Static(..)) => return,
+            // we still run on associated constants, because they might not get evaluated
+            // within the current crate
             _ => {},
         }
         trace!("ConstProp starting for {:?}", source.def_id);
@@ -145,7 +148,8 @@ fn use_ecx<F, T>(
                 let (frames, span) = self.ecx.generate_stacktrace(None);
                 let err = ConstEvalErr {
                     span,
-                    kind: ErrKind::Miri(err, frames).into(),
+                    error: err,
+                    stacktrace: frames,
                 };
                 err.report_as_lint(
                     self.ecx.tcx,
@@ -159,54 +163,30 @@ fn use_ecx<F, T>(
         r
     }
 
-    fn const_eval(&mut self, cid: GlobalId<'tcx>, source_info: SourceInfo) -> Option<Const<'tcx>> {
-        let value = match self.tcx.const_eval(self.param_env.and(cid)) {
-            Ok(val) => val,
-            Err(err) => {
-                err.report_as_error(
-                    self.tcx.at(err.span),
-                    "constant evaluation error",
-                );
-                return None;
-            },
-        };
-        let val = match value.val {
-            ConstVal::Value(v) => {
-                self.use_ecx(source_info, |this| this.ecx.const_value_to_value(v, value.ty))?
-            },
-            _ => bug!("eval produced: {:?}", value),
-        };
-        let val = (val, value.ty, source_info.span);
-        trace!("evaluated {:?} to {:?}", cid, val);
-        Some(val)
-    }
-
     fn eval_constant(
         &mut self,
         c: &Constant<'tcx>,
         source_info: SourceInfo,
     ) -> Option<Const<'tcx>> {
         match c.literal {
-            Literal::Value { value } => match value.val {
-                ConstVal::Value(v) => {
-                    let v = self.use_ecx(source_info, |this| {
-                        this.ecx.const_value_to_value(v, value.ty)
-                    })?;
-                    Some((v, value.ty, c.span))
-                },
-                ConstVal::Unevaluated(did, substs) => {
-                    let instance = Instance::resolve(
-                        self.tcx,
-                        self.param_env,
-                        did,
-                        substs,
-                    )?;
-                    let cid = GlobalId {
-                        instance,
-                        promoted: None,
-                    };
-                    self.const_eval(cid, source_info)
-                },
+            Literal::Value { value } => {
+                self.ecx.tcx.span = source_info.span;
+                match self.ecx.const_to_value(value.val) {
+                    Ok(val) => Some((val, value.ty, c.span)),
+                    Err(error) => {
+                        let (stacktrace, span) = self.ecx.generate_stacktrace(None);
+                        let err = ConstEvalErr {
+                            span,
+                            error,
+                            stacktrace,
+                        };
+                        err.report_as_error(
+                            self.tcx.at(source_info.span),
+                            "could not evaluate constant",
+                        );
+                        None
+                    },
+                }
             },
             // evaluate the promoted and replace the constant with the evaluated result
             Literal::Promoted { index } => {
index 6448ba17e3464480eb8d2c478dead103c1a5e1d0..144ebce76e14cbe01f5137a041c4d77249ae76c1 100644 (file)
@@ -20,7 +20,7 @@
 use rustc_data_structures::fx::FxHashSet;
 use rustc::hir;
 use rustc::hir::def_id::DefId;
-use rustc::middle::const_val::ConstVal;
+use rustc::mir::interpret::ConstValue;
 use rustc::traits::{self, TraitEngine};
 use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
 use rustc::ty::cast::CastTy;
@@ -611,7 +611,7 @@ fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
             }
             Operand::Constant(ref constant) => {
                 if let Literal::Value {
-                    value: &ty::Const { val: ConstVal::Unevaluated(def_id, _), ty, .. }
+                    value: &ty::Const { val: ConstValue::Unevaluated(def_id, _), ty, .. }
                 } = constant.literal {
                     // Don't peek inside trait associated constants.
                     if self.tcx.trait_of_item(def_id).is_some() {
index ea15f4c75b96c73bb0bd7336242812ac7be2f398..b1ab86674cf90fa74388effcfa7b49e6c403f3a3 100644 (file)
@@ -108,6 +108,7 @@ pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxEx
                 def_info: _,
                 allow_internal_unstable,
                 allow_internal_unsafe,
+                local_inner_macros,
                 unstable_feature,
                 edition,
             } => {
@@ -117,6 +118,7 @@ pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxEx
                     def_info: Some((nid, self.krate_span)),
                     allow_internal_unstable,
                     allow_internal_unsafe,
+                    local_inner_macros,
                     unstable_feature,
                     edition,
                 }
@@ -152,6 +154,7 @@ pub fn register_macro(&mut self, name: &str, expander: MacroExpanderFn) {
             def_info: None,
             allow_internal_unstable: false,
             allow_internal_unsafe: false,
+            local_inner_macros: false,
             unstable_feature: None,
             edition: hygiene::default_edition(),
         });
index de087049267b1f1ed82a4db643497692355a3061..2aecbf32ec5a1e81fc53f166cee4cb5853187fa0 100644 (file)
@@ -830,8 +830,8 @@ fn visit_qpath(&mut self, qpath: &'tcx hir::QPath, id: ast::NodeId, span: Span)
             let is_local_static = if let Def::Static(..) = def { def_id.is_local() } else { false };
             if !self.item_is_accessible(def_id) && !is_local_static {
                 let name = match *qpath {
-                    hir::QPath::Resolved(_, ref path) => format!("{}", path),
-                    hir::QPath::TypeRelative(_, ref segment) => segment.name.to_string(),
+                    hir::QPath::Resolved(_, ref path) => path.to_string(),
+                    hir::QPath::TypeRelative(_, ref segment) => segment.ident.to_string(),
                 };
                 let msg = format!("{} `{}` is private", def.kind_name(), name);
                 self.tcx.sess.span_err(span, &msg);
index 2052918747b39301f1f14364c7d244b77a7eb00c..464356eb5d242206d57395cf70be46516bc230ec 100644 (file)
@@ -1497,17 +1497,17 @@ fn resolve_str_path(
         args: Option<P<hir::GenericArgs>>,
         is_value: bool
     ) -> hir::Path {
-        let mut segments = iter::once(keywords::CrateRoot.name())
+        let mut segments = iter::once(keywords::CrateRoot.ident())
             .chain(
                 crate_root.into_iter()
                     .chain(components.iter().cloned())
-                    .map(Symbol::intern)
-            ).map(hir::PathSegment::from_name).collect::<Vec<_>>();
+                    .map(Ident::from_str)
+            ).map(hir::PathSegment::from_ident).collect::<Vec<_>>();
 
         if let Some(args) = args {
-            let name = segments.last().unwrap().name;
+            let ident = segments.last().unwrap().ident;
             *segments.last_mut().unwrap() = hir::PathSegment {
-                name,
+                ident,
                 args: Some(args),
                 infer_types: true,
             };
@@ -1550,16 +1550,16 @@ pub fn resolve_str_path_error(&mut self, span: Span, path_str: &str, is_value: b
             hir::Path {
                 span,
                 def: Def::Err,
-                segments: iter::once(keywords::CrateRoot.name()).chain({
-                    path_str.split("::").skip(1).map(Symbol::intern)
-                }).map(hir::PathSegment::from_name).collect(),
+                segments: iter::once(keywords::CrateRoot.ident()).chain({
+                    path_str.split("::").skip(1).map(Ident::from_str)
+                }).map(hir::PathSegment::from_ident).collect(),
             }
         } else {
             hir::Path {
                 span,
                 def: Def::Err,
-                segments: path_str.split("::").map(Symbol::intern)
-                                  .map(hir::PathSegment::from_name).collect(),
+                segments: path_str.split("::").map(Ident::from_str)
+                                  .map(hir::PathSegment::from_ident).collect(),
             }
         };
         self.resolve_hir_path_cb(&mut path, is_value, |_, _, _| errored = true);
@@ -1572,13 +1572,11 @@ pub fn resolve_str_path_error(&mut self, span: Span, path_str: &str, is_value: b
 
     /// resolve_hir_path, but takes a callback in case there was an error
     fn resolve_hir_path_cb<F>(&mut self, path: &mut hir::Path, is_value: bool, error_callback: F)
-            where F: for<'c, 'b> FnOnce(&'c mut Resolver, Span, ResolutionError<'b>)
-        {
+        where F: for<'c, 'b> FnOnce(&'c mut Resolver, Span, ResolutionError<'b>)
+    {
         let namespace = if is_value { ValueNS } else { TypeNS };
         let hir::Path { ref segments, span, ref mut def } = *path;
-        let path: Vec<Ident> = segments.iter()
-            .map(|seg| Ident::new(seg.name, span))
-            .collect();
+        let path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
         // FIXME (Manishearth): Intra doc links won't get warned of epoch changes
         match self.resolve_path(&path, Some(namespace), true, span, CrateLint::No) {
             PathResult::Module(module) => *def = module.def().unwrap(),
@@ -3507,7 +3505,7 @@ fn lint_if_path_starts_with_module(
 
         match path.get(1) {
             // If this import looks like `crate::...` it's already good
-            Some(name) if name.name == keywords::Crate.name() => return,
+            Some(ident) if ident.name == keywords::Crate.name() => return,
             // Otherwise go below to see if it's an extern crate
             Some(_) => {}
             // If the path has length one (and it's `CrateRoot` most likely)
index ebdaa456170b215a33bd16d653d040821e8fd934..c9d00f80b0ba325b31d073fc997b24d203d2baf0 100644 (file)
@@ -451,11 +451,18 @@ pub fn resolve_macro_to_def_inner(&mut self, scope: Mark, path: &ast::Path,
                                   kind: MacroKind, force: bool)
                                   -> Result<Def, Determinacy> {
         let ast::Path { ref segments, span } = *path;
-        let path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
+        let mut path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
         let invocation = self.invocations[&scope];
         let module = invocation.module.get();
         self.current_module = if module.is_trait() { module.parent.unwrap() } else { module };
 
+        // Possibly apply the macro helper hack
+        if self.use_extern_macros && kind == MacroKind::Bang && path.len() == 1 &&
+           path[0].span.ctxt().outer().expn_info().map_or(false, |info| info.local_inner_macros) {
+            let root = Ident::new(keywords::DollarCrate.name(), path[0].span);
+            path.insert(0, root);
+        }
+
         if path.len() > 1 {
             if !self.use_extern_macros && self.gated_errors.insert(span) {
                 let msg = "non-ident macro paths are experimental";
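
The hunk above implements the `local_inner_macros` helper hack: a single-segment bang-macro path whose expansion site opted in gets a `$crate` root prepended before resolution. A small, runnable stand-in for that adjustment (the string-based path is purely illustrative, not rustc's representation):

```rust
const DOLLAR_CRATE: &str = "$crate";

/// Stand-in for the adjustment in `resolve_macro_to_def_inner`: only bang
/// macros with `local_inner_macros` and a plain one-segment path are rewritten.
fn adjust_macro_path(mut path: Vec<String>, local_inner_macros: bool) -> Vec<String> {
    if local_inner_macros && path.len() == 1 {
        path.insert(0, DOLLAR_CRATE.to_string());
    }
    path
}

fn main() {
    // `helper!` called from an exported macro of another crate now resolves as
    // `$crate::helper!` instead of being looked up at the call site.
    let adjusted = adjust_macro_path(vec!["helper".to_string()], true);
    assert_eq!(adjusted, vec!["$crate".to_string(), "helper".to_string()]);

    // Without the flag the path is left alone.
    let unchanged = adjust_macro_path(vec!["helper".to_string()], false);
    assert_eq!(unchanged, vec!["helper".to_string()]);
}
```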
index 262c0e40abc549788d487b01c192a83e6bdbfd33..311978ec248091709a905f9aecf82b49b920dc88 100644 (file)
@@ -316,14 +316,14 @@ fn process_method(
         sig: &'l ast::MethodSig,
         body: Option<&'l ast::Block>,
         id: ast::NodeId,
-        name: ast::Ident,
+        ident: ast::Ident,
         generics: &'l ast::Generics,
         vis: ast::Visibility,
         span: Span,
     ) {
-        debug!("process_method: {}:{}", id, name);
+        debug!("process_method: {}:{}", id, ident);
 
-        if let Some(mut method_data) = self.save_ctxt.get_method_data(id, name.name, span) {
+        if let Some(mut method_data) = self.save_ctxt.get_method_data(id, ident.name, span) {
             let sig_str = ::make_signature(&sig.decl, &generics);
             if body.is_some() {
                 self.nest_tables(
@@ -335,7 +335,7 @@ fn process_method(
             self.process_generic_params(&generics, span, &method_data.qualname, id);
 
             method_data.value = sig_str;
-            method_data.sig = sig::method_signature(id, name, generics, sig, &self.save_ctxt);
+            method_data.sig = sig::method_signature(id, ident, generics, sig, &self.save_ctxt);
             self.dumper.dump_def(&access_from!(self.save_ctxt, vis, id), method_data);
         }
 
index deb91774175667d148bc7b79a7f48b1f1990c940..89d30fd666a883d88d67c80b9ff4c046cc68ff23 100644 (file)
@@ -438,7 +438,7 @@ pub fn get_method_data(&self, id: ast::NodeId, name: ast::Name, span: Span) -> O
                                 qualname.push_str(&self.tcx.item_path_str(def_id));
                                 self.tcx
                                     .associated_items(def_id)
-                                    .find(|item| item.name == name)
+                                    .find(|item| item.ident.name == name)
                                     .map(|item| decl_id = Some(item.def_id));
                             }
                             qualname.push_str(">");
@@ -775,7 +775,8 @@ fn fn_type(path: &ast::Path) -> bool {
                     let ti = self.tcx.associated_item(decl_id);
                     self.tcx
                         .associated_items(ti.container.id())
-                        .find(|item| item.name == ti.name && item.defaultness.has_value())
+                        .find(|item| item.ident.name == ti.ident.name &&
+                                     item.defaultness.has_value())
                         .map(|item| item.def_id)
                 } else {
                     None
index a1242621cb18c5882197fd25c265bad632b7bac6..6062fe03e6a16d4651581af570ea2c42d86cb638 100644 (file)
@@ -10,9 +10,7 @@
 
 use chalk_engine::fallible::Fallible as ChalkEngineFallible;
 use chalk_engine::{context, hh::HhGoal, DelayedLiteral, ExClause};
-use rustc::infer::canonical::{
-    Canonical, CanonicalVarValues, Canonicalize, QueryRegionConstraint, QueryResult,
-};
+use rustc::infer::canonical::{Canonical, CanonicalVarValues, QueryRegionConstraint, QueryResult};
 use rustc::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime};
 use rustc::traits::{
     WellFormed,
@@ -519,14 +517,3 @@ impl<'a, 'tcx> Lift<'tcx> for ConstrainedSubst<'a> {
         subst, constraints
     }
 }
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ConstrainedSubst<'tcx> {
-    type Canonicalized = Canonical<'gcx, ConstrainedSubst<'gcx>>;
-
-    fn intern(
-        _gcx: TyCtxt<'_, 'gcx, 'gcx>,
-        value: Canonical<'gcx, ConstrainedSubst<'gcx>>,
-    ) -> Self::Canonicalized {
-        value
-    }
-}
index 219c6b9aefba51f72be5cacf674fcfd98413d0b6..5f9060b36234644c9f59ba75fc44240662c2b075 100644 (file)
@@ -8,19 +8,27 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use rustc::infer::canonical::{Canonical, QueryResult};
 use rustc::hir::def_id::DefId;
-use rustc::traits::{FulfillmentContext, Normalized, ObligationCause};
+use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::traits::query::dropck_outlives::{DropckOutlivesResult, DtorckConstraint};
 use rustc::traits::query::{CanonicalTyGoal, NoSolution};
-use rustc::traits::query::dropck_outlives::{DtorckConstraint, DropckOutlivesResult};
-use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
+use rustc::traits::{FulfillmentContext, Normalized, ObligationCause, TraitEngineExt};
+use rustc::ty::query::Providers;
 use rustc::ty::subst::{Subst, Substs};
+use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
 use rustc::util::nodemap::FxHashSet;
 use rustc_data_structures::sync::Lrc;
 use syntax::codemap::{Span, DUMMY_SP};
-use util;
 
-crate fn dropck_outlives<'tcx>(
+crate fn provide(p: &mut Providers) {
+    *p = Providers {
+        dropck_outlives,
+        adt_dtorck_constraint,
+        ..*p
+    };
+}
+
+fn dropck_outlives<'tcx>(
     tcx: TyCtxt<'_, 'tcx, 'tcx>,
     goal: CanonicalTyGoal<'tcx>,
 ) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, DropckOutlivesResult<'tcx>>>>, NoSolution> {
             canonical_inference_vars,
         ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);
 
-        let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] };
+        let mut result = DropckOutlivesResult {
+            kinds: vec![],
+            overflows: vec![],
+        };
 
         // A stack of types left to process. Each round, we pop
         // something from the stack and invoke
 
         debug!("dropck_outlives: result = {:#?}", result);
 
-        util::make_query_response(infcx, canonical_inference_vars, result, fulfill_cx)
+        infcx.make_canonicalized_query_result(canonical_inference_vars, result, fulfill_cx)
     })
 }
 
@@ -184,7 +195,8 @@ fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>(
             dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety)
         }
 
-        ty::TyTuple(tys) => tys.iter()
+        ty::TyTuple(tys) => tys
+            .iter()
             .map(|ty| dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty))
             .collect(),
 
@@ -222,7 +234,10 @@ fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>(
                 dtorck_types: vec![],
                 overflows: vec![],
             };
-            debug!("dtorck_constraint: generator {:?} => {:?}", def_id, constraint);
+            debug!(
+                "dtorck_constraint: generator {:?} => {:?}",
+                def_id, constraint
+            );
 
             Ok(constraint)
         }
@@ -291,7 +306,8 @@ fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>(
         return Ok(result);
     }
 
-    let mut result = def.all_fields()
+    let mut result = def
+        .all_fields()
         .map(|field| tcx.type_of(field.did))
         .map(|fty| dtorck_constraint_for_ty(tcx, span, fty, 0, fty))
         .collect::<Result<DtorckConstraint, NoSolution>>()?;
index 21259bbcd38ff9df779a2e9d7923079e05126fe7..e8a3447902fd357de4a93de6b63224130187a922 100644 (file)
 use rustc::traits::{EvaluationResult, Obligation, ObligationCause,
                     OverflowError, SelectionContext, TraitQueryMode};
 use rustc::traits::query::CanonicalPredicateGoal;
+use rustc::ty::query::Providers;
 use rustc::ty::{ParamEnvAnd, TyCtxt};
 use syntax::codemap::DUMMY_SP;
 
-crate fn evaluate_obligation<'tcx>(
+crate fn provide(p: &mut Providers) {
+    *p = Providers {
+        evaluate_obligation,
+        ..*p
+    };
+}
+
+fn evaluate_obligation<'tcx>(
     tcx: TyCtxt<'_, 'tcx, 'tcx>,
     goal: CanonicalPredicateGoal<'tcx>,
 ) -> Result<EvaluationResult, OverflowError> {
index c3135439204e7c69456fe2243d9d207cbfb4875e..1da3907915a0724a78d8e1547fe94c0f307ce889 100644 (file)
 mod evaluate_obligation;
 mod normalize_projection_ty;
 mod normalize_erasing_regions;
-mod util;
 pub mod lowering;
+mod type_op;
 
 use rustc::ty::query::Providers;
 
 pub fn provide(p: &mut Providers) {
-    *p = Providers {
-        dropck_outlives: dropck_outlives::dropck_outlives,
-        adt_dtorck_constraint: dropck_outlives::adt_dtorck_constraint,
-        normalize_projection_ty: normalize_projection_ty::normalize_projection_ty,
-        normalize_ty_after_erasing_regions:
-            normalize_erasing_regions::normalize_ty_after_erasing_regions,
-        program_clauses_for: lowering::program_clauses_for,
-        program_clauses_for_env: lowering::program_clauses_for_env,
-        evaluate_obligation: evaluate_obligation::evaluate_obligation,
-        ..*p
-    };
+    dropck_outlives::provide(p);
+    evaluate_obligation::provide(p);
+    lowering::provide(p);
+    normalize_projection_ty::provide(p);
+    normalize_erasing_regions::provide(p);
+    type_op::provide(p);
 }
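
The hunks above move provider registration out of the central `provide` into per-module `provide` functions that the parent merely chains. A self-contained sketch of that pattern follows; the `Providers` struct here is a stand-in, not rustc's `ty::query::Providers`.

```rust
#[derive(Default)]
struct Providers {
    dropck_outlives: Option<fn() -> &'static str>,
    evaluate_obligation: Option<fn() -> &'static str>,
}

mod dropck_outlives {
    use super::Providers;

    fn dropck_outlives() -> &'static str {
        "dropck_outlives answer"
    }

    // The submodule registers its own provider; the parent never names it.
    pub fn provide(p: &mut Providers) {
        *p = Providers {
            dropck_outlives: Some(dropck_outlives),
            ..*p
        };
    }
}

mod evaluate_obligation {
    use super::Providers;

    fn evaluate_obligation() -> &'static str {
        "evaluate_obligation answer"
    }

    pub fn provide(p: &mut Providers) {
        *p = Providers {
            evaluate_obligation: Some(evaluate_obligation),
            ..*p
        };
    }
}

// The parent module only chains the submodule registrations.
fn provide(p: &mut Providers) {
    dropck_outlives::provide(p);
    evaluate_obligation::provide(p);
}

fn main() {
    let mut p = Providers::default();
    provide(&mut p);
    assert!(p.dropck_outlives.is_some() && p.evaluate_obligation.is_some());
}
```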
index 0270e970976eaa59906bccbfda1f1783ee4e462c..214376b2e532b14a406e1d8854c23c4cb6d64241 100644 (file)
@@ -14,6 +14,7 @@
 use rustc::hir::{self, ImplPolarity};
 use rustc::traits::{Clause, Clauses, DomainGoal, Goal, PolyDomainGoal, ProgramClause,
                     WhereClause, FromEnv, WellFormed};
+use rustc::ty::query::Providers;
 use rustc::ty::subst::Substs;
 use rustc::ty::{self, Slice, TyCtxt};
 use rustc_data_structures::fx::FxHashSet;
 
 use std::iter;
 
+crate fn provide(p: &mut Providers) {
+    *p = Providers {
+        program_clauses_for,
+        program_clauses_for_env,
+        ..*p
+    };
+}
+
 crate trait Lower<T> {
     /// Lower a rustc construct (e.g. `ty::TraitPredicate`) to a chalk-like type.
     fn lower(&self) -> T;
@@ -408,7 +417,7 @@ pub fn program_clauses_for_associated_type_value<'a, 'tcx>(
     let hypotheses = vec![trait_implemented];
 
     // `<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm>`
-    let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.name);
+    let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.ident);
 
     // `Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T)`
     let normalize_goal = DomainGoal::Normalize(ty::ProjectionPredicate { projection_ty, ty });
index 299433d479dc60d86d69ca5f4cb898482509bb4c..a85983d0e9a8164a6b9c5c27b8d337bc979c370c 100644 (file)
 
 use rustc::traits::{Normalized, ObligationCause};
 use rustc::traits::query::NoSolution;
+use rustc::ty::query::Providers;
 use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
 use std::sync::atomic::Ordering;
 
-crate fn normalize_ty_after_erasing_regions<'tcx>(
+crate fn provide(p: &mut Providers) {
+    *p = Providers {
+        normalize_ty_after_erasing_regions,
+        ..*p
+    };
+}
+
+fn normalize_ty_after_erasing_regions<'tcx>(
     tcx: TyCtxt<'_, 'tcx, 'tcx>,
     goal: ParamEnvAnd<'tcx, Ty<'tcx>>,
 ) -> Ty<'tcx> {
index a9ac53972e4756cad51c3490b767a73936b89b10..1c0f677fbf3cbf369f226ff07c9eb350006d89bf 100644 (file)
@@ -9,45 +9,55 @@
 // except according to those terms.
 
 use rustc::infer::canonical::{Canonical, QueryResult};
-use rustc::traits::{self, FulfillmentContext, ObligationCause, SelectionContext};
-use rustc::traits::query::{CanonicalProjectionGoal, NoSolution, normalize::NormalizationResult};
+use rustc::traits::query::{normalize::NormalizationResult, CanonicalProjectionGoal, NoSolution};
+use rustc::traits::{self, ObligationCause, SelectionContext, TraitEngineExt};
+use rustc::ty::query::Providers;
 use rustc::ty::{ParamEnvAnd, TyCtxt};
 use rustc_data_structures::sync::Lrc;
+use std::sync::atomic::Ordering;
 use syntax::ast::DUMMY_NODE_ID;
 use syntax_pos::DUMMY_SP;
-use util;
-use std::sync::atomic::Ordering;
 
-crate fn normalize_projection_ty<'tcx>(
+crate fn provide(p: &mut Providers) {
+    *p = Providers {
+        normalize_projection_ty,
+        ..*p
+    };
+}
+
+fn normalize_projection_ty<'tcx>(
     tcx: TyCtxt<'_, 'tcx, 'tcx>,
     goal: CanonicalProjectionGoal<'tcx>,
 ) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, NormalizationResult<'tcx>>>>, NoSolution> {
     debug!("normalize_provider(goal={:#?})", goal);
 
-    tcx.sess.perf_stats.normalize_projection_ty.fetch_add(1, Ordering::Relaxed);
-    tcx.infer_ctxt().enter(|ref infcx| {
-        let (
-            ParamEnvAnd {
+    tcx.sess
+        .perf_stats
+        .normalize_projection_ty
+        .fetch_add(1, Ordering::Relaxed);
+    tcx.infer_ctxt().enter_canonical_trait_query(
+        &goal,
+        |infcx,
+         fulfill_cx,
+         ParamEnvAnd {
+             param_env,
+             value: goal,
+         }| {
+            let selcx = &mut SelectionContext::new(infcx);
+            let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);
+            let mut obligations = vec![];
+            let answer = traits::normalize_projection_type(
+                selcx,
                 param_env,
-                value: goal,
-            },
-            canonical_inference_vars,
-        ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);
-        let fulfill_cx = &mut FulfillmentContext::new();
-        let selcx = &mut SelectionContext::new(infcx);
-        let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);
-        let mut obligations = vec![];
-        let answer =
-            traits::normalize_projection_type(selcx, param_env, goal, cause, 0, &mut obligations);
-        fulfill_cx.register_predicate_obligations(infcx, obligations);
-
-        // Now that we have fulfilled as much as we can, create a solution
-        // from what we've learned.
-        util::make_query_response(
-            infcx,
-            canonical_inference_vars,
-            NormalizationResult { normalized_ty: answer },
-            fulfill_cx,
-        )
-    })
+                goal,
+                cause,
+                0,
+                &mut obligations,
+            );
+            fulfill_cx.register_predicate_obligations(infcx, obligations);
+            Ok(NormalizationResult {
+                normalized_ty: answer,
+            })
+        },
+    )
 }
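
The hunk above swaps the hand-rolled canonical-query boilerplate (instantiate the goal, build a `FulfillmentContext`, call `util::make_query_response`) for the `enter_canonical_trait_query` closure helper. A minimal sketch of that closure-based shape under stand-in types; the real helper also canonicalizes the result, which is elided here.

```rust
struct InferCtxt;          // stand-in for rustc's inference context
struct FulfillmentContext; // stand-in for the obligation queue

#[derive(Debug)]
struct NoSolution;

// Shared boilerplate lives here; each query only supplies its own body.
fn enter_canonical_trait_query<K, R>(
    key: K,
    op: impl FnOnce(&InferCtxt, &mut FulfillmentContext, K) -> Result<R, NoSolution>,
) -> Result<R, NoSolution> {
    let infcx = InferCtxt;
    let mut fulfill_cx = FulfillmentContext;
    op(&infcx, &mut fulfill_cx, key)
}

fn main() {
    // A `normalize_projection_ty`-shaped caller: the closure receives the
    // instantiated key and returns just the query-specific answer.
    let normalized = enter_canonical_trait_query(
        "<T as Iterator>::Item",
        |_infcx, _fulfill_cx, goal| Ok(format!("normalized({})", goal)),
    );
    assert_eq!(normalized.unwrap(), "normalized(<T as Iterator>::Item)");
}
```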
diff --git a/src/librustc_traits/type_op.rs b/src/librustc_traits/type_op.rs
new file mode 100644 (file)
index 0000000..8fe4290
--- /dev/null
+++ b/src/librustc_traits/type_op.rs
@@ -0,0 +1,127 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::infer::InferCtxt;
+use rustc::traits::query::type_op::eq::Eq;
+use rustc::traits::query::type_op::normalize::Normalize;
+use rustc::traits::query::type_op::prove_predicate::ProvePredicate;
+use rustc::traits::query::type_op::subtype::Subtype;
+use rustc::traits::query::{Fallible, NoSolution};
+use rustc::traits::{FulfillmentContext, Normalized, Obligation, ObligationCause, TraitEngine,
+                    TraitEngineExt};
+use rustc::ty::query::Providers;
+use rustc::ty::{FnSig, Lift, ParamEnvAnd, PolyFnSig, Predicate, Ty, TyCtxt, TypeFoldable};
+use rustc_data_structures::sync::Lrc;
+use std::fmt;
+
+crate fn provide(p: &mut Providers) {
+    *p = Providers {
+        type_op_eq,
+        type_op_prove_predicate,
+        type_op_subtype,
+        type_op_normalize_ty,
+        type_op_normalize_predicate,
+        type_op_normalize_fn_sig,
+        type_op_normalize_poly_fn_sig,
+        ..*p
+    };
+}
+
+fn type_op_eq<'tcx>(
+    tcx: TyCtxt<'_, 'tcx, 'tcx>,
+    canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+    tcx.infer_ctxt()
+        .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+            let (param_env, Eq { a, b }) = key.into_parts();
+            Ok(infcx
+                .at(&ObligationCause::dummy(), param_env)
+                .eq(a, b)?
+                .into_value_registering_obligations(infcx, fulfill_cx))
+        })
+}
+
+fn type_op_normalize<T>(
+    infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    fulfill_cx: &mut FulfillmentContext<'tcx>,
+    key: ParamEnvAnd<'tcx, Normalize<T>>,
+) -> Fallible<T>
+where
+    T: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+{
+    let (param_env, Normalize { value }) = key.into_parts();
+    let Normalized { value, obligations } = infcx
+        .at(&ObligationCause::dummy(), param_env)
+        .normalize(&value)?;
+    fulfill_cx.register_predicate_obligations(infcx, obligations);
+    Ok(value)
+}
+
+fn type_op_normalize_ty(
+    tcx: TyCtxt<'_, 'tcx, 'tcx>,
+    canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Ty<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, Ty<'tcx>>>>, NoSolution> {
+    tcx.infer_ctxt()
+        .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_predicate(
+    tcx: TyCtxt<'_, 'tcx, 'tcx>,
+    canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Predicate<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, Predicate<'tcx>>>>, NoSolution> {
+    tcx.infer_ctxt()
+        .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_fn_sig(
+    tcx: TyCtxt<'_, 'tcx, 'tcx>,
+    canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<FnSig<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, FnSig<'tcx>>>>, NoSolution> {
+    tcx.infer_ctxt()
+        .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_poly_fn_sig(
+    tcx: TyCtxt<'_, 'tcx, 'tcx>,
+    canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<PolyFnSig<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, PolyFnSig<'tcx>>>>, NoSolution> {
+    tcx.infer_ctxt()
+        .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_subtype<'tcx>(
+    tcx: TyCtxt<'_, 'tcx, 'tcx>,
+    canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+    tcx.infer_ctxt()
+        .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+            let (param_env, Subtype { sub, sup }) = key.into_parts();
+            Ok(infcx
+                .at(&ObligationCause::dummy(), param_env)
+                .sup(sup, sub)?
+                .into_value_registering_obligations(infcx, fulfill_cx))
+        })
+}
+
+fn type_op_prove_predicate<'tcx>(
+    tcx: TyCtxt<'_, 'tcx, 'tcx>,
+    canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, ProvePredicate<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+    tcx.infer_ctxt()
+        .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+            let (param_env, ProvePredicate { predicate }) = key.into_parts();
+            fulfill_cx.register_predicate_obligation(
+                infcx,
+                Obligation::new(ObligationCause::dummy(), param_env, predicate),
+            );
+            Ok(())
+        })
+}
diff --git a/src/librustc_traits/util.rs b/src/librustc_traits/util.rs
deleted file mode 100644 (file)
index cdf20bd..0000000
--- a/src/librustc_traits/util.rs
+++ /dev/null
@@ -1,123 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::infer::InferCtxt;
-use rustc::infer::canonical::{CanonicalVarValues, Canonicalize, Certainty, QueryResult};
-use rustc::infer::region_constraints::{Constraint, RegionConstraintData};
-use rustc::traits::{FulfillmentContext, TraitEngine};
-use rustc::traits::query::NoSolution;
-use rustc::ty;
-use std::fmt::Debug;
-
-/// The canonicalization form of `QueryResult<'tcx, T>`.
-type CanonicalizedQueryResult<'gcx, 'tcx, T> =
-    <QueryResult<'tcx, T> as Canonicalize<'gcx, 'tcx>>::Canonicalized;
-
-crate fn make_query_response<'gcx, 'tcx, T>(
-    infcx: &InferCtxt<'_, 'gcx, 'tcx>,
-    inference_vars: CanonicalVarValues<'tcx>,
-    answer: T,
-    fulfill_cx: &mut FulfillmentContext<'tcx>,
-) -> Result<CanonicalizedQueryResult<'gcx, 'tcx, T>, NoSolution>
-where
-    T: Debug,
-    QueryResult<'tcx, T>: Canonicalize<'gcx, 'tcx>,
-{
-    let tcx = infcx.tcx;
-
-    debug!(
-        "make_query_response(\
-         inference_vars={:?}, \
-         answer={:?})",
-        inference_vars, answer,
-    );
-
-    // Select everything, returning errors.
-    let true_errors = match fulfill_cx.select_where_possible(infcx) {
-        Ok(()) => vec![],
-        Err(errors) => errors,
-    };
-    debug!("true_errors = {:#?}", true_errors);
-
-    if !true_errors.is_empty() {
-        // FIXME -- we don't indicate *why* we failed to solve
-        debug!("make_query_response: true_errors={:#?}", true_errors);
-        return Err(NoSolution);
-    }
-
-    // Anything left unselected *now* must be an ambiguity.
-    let ambig_errors = match fulfill_cx.select_all_or_error(infcx) {
-        Ok(()) => vec![],
-        Err(errors) => errors,
-    };
-    debug!("ambig_errors = {:#?}", ambig_errors);
-
-    let region_obligations = infcx.take_registered_region_obligations();
-
-    let region_constraints = infcx.with_region_constraints(|region_constraints| {
-        let RegionConstraintData {
-            constraints,
-            verifys,
-            givens,
-        } = region_constraints;
-
-        assert!(verifys.is_empty());
-        assert!(givens.is_empty());
-
-        let mut outlives: Vec<_> = constraints
-            .into_iter()
-            .map(|(k, _)| match *k {
-                // Swap regions because we are going from sub (<=) to outlives
-                // (>=).
-                Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
-                    tcx.mk_region(ty::ReVar(v2)).into(),
-                    tcx.mk_region(ty::ReVar(v1)),
-                ),
-                Constraint::VarSubReg(v1, r2) => {
-                    ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
-                }
-                Constraint::RegSubVar(r1, v2) => {
-                    ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
-                }
-                Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
-            })
-            .map(ty::Binder::dummy) // no bound regions in the code above
-            .collect();
-
-        outlives.extend(
-            region_obligations
-                .into_iter()
-                .map(|(_, r_o)| ty::OutlivesPredicate(r_o.sup_type.into(), r_o.sub_region))
-                .map(ty::Binder::dummy) // no bound regions in the code above
-        );
-
-        outlives
-    });
-
-    let certainty = if ambig_errors.is_empty() {
-        Certainty::Proven
-    } else {
-        Certainty::Ambiguous
-    };
-
-    let (canonical_result, _) = infcx.canonicalize_response(&QueryResult {
-        var_values: inference_vars,
-        region_constraints,
-        certainty,
-        value: answer,
-    });
-
-    debug!(
-        "make_query_response: canonical_result = {:#?}",
-        canonical_result
-    );
-
-    Ok(canonical_result)
-}
index f3912c3042d7fceef539782e094ed0dcf2aa18d4..762dc5d26f5a4f3a02972740a0b3b8580f763c21 100644 (file)
@@ -83,7 +83,7 @@ fn projected_ty_from_poly_trait_ref(&self,
 }
 
 struct ConvertedBinding<'tcx> {
-    item_name: ast::Name,
+    item_name: ast::Ident,
     ty: Ty<'tcx>,
     span: Span,
 }
@@ -342,7 +342,7 @@ fn create_substs_for_ast_path(&self,
 
         let assoc_bindings = generic_args.bindings.iter().map(|binding| {
             ConvertedBinding {
-                item_name: binding.name,
+                item_name: binding.ident,
                 ty: self.ast_ty_to_ty(&binding.ty),
                 span: binding.span,
             }
@@ -485,12 +485,12 @@ fn create_substs_for_ast_trait_ref(&self,
 
     fn trait_defines_associated_type_named(&self,
                                            trait_def_id: DefId,
-                                           assoc_name: ast::Name)
+                                           assoc_name: ast::Ident)
                                            -> bool
     {
         self.tcx().associated_items(trait_def_id).any(|item| {
             item.kind == ty::AssociatedKind::Type &&
-            self.tcx().hygienic_eq(assoc_name, item.name, trait_def_id)
+            self.tcx().hygienic_eq(assoc_name, item.ident, trait_def_id)
         })
     }
 
@@ -569,9 +569,9 @@ fn ast_type_binding_to_poly_projection_predicate(
         }?;
 
         let (assoc_ident, def_scope) =
-            tcx.adjust_ident(binding.item_name.to_ident(), candidate.def_id(), ref_id);
+            tcx.adjust_ident(binding.item_name, candidate.def_id(), ref_id);
         let assoc_ty = tcx.associated_items(candidate.def_id()).find(|i| {
-            i.kind == ty::AssociatedKind::Type && i.name.to_ident() == assoc_ident
+            i.kind == ty::AssociatedKind::Type && i.ident.modern() == assoc_ident
         }).expect("missing associated type");
 
         if !assoc_ty.vis.is_accessible_from(def_scope, tcx) {
@@ -711,10 +711,10 @@ fn conv_object_ty_poly_trait_ref(&self,
             let trait_def_id = assoc_item.container.id();
             struct_span_err!(tcx.sess, span, E0191,
                 "the value of the associated type `{}` (from the trait `{}`) must be specified",
-                        assoc_item.name,
+                        assoc_item.ident,
                         tcx.item_path_str(trait_def_id))
                         .span_label(span, format!(
-                            "missing associated type `{}` value", assoc_item.name))
+                            "missing associated type `{}` value", assoc_item.ident))
                         .emit();
         }
 
@@ -778,7 +778,7 @@ fn report_ambiguous_associated_type(&self,
     // any ambiguity.
     fn find_bound_for_assoc_item(&self,
                                  ty_param_def_id: DefId,
-                                 assoc_name: ast::Name,
+                                 assoc_name: ast::Ident,
                                  span: Span)
                                  -> Result<ty::PolyTraitRef<'tcx>, ErrorReported>
     {
@@ -807,7 +807,7 @@ fn find_bound_for_assoc_item(&self,
     fn one_bound_for_assoc_type<I>(&self,
                                 mut bounds: I,
                                 ty_param_name: &str,
-                                assoc_name: ast::Name,
+                                assoc_name: ast::Ident,
                                 span: Span)
         -> Result<ty::PolyTraitRef<'tcx>, ErrorReported>
         where I: Iterator<Item=ty::PolyTraitRef<'tcx>>
@@ -837,7 +837,7 @@ fn one_bound_for_assoc_type<I>(&self,
             for bound in bounds {
                 let bound_span = self.tcx().associated_items(bound.def_id()).find(|item| {
                     item.kind == ty::AssociatedKind::Type &&
-                    self.tcx().hygienic_eq(assoc_name, item.name, bound.def_id())
+                    self.tcx().hygienic_eq(assoc_name, item.ident, bound.def_id())
                 })
                 .and_then(|item| self.tcx().hir.span_if_local(item.def_id));
 
@@ -873,7 +873,7 @@ pub fn associated_path_def_to_ty(&self,
                                      -> (Ty<'tcx>, Def)
     {
         let tcx = self.tcx();
-        let assoc_name = item_segment.name;
+        let assoc_name = item_segment.ident;
 
         debug!("associated_path_def_to_ty: {:?}::{}", ty, assoc_name);
 
@@ -895,8 +895,7 @@ pub fn associated_path_def_to_ty(&self,
 
                 let candidates =
                     traits::supertraits(tcx, ty::Binder::bind(trait_ref))
-                    .filter(|r| self.trait_defines_associated_type_named(r.def_id(),
-                                                                         assoc_name));
+                    .filter(|r| self.trait_defines_associated_type_named(r.def_id(), assoc_name));
 
                 match self.one_bound_for_assoc_type(candidates, "Self", assoc_name, span) {
                     Ok(bound) => bound,
@@ -923,10 +922,10 @@ pub fn associated_path_def_to_ty(&self,
         };
 
         let trait_did = bound.def_id();
-        let (assoc_ident, def_scope) = tcx.adjust_ident(assoc_name.to_ident(), trait_did, ref_id);
+        let (assoc_ident, def_scope) = tcx.adjust_ident(assoc_name, trait_did, ref_id);
         let item = tcx.associated_items(trait_did).find(|i| {
             Namespace::from(i.kind) == Namespace::Type &&
-            i.name.to_ident() == assoc_ident
+            i.ident.modern() == assoc_ident
         })
         .expect("missing associated type");
 
@@ -963,7 +962,7 @@ fn qpath_to_ty(&self,
             self.report_ambiguous_associated_type(span,
                                                   "Type",
                                                   &path_str,
-                                                  &item_segment.name.as_str());
+                                                  &item_segment.ident.as_str());
             return tcx.types.err;
         };
 
index 4274e5c1e1f7562d75d70d54059f5f758b3c68ac..09562614982b81eda5c9878a210310c2a1dd8b51 100644 (file)
@@ -21,7 +21,7 @@
 use rustc::ty::adjustment::{Adjustment, Adjust, OverloadedDeref};
 
 use syntax_pos::Span;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
 
 use std::iter;
 
@@ -134,7 +134,7 @@ fn overloaded_deref_ty(&mut self, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
                                                               ty::ProjectionTy::from_ref_and_name(
                                                                   tcx,
                                                                   trait_ref,
-                                                                  Symbol::intern("Target"),
+                                                                  Ident::from_str("Target"),
                                                               ),
                                                               cause,
                                                               0,
index b0f9a4c8b85adee737418d841f3b118a29c6d69c..383820a62bffe4e849b62c6c21efe64c517fb1f9 100644 (file)
@@ -18,7 +18,7 @@
 use rustc::ty::{self, TyCtxt, TypeFoldable, Ty};
 use rustc::ty::adjustment::{Adjustment, Adjust, AllowTwoPhase, AutoBorrow, AutoBorrowMutability};
 use rustc_target::spec::abi;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
 use syntax_pos::Span;
 
 use rustc::hir;
@@ -157,9 +157,9 @@ fn try_overloaded_call_traits(&self,
                                              MethodCallee<'tcx>)> {
         // Try the options that are least restrictive on the caller first.
         for &(opt_trait_def_id, method_name, borrow) in
-            &[(self.tcx.lang_items().fn_trait(), Symbol::intern("call"), true),
-              (self.tcx.lang_items().fn_mut_trait(), Symbol::intern("call_mut"), true),
-              (self.tcx.lang_items().fn_once_trait(), Symbol::intern("call_once"), false)] {
+            &[(self.tcx.lang_items().fn_trait(), Ident::from_str("call"), true),
+              (self.tcx.lang_items().fn_mut_trait(), Ident::from_str("call_mut"), true),
+              (self.tcx.lang_items().fn_once_trait(), Ident::from_str("call_once"), false)] {
             let trait_def_id = match opt_trait_def_id {
                 Some(def_id) => def_id,
                 None => continue,
index 76219c6971b22773ba7024b23a61f1ad01feefd4..e276dcff0601dca195a53556bd141d68365b28bc 100644 (file)
@@ -1203,9 +1203,14 @@ fn coerce_inner<'a>(&mut self,
                                       "supposed to be part of a block tail expression, but the \
                                        expression is empty");
                         });
-                        fcx.suggest_mismatched_types_on_tail(&mut db, expr,
-                                                             expected, found,
-                                                             cause.span, blk_id);
+                        fcx.suggest_mismatched_types_on_tail(
+                            &mut db,
+                            expr,
+                            expected,
+                            found,
+                            cause.span,
+                            blk_id,
+                        );
                     }
                     _ => {
                         db = fcx.report_mismatched_types(cause, expected, found, err);
index 5f8955612e10cb00614bf34c15aae479d6eab212..04c11d30d26791b10f81f88f64746953156a49ff 100644 (file)
@@ -100,7 +100,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         span: impl_m_span,
         body_id: impl_m_node_id,
         code: ObligationCauseCode::CompareImplMethodObligation {
-            item_name: impl_m.name,
+            item_name: impl_m.ident.name,
             impl_item_def_id: impl_m.def_id,
             trait_item_def_id: trait_m.def_id,
         },
@@ -318,7 +318,7 @@ fn compare_predicate_entailment<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                             cause.span(&tcx),
                                             E0053,
                                             "method `{}` has an incompatible type for trait",
-                                            trait_m.name);
+                                            trait_m.ident);
 
             infcx.note_type_err(&mut diag,
                                 &cause,
@@ -383,7 +383,7 @@ fn check_region_bounds_on_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                        E0195,
                                        "lifetime parameters or bounds on method `{}` do not match \
                                         the trait declaration",
-                                       impl_m.name);
+                                       impl_m.ident);
         err.span_label(span, "lifetimes do not match method in trait");
         if let Some(sp) = tcx.hir.span_if_local(trait_m.def_id) {
             err.span_label(tcx.sess.codemap().def_span(sp),
@@ -529,13 +529,13 @@ fn compare_self_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                            E0185,
                                            "method `{}` has a `{}` declaration in the impl, but \
                                             not in the trait",
-                                           trait_m.name,
+                                           trait_m.ident,
                                            self_descr);
             err.span_label(impl_m_span, format!("`{}` used in impl", self_descr));
             if let Some(span) = tcx.hir.span_if_local(trait_m.def_id) {
                 err.span_label(span, format!("trait method declared without `{}`", self_descr));
             } else {
-                err.note_trait_signature(trait_m.name.to_string(),
+                err.note_trait_signature(trait_m.ident.to_string(),
                                          trait_m.signature(&tcx));
             }
             err.emit();
@@ -549,13 +549,13 @@ fn compare_self_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                            E0186,
                                            "method `{}` has a `{}` declaration in the trait, but \
                                             not in the impl",
-                                           trait_m.name,
+                                           trait_m.ident,
                                            self_descr);
             err.span_label(impl_m_span, format!("expected `{}` in impl", self_descr));
             if let Some(span) = tcx.hir.span_if_local(trait_m.def_id) {
                 err.span_label(span, format!("`{}` used in trait", self_descr));
             } else {
-                err.note_trait_signature(trait_m.name.to_string(),
+                err.note_trait_signature(trait_m.ident.to_string(),
                                          trait_m.signature(&tcx));
             }
             err.emit();
@@ -590,7 +590,7 @@ fn compare_number_of_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                        E0049,
                                        "method `{}` has {} type parameter{} but its trait \
                                         declaration has {} type parameter{}",
-                                       trait_m.name,
+                                       trait_m.ident,
                                        num_impl_m_type_params,
                                        if num_impl_m_type_params == 1 { "" } else { "s" },
                                        num_trait_m_type_params,
@@ -681,7 +681,7 @@ fn compare_number_of_method_arguments<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                        E0050,
                                        "method `{}` has {} parameter{} but the declaration in \
                                         trait `{}` has {}",
-                                       trait_m.name,
+                                       trait_m.ident,
                                        impl_number_args,
                                        if impl_number_args == 1 { "" } else { "s" },
                                        tcx.item_path_str(trait_m.def_id),
@@ -695,7 +695,7 @@ trait `{}` has {}",
                                         format!("{} parameter", trait_number_args)
                                     }));
         } else {
-            err.note_trait_signature(trait_m.name.to_string(),
+            err.note_trait_signature(trait_m.ident.to_string(),
                                      trait_m.signature(&tcx));
         }
         err.span_label(impl_span,
@@ -748,7 +748,7 @@ fn compare_synthetic_generics<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                            impl_span,
                                            E0643,
                                            "method `{}` has incompatible signature for trait",
-                                           trait_m.name);
+                                           trait_m.ident);
             err.span_label(trait_span, "declaration in trait here");
             match (impl_synthetic, trait_synthetic) {
                 // The case where the impl method uses `impl Trait` but the trait method uses
@@ -948,7 +948,7 @@ pub fn compare_const_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                                             E0326,
                                             "implemented const `{}` has an incompatible type for \
                                              trait",
-                                            trait_c.name);
+                                            trait_c.ident);
 
             let trait_c_node_id = tcx.hir.as_local_node_id(trait_c.def_id);
             let trait_c_span = trait_c_node_id.map(|trait_c_node_id| {
index 5b922af821cc228c718f7a77342c36ca42ccd41e..08d8dd2e498b68ff67c437929978b883bd124abe 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use std::iter;
-
 use check::FnCtxt;
 use rustc::infer::InferOk;
 use rustc::traits::ObligationCause;
@@ -140,25 +138,12 @@ pub fn demand_coerce_diag(&self,
             }
         }
 
-        if let Some((sp, msg, suggestion)) = self.check_ref(expr, checked_ty, expected) {
-            err.span_suggestion(sp, msg, suggestion);
-        } else if !self.check_for_cast(&mut err, expr, expr_ty, expected) {
-            let methods = self.get_conversion_methods(expr.span, expected, checked_ty);
-            if let Ok(expr_text) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
-                let suggestions = iter::repeat(expr_text).zip(methods.iter())
-                    .map(|(receiver, method)| format!("{}.{}()", receiver, method.name))
-                    .collect::<Vec<_>>();
-                if !suggestions.is_empty() {
-                    err.span_suggestions(expr.span,
-                                         "try using a conversion method",
-                                         suggestions);
-                }
-            }
-        }
+        self.suggest_ref_or_into(&mut err, expr, expected, expr_ty);
+
         (expected, Some(err))
     }
 
-    fn get_conversion_methods(&self, span: Span, expected: Ty<'tcx>, checked_ty: Ty<'tcx>)
+    pub fn get_conversion_methods(&self, span: Span, expected: Ty<'tcx>, checked_ty: Ty<'tcx>)
                               -> Vec<AssociatedItem> {
         let mut methods = self.probe_for_return_type(span,
                                                      probe::Mode::MethodCall,
@@ -226,7 +211,7 @@ fn can_use_as_ref(&self, expr: &hir::Expr) -> Option<(Span, &'static str, String
                     })), 1) = (self.tcx.hir.find(parent), decl.inputs.len()) {
                         let self_ty = self.tables.borrow().node_id_to_type(expr[0].hir_id);
                         let self_ty = format!("{:?}", self_ty);
-                        let name = path.name.as_str();
+                        let name = path.ident.as_str();
                         let is_as_ref_able = (
                             self_ty.starts_with("&std::option::Option") ||
                             self_ty.starts_with("&std::result::Result") ||
@@ -261,19 +246,24 @@ fn can_use_as_ref(&self, expr: &hir::Expr) -> Option<(Span, &'static str, String
     /// In addition of this check, it also checks between references mutability state. If the
     /// expected is mutable but the provided isn't, maybe we could just say "Hey, try with
     /// `&mut`!".
-    fn check_ref(&self,
+    pub fn check_ref(&self,
                  expr: &hir::Expr,
                  checked_ty: Ty<'tcx>,
                  expected: Ty<'tcx>)
                  -> Option<(Span, &'static str, String)> {
-        let sp = expr.span;
+        let cm = self.sess().codemap();
+        // Use the callsite's span if this is a macro call. #41858
+        let sp = cm.call_span_if_macro(expr.span);
+        if !cm.span_to_filename(sp).is_real() {
+            return None;
+        }
+
         match (&expected.sty, &checked_ty.sty) {
             (&ty::TyRef(_, exp, _), &ty::TyRef(_, check, _)) => match (&exp.sty, &check.sty) {
                 (&ty::TyStr, &ty::TyArray(arr, _)) |
                 (&ty::TyStr, &ty::TySlice(arr)) if arr == self.tcx.types.u8 => {
                     if let hir::ExprLit(_) = expr.node {
-                        let sp = self.sess().codemap().call_span_if_macro(expr.span);
-                        if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(sp) {
+                        if let Ok(src) = cm.span_to_snippet(sp) {
                             return Some((sp,
                                          "consider removing the leading `b`",
                                          src[1..].to_string()));
@@ -283,8 +273,7 @@ fn check_ref(&self,
                 (&ty::TyArray(arr, _), &ty::TyStr) |
                 (&ty::TySlice(arr), &ty::TyStr) if arr == self.tcx.types.u8 => {
                     if let hir::ExprLit(_) = expr.node {
-                        let sp = self.sess().codemap().call_span_if_macro(expr.span);
-                        if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(sp) {
+                        if let Ok(src) = cm.span_to_snippet(sp) {
                             return Some((sp,
                                          "consider adding a leading `b`",
                                          format!("b{}", src)));
@@ -311,9 +300,7 @@ fn check_ref(&self,
                                                        checked_ty),
                 };
                 if self.can_coerce(ref_ty, expected) {
-                    // Use the callsite's span if this is a macro call. #41858
-                    let sp = self.sess().codemap().call_span_if_macro(expr.span);
-                    if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(sp) {
+                    if let Ok(src) = cm.span_to_snippet(sp) {
                         let sugg_expr = match expr.node { // parenthesize if needed (Issue #46756)
                             hir::ExprCast(_, _) | hir::ExprBinary(_, _, _) => format!("({})", src),
                             _ => src,
@@ -342,11 +329,14 @@ fn check_ref(&self,
                 // a macro; if so, it's hard to extract the text and make a good
                 // suggestion, so don't bother.)
                 if self.infcx.can_sub(self.param_env, checked, &expected).is_ok() &&
-                   expr.span.ctxt().outer().expn_info().is_none() {
+                   sp.ctxt().outer().expn_info().is_none() {
                     match expr.node {
                         // Maybe remove `&`?
                         hir::ExprAddrOf(_, ref expr) => {
-                            if let Ok(code) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
+                            if !cm.span_to_filename(expr.span).is_real() {
+                                return None;
+                            }
+                            if let Ok(code) = cm.span_to_snippet(expr.span) {
                                 return Some((sp, "consider removing the borrow", code));
                             }
                         }
@@ -355,9 +345,9 @@ fn check_ref(&self,
                         _ => {
                             if !self.infcx.type_moves_by_default(self.param_env,
                                                                 checked,
-                                                                expr.span) {
-                                let sp = self.sess().codemap().call_span_if_macro(expr.span);
-                                if let Ok(code) = self.tcx.sess.codemap().span_to_snippet(sp) {
+                                                                sp) {
+                                let sp = cm.call_span_if_macro(sp);
+                                if let Ok(code) = cm.span_to_snippet(sp) {
                                     return Some((sp,
                                                  "consider dereferencing the borrow",
                                                  format!("*{}", code)));
@@ -372,7 +362,7 @@ fn check_ref(&self,
         None
     }
 
-    fn check_for_cast(&self,
+    pub fn check_for_cast(&self,
                       err: &mut DiagnosticBuilder<'tcx>,
                       expr: &hir::Expr,
                       checked_ty: Ty<'tcx>,
index b8aa39a202bee18c054f22fd26629e745550f24f..f9166851f6fcc75bfac80b0f48c39bd64b916310 100644 (file)
@@ -16,7 +16,7 @@
 use rustc::middle::region;
 use rustc::ty::subst::{Subst, Substs, UnpackedKind};
 use rustc::ty::{self, Ty, TyCtxt};
-use rustc::traits::{ObligationCause, TraitEngine};
+use rustc::traits::{ObligationCause, TraitEngine, TraitEngineExt};
 use util::common::ErrorReported;
 
 use syntax::ast;
index a51876d7960b0bd46ba575d7ab0980007779452b..04c32fa88271ad24c7a4c45bafeac4c63c222951 100644 (file)
@@ -120,7 +120,7 @@ pub fn method_exists(&self,
                          allow_private: bool)
                          -> bool {
         let mode = probe::Mode::MethodCall;
-        match self.probe_for_name(method_name.span, mode, method_name.name,
+        match self.probe_for_name(method_name.span, mode, method_name,
                                   IsSuggestion(false), self_ty, call_expr_id,
                                   ProbeScope::TraitsInScope) {
             Ok(..) => true,
@@ -157,14 +157,14 @@ pub fn lookup_method(&self,
                          self_expr: &'gcx hir::Expr)
                          -> Result<MethodCallee<'tcx>, MethodError<'tcx>> {
         debug!("lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})",
-               segment.name,
+               segment.ident,
                self_ty,
                call_expr,
                self_expr);
 
         let pick = self.lookup_probe(
             span,
-            segment.name,
+            segment.ident,
             self_ty,
             call_expr,
             ProbeScope::TraitsInScope
@@ -192,7 +192,7 @@ pub fn lookup_method(&self,
             // We probe again, taking all traits into account (not only those in scope).
             let candidates =
                 match self.lookup_probe(span,
-                                        segment.name,
+                                        segment.ident,
                                         self_ty,
                                         call_expr,
                                         ProbeScope::AllTraits) {
@@ -222,7 +222,7 @@ pub fn lookup_method(&self,
 
     fn lookup_probe(&self,
                     span: Span,
-                    method_name: ast::Name,
+                    method_name: ast::Ident,
                     self_ty: Ty<'tcx>,
                     call_expr: &'gcx hir::Expr,
                     scope: ProbeScope)
@@ -244,7 +244,7 @@ fn lookup_probe(&self,
     /// of this method is basically the same as confirmation.
     pub fn lookup_method_in_trait(&self,
                                   span: Span,
-                                  m_name: ast::Name,
+                                  m_name: ast::Ident,
                                   trait_def_id: DefId,
                                   self_ty: Ty<'tcx>,
                                   opt_input_types: Option<&[Ty<'tcx>]>)
@@ -289,7 +289,8 @@ pub fn lookup_method_in_trait(&self,
         // Trait must have a method named `m_name` and it should not have
         // type parameters or early-bound regions.
         let tcx = self.tcx;
-        let method_item = self.associated_item(trait_def_id, m_name, Namespace::Value).unwrap();
+        let method_item =
+            self.associated_item(trait_def_id, m_name, Namespace::Value).unwrap();
         let def_id = method_item.def_id;
         let generics = tcx.generics_of(def_id);
         assert_eq!(generics.params.len(), 0);
@@ -362,7 +363,7 @@ pub fn lookup_method_in_trait(&self,
 
     pub fn resolve_ufcs(&self,
                         span: Span,
-                        method_name: ast::Name,
+                        method_name: ast::Ident,
                         self_ty: Ty<'tcx>,
                         expr_id: ast::NodeId)
                         -> Result<Def, MethodError<'tcx>> {
@@ -385,10 +386,11 @@ pub fn resolve_ufcs(&self,
 
     /// Find item with name `item_name` defined in impl/trait `def_id`
     /// and return it, or `None`, if no such item was defined there.
-    pub fn associated_item(&self, def_id: DefId, item_name: ast::Name, ns: Namespace)
+    pub fn associated_item(&self, def_id: DefId, item_name: ast::Ident, ns: Namespace)
                            -> Option<ty::AssociatedItem> {
-        self.tcx.associated_items(def_id)
-                .find(|item| Namespace::from(item.kind) == ns &&
-                             self.tcx.hygienic_eq(item_name, item.name, def_id))
+        self.tcx.associated_items(def_id).find(|item| {
+            Namespace::from(item.kind) == ns &&
+            self.tcx.hygienic_eq(item_name, item.ident, def_id)
+        })
     }
 }
index 834c7d4e9e391a01ab734eff34eb5ffd3f444545..0aff322a29a6debf58dbfeea5b82b86f0d1e80fa 100644 (file)
@@ -47,7 +47,7 @@ struct ProbeContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
     fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
     span: Span,
     mode: Mode,
-    method_name: Option<ast::Name>,
+    method_name: Option<ast::Ident>,
     return_type: Option<Ty<'tcx>>,
     steps: Rc<Vec<CandidateStep<'tcx>>>,
     inherent_candidates: Vec<Candidate<'tcx>>,
@@ -213,7 +213,7 @@ pub fn probe_for_return_type(&self,
     pub fn probe_for_name(&self,
                           span: Span,
                           mode: Mode,
-                          item_name: ast::Name,
+                          item_name: ast::Ident,
                           is_suggestion: IsSuggestion,
                           self_ty: Ty<'tcx>,
                           scope_expr_id: ast::NodeId,
@@ -237,7 +237,7 @@ pub fn probe_for_name(&self,
     fn probe_op<OP,R>(&'a self,
                       span: Span,
                       mode: Mode,
-                      method_name: Option<ast::Name>,
+                      method_name: Option<ast::Ident>,
                       return_type: Option<Ty<'tcx>>,
                       is_suggestion: IsSuggestion,
                       self_ty: Ty<'tcx>,
@@ -382,7 +382,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
     fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
            span: Span,
            mode: Mode,
-           method_name: Option<ast::Name>,
+           method_name: Option<ast::Ident>,
            return_type: Option<Ty<'tcx>>,
            steps: Rc<Vec<CandidateStep<'tcx>>>,
            is_suggestion: IsSuggestion)
@@ -422,8 +422,7 @@ fn push_candidate(&mut self,
     {
         let is_accessible = if let Some(name) = self.method_name {
             let item = candidate.item;
-            let def_scope =
-                self.tcx.adjust_ident(name.to_ident(), item.container.id(), self.body_id).1;
+            let def_scope = self.tcx.adjust_ident(name, item.container.id(), self.body_id).1;
             item.vis.is_accessible_from(def_scope, self.tcx)
         } else {
             true
@@ -799,7 +798,7 @@ fn assemble_extension_candidates_for_trait(&mut self,
         Ok(())
     }
 
-    fn candidate_method_names(&self) -> Vec<ast::Name> {
+    fn candidate_method_names(&self) -> Vec<ast::Ident> {
         let mut set = FxHashSet();
         let mut names: Vec<_> = self.inherent_candidates
             .iter()
@@ -811,7 +810,7 @@ fn candidate_method_names(&self) -> Vec<ast::Name> {
                     true
                 }
             })
-            .map(|candidate| candidate.item.name)
+            .map(|candidate| candidate.item.ident)
             .filter(|&name| set.insert(name))
             .collect();
 
@@ -1310,14 +1309,14 @@ fn probe_for_lev_candidate(&mut self) -> Result<Option<ty::AssociatedItem>, Meth
                 Ok(None)
             } else {
                 let best_name = {
-                    let names = applicable_close_candidates.iter().map(|cand| &cand.name);
+                    let names = applicable_close_candidates.iter().map(|cand| &cand.ident.name);
                     find_best_match_for_name(names,
                                              &self.method_name.unwrap().as_str(),
                                              None)
                 }.unwrap();
                 Ok(applicable_close_candidates
                    .into_iter()
-                   .find(|method| method.name == best_name))
+                   .find(|method| method.ident.name == best_name))
             }
         })
     }
@@ -1457,7 +1456,7 @@ fn impl_or_trait_item(&self, def_id: DefId) -> Vec<ty::AssociatedItem> {
                 let max_dist = max(name.as_str().len(), 3) / 3;
                 self.tcx.associated_items(def_id)
                     .filter(|x| {
-                        let dist = lev_distance(&*name.as_str(), &x.name.as_str());
+                        let dist = lev_distance(&*name.as_str(), &x.ident.as_str());
                         Namespace::from(x.kind) == Namespace::Value && dist > 0
                         && dist <= max_dist
                     })
index 546ce34fe12ae864a7e8300565513e796ab8f323..35d237d94de8251a12a5a3295d31f43ab507a019 100644 (file)
@@ -77,7 +77,7 @@ fn is_fn_ty(&self, ty: &Ty<'tcx>, span: Span) -> bool {
     pub fn report_method_error(&self,
                                span: Span,
                                rcvr_ty: Ty<'tcx>,
-                               item_name: ast::Name,
+                               item_name: ast::Ident,
                                rcvr_expr: Option<&hir::Expr>,
                                error: MethodError<'tcx>,
                                args: Option<&'gcx [hir::Expr]>) {
@@ -340,8 +340,7 @@ pub fn report_method_error(&self,
                         match ty.sty {
                             ty::TyAdt(def, substs) if !def.is_enum() => {
                                 let variant = &def.non_enum_variant();
-                                if let Some(index) =
-                                        self.tcx.find_field_index(item_name.to_ident(), variant) {
+                                if let Some(index) = self.tcx.find_field_index(item_name, variant) {
                                     let field = &variant.fields[index];
                                     let snippet = tcx.sess.codemap().span_to_snippet(expr.span);
                                     let expr_string = match snippet {
@@ -393,7 +392,7 @@ macro_rules! report_function {
                             report_function!(expr.span, expr_string);
                         } else if let hir::ExprPath(hir::QPath::Resolved(_, ref path)) = expr.node {
                             if let Some(segment) = path.segments.last() {
-                                report_function!(expr.span, segment.name);
+                                report_function!(expr.span, segment.ident);
                             }
                         }
                     }
@@ -445,7 +444,7 @@ macro_rules! report_function {
                 }
 
                 if let Some(lev_candidate) = lev_candidate {
-                    err.help(&format!("did you mean `{}`?", lev_candidate.name));
+                    err.help(&format!("did you mean `{}`?", lev_candidate.ident));
                 }
                 err.emit();
             }
@@ -565,7 +564,7 @@ fn suggest_traits_to_import(&self,
                                 err: &mut DiagnosticBuilder,
                                 span: Span,
                                 rcvr_ty: Ty<'tcx>,
-                                item_name: ast::Name,
+                                item_name: ast::Ident,
                                 rcvr_expr: Option<&hir::Expr>,
                                 valid_out_of_scope_traits: Vec<DefId>) {
         if self.suggest_valid_traits(err, valid_out_of_scope_traits) {
index 889073f6b4ca96a0ba5a9bfa67522c2176c73ce0..0185d00518699b0b53cc09681b665ce8dba83034 100644 (file)
 use std::collections::hash_map::Entry;
 use std::cmp;
 use std::fmt::Display;
+use std::iter;
 use std::mem::replace;
 use std::ops::{self, Deref};
 use rustc_target::spec::abi::Abi;
@@ -960,14 +961,14 @@ fn visit_local(&mut self, local: &'gcx hir::Local) {
 
     // Add pattern bindings.
     fn visit_pat(&mut self, p: &'gcx hir::Pat) {
-        if let PatKind::Binding(_, _, ref path1, _) = p.node {
+        if let PatKind::Binding(_, _, ident, _) = p.node {
             let var_ty = self.assign(p.span, p.id, None);
 
             self.fcx.require_type_is_sized(var_ty, p.span,
                                            traits::VariableType(p.id));
 
             debug!("Pattern binding {} is assigned to {} with type {:?}",
-                   path1.node,
+                   ident,
                    self.fcx.ty_to_string(
                        self.fcx.locals.borrow().get(&p.id).unwrap().clone()),
                    var_ty);
@@ -1050,7 +1051,7 @@ fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>,
         // The check for a non-trivial pattern is a hack to avoid duplicate warnings
         // for simple cases like `fn foo(x: Trait)`,
         // where we would error once on the parameter as a whole, and once on the binding `x`.
-        if arg.pat.simple_name().is_none() {
+        if arg.pat.simple_ident().is_none() {
             fcx.require_type_is_sized(arg_ty, decl.output.span(), traits::MiscObligation);
         }
 
@@ -1333,15 +1334,15 @@ fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         tcx.sess, impl_item.span, E0520,
         "`{}` specializes an item from a parent `impl`, but \
          that item is not marked `default`",
-        impl_item.name);
+        impl_item.ident);
     err.span_label(impl_item.span, format!("cannot specialize default item `{}`",
-                                            impl_item.name));
+                                            impl_item.ident));
 
     match tcx.span_of_impl(parent_impl) {
         Ok(span) => {
             err.span_label(span, "parent `impl` is here");
             err.note(&format!("to specialize, `{}` in the parent `impl` must be marked `default`",
-                              impl_item.name));
+                              impl_item.ident));
         }
         Err(cname) => {
             err.note(&format!("parent implementation is in crate `{}`", cname));
@@ -1365,7 +1366,7 @@ fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         hir::ImplItemKind::Type(_) => ty::AssociatedKind::Type
     };
 
-    let parent = ancestors.defs(tcx, trait_item.name, kind, trait_def.def_id).skip(1).next()
+    let parent = ancestors.defs(tcx, trait_item.ident, kind, trait_def.def_id).skip(1).next()
         .map(|node_item| node_item.map(|parent| parent.defaultness));
 
     if let Some(parent) = parent {
@@ -1400,11 +1401,11 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let ty_impl_item = tcx.associated_item(tcx.hir.local_def_id(impl_item.id));
         let ty_trait_item = tcx.associated_items(impl_trait_ref.def_id)
             .find(|ac| Namespace::from(&impl_item.node) == Namespace::from(ac.kind) &&
-                         tcx.hygienic_eq(ty_impl_item.name, ac.name, impl_trait_ref.def_id))
+                       tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id))
             .or_else(|| {
                 // Not compatible, but needed for the error message
                 tcx.associated_items(impl_trait_ref.def_id)
-                   .find(|ac| tcx.hygienic_eq(ty_impl_item.name, ac.name, impl_trait_ref.def_id))
+                   .find(|ac| tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id))
             });
 
         // Check that impl definition matches trait definition
@@ -1422,7 +1423,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                          let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323,
                                   "item `{}` is an associated const, \
                                   which doesn't match its trait `{}`",
-                                  ty_impl_item.name,
+                                  ty_impl_item.ident,
                                   impl_trait_ref);
                          err.span_label(impl_item.span, "does not match trait");
                          // We can only get the spans from local trait definition
@@ -1446,7 +1447,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                         let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324,
                                   "item `{}` is an associated method, \
                                   which doesn't match its trait `{}`",
-                                  ty_impl_item.name,
+                                  ty_impl_item.ident,
                                   impl_trait_ref);
                          err.span_label(impl_item.span, "does not match trait");
                          if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
@@ -1464,7 +1465,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                         let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325,
                                   "item `{}` is an associated type, \
                                   which doesn't match its trait `{}`",
-                                  ty_impl_item.name,
+                                  ty_impl_item.ident,
                                   impl_trait_ref);
                          err.span_label(impl_item.span, "does not match trait");
                          if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
@@ -1485,7 +1486,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let associated_type_overridden = overridden_associated_type.is_some();
     for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
         let is_implemented = trait_def.ancestors(tcx, impl_id)
-            .defs(tcx, trait_item.name, trait_item.kind, impl_trait_ref.def_id)
+            .defs(tcx, trait_item.ident, trait_item.kind, impl_trait_ref.def_id)
             .next()
             .map(|node_item| !node_item.node.is_from_trait())
             .unwrap_or(false);
@@ -1494,7 +1495,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             if !trait_item.defaultness.has_value() {
                 missing_items.push(trait_item);
             } else if associated_type_overridden {
-                invalidated_items.push(trait_item.name);
+                invalidated_items.push(trait_item.ident);
             }
         }
     }
@@ -1503,17 +1504,17 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let mut err = struct_span_err!(tcx.sess, impl_span, E0046,
             "not all trait items implemented, missing: `{}`",
             missing_items.iter()
-                  .map(|trait_item| trait_item.name.to_string())
+                  .map(|trait_item| trait_item.ident.to_string())
                   .collect::<Vec<_>>().join("`, `"));
         err.span_label(impl_span, format!("missing `{}` in implementation",
                 missing_items.iter()
-                    .map(|trait_item| trait_item.name.to_string())
+                    .map(|trait_item| trait_item.ident.to_string())
                     .collect::<Vec<_>>().join("`, `")));
         for trait_item in missing_items {
             if let Some(span) = tcx.hir.span_if_local(trait_item.def_id) {
-                err.span_label(span, format!("`{}` from trait", trait_item.name));
+                err.span_label(span, format!("`{}` from trait", trait_item.ident));
             } else {
-                err.note_trait_signature(trait_item.name.to_string(),
+                err.note_trait_signature(trait_item.ident.to_string(),
                                          trait_item.signature(&tcx));
             }
         }
@@ -1525,7 +1526,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         span_err!(tcx.sess, invalidator.span, E0399,
                   "the following trait items need to be reimplemented \
                    as `{}` was overridden: `{}`",
-                  invalidator.name,
+                  invalidator.ident,
                   invalidated_items.iter()
                                    .map(|name| name.to_string())
                                    .collect::<Vec<_>>().join("`, `"))
@@ -2468,7 +2469,7 @@ fn try_index_step(&self,
         None
     }
 
-    fn resolve_place_op(&self, op: PlaceOp, is_mut: bool) -> (Option<DefId>, Symbol) {
+    fn resolve_place_op(&self, op: PlaceOp, is_mut: bool) -> (Option<DefId>, ast::Ident) {
         let (tr, name) = match (op, is_mut) {
             (PlaceOp::Deref, false) =>
                 (self.tcx.lang_items().deref_trait(), "deref"),
@@ -2479,7 +2480,7 @@ fn resolve_place_op(&self, op: PlaceOp, is_mut: bool) -> (Option<DefId>, Symbol)
             (PlaceOp::Index, true) =>
                 (self.tcx.lang_items().index_mut_trait(), "index_mut"),
         };
-        (tr, Symbol::intern(name))
+        (tr, ast::Ident::from_str(name))
     }
 
     fn try_overloaded_place_op(&self,
@@ -3004,10 +3005,10 @@ fn check_method_call(&self,
                 Ok(method)
             }
             Err(error) => {
-                if segment.name != keywords::Invalid.name() {
+                if segment.ident.name != keywords::Invalid.name() {
                     self.report_method_error(span,
                                              rcvr_t,
-                                             segment.name,
+                                             segment.ident,
                                              Some(rcvr),
                                              error,
                                              Some(args));
@@ -3836,7 +3837,7 @@ fn check_expr_kind(&self,
                         // ... except when we try to 'break rust;'.
                         // ICE this expression in particular (see #43162).
                         if let hir::ExprPath(hir::QPath::Resolved(_, ref path)) = e.node {
-                            if path.segments.len() == 1 && path.segments[0].name == "rust" {
+                            if path.segments.len() == 1 && path.segments[0].ident.name == "rust" {
                                 fatally_break_rust(self.tcx.sess);
                             }
                         }
@@ -4252,7 +4253,7 @@ pub fn resolve_ty_and_def_ufcs<'b>(&self,
             // errors with default match binding modes. See #44614.
             return (*cached_def, Some(ty), slice::from_ref(&**item_segment))
         }
-        let item_name = item_segment.name;
+        let item_name = item_segment.ident;
         let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
             Ok(def) => def,
             Err(error) => {
@@ -4260,7 +4261,7 @@ pub fn resolve_ty_and_def_ufcs<'b>(&self,
                     method::MethodError::PrivateMatch(def, _) => def,
                     _ => Def::Err,
                 };
-                if item_name != keywords::Invalid.name() {
+                if item_name.name != keywords::Invalid.name() {
                     self.report_method_error(span, ty, item_name, None, error, None);
                 }
                 def
@@ -4539,10 +4540,32 @@ pub fn suggest_mismatched_types_on_tail(&self,
                                             cause_span: Span,
                                             blk_id: ast::NodeId) {
         self.suggest_missing_semicolon(err, expression, expected, cause_span);
-
         if let Some((fn_decl, can_suggest)) = self.get_fn_decl(blk_id) {
             self.suggest_missing_return_type(err, &fn_decl, expected, found, can_suggest);
         }
+        self.suggest_ref_or_into(err, expression, expected, found);
+    }
+
+    pub fn suggest_ref_or_into(
+        &self,
+        err: &mut DiagnosticBuilder<'tcx>,
+        expr: &hir::Expr,
+        expected: Ty<'tcx>,
+        found: Ty<'tcx>,
+    ) {
+        if let Some((sp, msg, suggestion)) = self.check_ref(expr, found, expected) {
+            err.span_suggestion(sp, msg, suggestion);
+        } else if !self.check_for_cast(err, expr, found, expected) {
+            let methods = self.get_conversion_methods(expr.span, expected, found);
+            if let Ok(expr_text) = self.sess().codemap().span_to_snippet(expr.span) {
+                let suggestions = iter::repeat(expr_text).zip(methods.iter())
+                    .map(|(receiver, method)| format!("{}.{}()", receiver, method.ident))
+                    .collect::<Vec<_>>();
+                if !suggestions.is_empty() {
+                    err.span_suggestions(expr.span, "try using a conversion method", suggestions);
+                }
+            }
+        }
     }
 
     /// A common error is to forget to add a semicolon at the end of a block:
index 2dd22058d76959289f89dc523e9769a1504ca9f5..0a33252d4cd01b27f727dee3217b3c34099b022e 100644 (file)
@@ -18,7 +18,7 @@
 use rustc::infer::type_variable::TypeVariableOrigin;
 use errors;
 use syntax_pos::Span;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
 use rustc::hir;
 
 impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
@@ -564,7 +564,7 @@ fn lookup_op_method(&self, lhs_ty: Ty<'tcx>, other_tys: &[Ty<'tcx>], op: Op)
                trait_did);
 
         let method = trait_did.and_then(|trait_did| {
-            let opname = Symbol::intern(opname);
+            let opname = Ident::from_str(opname);
             self.lookup_method_in_trait(span, opname, trait_did, lhs_ty, Some(other_tys))
         });
 
index b61f09cbaea6d22b2789f476416768f0f039dd7a..be42549df74a32ba816217ec314930619b7fe6d2 100644 (file)
@@ -632,7 +632,7 @@ fn check_variances_for_type_defn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         }
 
         let param = &hir_generics.params[index];
-        report_bivariance(tcx, param.span, param.name.name());
+        report_bivariance(tcx, param.span, param.name.ident().name);
     }
 }
 
index 88a2dc817ae63f139f1ad1a38bee016c777fa732..6a346b02b7943f4596a0521b06d5bf4452dec3da 100644 (file)
@@ -35,7 +35,7 @@ fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId,
 
         let name_and_namespace = |def_id| {
             let item = self.tcx.associated_item(def_id);
-            (item.name, Namespace::from(item.kind))
+            (item.ident, Namespace::from(item.kind))
         };
 
         let impl_items1 = self.tcx.associated_item_def_ids(impl1);
index a537b7d41c44b631777de4a141ff039759e47e1e..852603ac51c35deb24fa8b580a0227c4a91844cb 100644 (file)
@@ -898,7 +898,7 @@ fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let early_lifetimes = early_bound_lifetimes_from_generics(tcx, ast_generics);
     params.extend(early_lifetimes.enumerate().map(|(i, param)| {
         ty::GenericParamDef {
-            name: param.name.name().as_interned_str(),
+            name: param.name.ident().as_interned_str(),
             index: own_start + i as u32,
             def_id: tcx.hir.local_def_id(param.id),
             pure_wrt_drop: param.pure_wrt_drop,
@@ -914,7 +914,7 @@ fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     let mut i = 0;
     params.extend(ast_generics.params.iter().filter_map(|param| match param.kind {
         GenericParamKind::Type { ref default, synthetic, .. } => {
-            if param.name.name() == keywords::SelfType.name() {
+            if param.name.ident().name == keywords::SelfType.name() {
                 span_bug!(param.span,  "`Self` should not be the name of a regular parameter");
             }
 
@@ -931,7 +931,7 @@ fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
             let ty_param = ty::GenericParamDef {
                 index: type_start + i as u32,
-                name: param.name.name().as_interned_str(),
+                name: param.name.ident().as_interned_str(),
                 def_id: tcx.hir.local_def_id(param.id),
                 pure_wrt_drop: param.pure_wrt_drop,
                 kind: ty::GenericParamDefKind::Type {
@@ -1437,7 +1437,7 @@ pub fn explicit_predicates_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
             def_id: tcx.hir.local_def_id(param.id),
             index,
-            name: param.name.name().as_interned_str(),
+            name: param.name.ident().as_interned_str(),
         }));
         index += 1;
 
@@ -1461,7 +1461,7 @@ pub fn explicit_predicates_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
     for param in &ast_generics.params {
         match param.kind {
             GenericParamKind::Type { .. } => {
-                let name = param.name.name().as_interned_str();
+                let name = param.name.ident().as_interned_str();
                 let param_ty = ty::ParamTy::new(index, name).to_ty(tcx);
                 index += 1;
 
index 7b7cb2549797878ecc0da1335de9c3814b56fd08..b57bb1fccfb5781f6f0ef90b518ea276cef98a78 100644 (file)
@@ -184,14 +184,14 @@ fn enforce_impl_items_are_distinct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             hir::ImplItemKind::Type(_) => &mut seen_type_items,
             _                    => &mut seen_value_items,
         };
-        match seen_items.entry(impl_item.name) {
+        match seen_items.entry(impl_item.ident.modern()) {
             Occupied(entry) => {
                 let mut err = struct_span_err!(tcx.sess, impl_item.span, E0201,
                                                "duplicate definitions with name `{}`:",
-                                               impl_item.name);
+                                               impl_item.ident);
                 err.span_label(*entry.get(),
                                format!("previous definition of `{}` here",
-                                        impl_item.name));
+                                        impl_item.ident));
                 err.span_label(impl_item.span, "duplicate definition");
                 err.emit();
             }
index dcc5fa53d2f420f9c0e75c6cc0464d00b1f37781..b18e5ca54ff476775b6ce75b0ddbe379a65f8768 100644 (file)
 use rustc::ty::subst::Substs;
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::query::Providers;
-use rustc::traits::{ObligationCause, ObligationCauseCode, TraitEngine};
+use rustc::traits::{ObligationCause, ObligationCauseCode, TraitEngine, TraitEngineExt};
 use session::{CompileIncomplete, config};
 use util::common::time;
 
index 5c09da90491d064f6001723f0080d4e1171640e4..527aef80a8d9dcf26e736323fbc62e55a9eff146 100644 (file)
@@ -200,10 +200,10 @@ fn get_auto_trait_impl_for<F>(
             let mut segments = path.segments.into_vec();
             let last = segments.pop().unwrap();
 
-            let real_name = name.map(|name| Symbol::intern(&name));
+            let real_name = name.map(|name| Ident::from_str(&name));
 
             segments.push(hir::PathSegment::new(
-                real_name.unwrap_or(last.name),
+                real_name.unwrap_or(last.ident),
                 self.generics_to_path_params(generics.clone()),
                 false,
             ));
@@ -251,9 +251,9 @@ fn generics_to_path_params(&self, generics: ty::Generics) -> hir::GenericArgs {
             match param.kind {
                 ty::GenericParamDefKind::Lifetime => {
                     let name = if param.name == "" {
-                        hir::ParamName::Plain(keywords::StaticLifetime.name())
+                        hir::ParamName::Plain(keywords::StaticLifetime.ident())
                     } else {
-                        hir::ParamName::Plain(param.name.as_symbol())
+                        hir::ParamName::Plain(ast::Ident::from_interned_str(param.name))
                     };
 
                     args.push(hir::GenericArg::Lifetime(hir::Lifetime {
@@ -263,7 +263,7 @@ fn generics_to_path_params(&self, generics: ty::Generics) -> hir::GenericArgs {
                     }));
                 }
                 ty::GenericParamDefKind::Type {..} => {
-                    args.push(hir::GenericArg::Type(P(self.ty_param_to_ty(param.clone()))));
+                    args.push(hir::GenericArg::Type(self.ty_param_to_ty(param.clone())));
                 }
             }
         }
@@ -285,7 +285,7 @@ fn ty_param_to_ty(&self, param: ty::GenericParamDef) -> hir::Ty {
                     span: DUMMY_SP,
                     def: Def::TyParam(param.def_id),
                     segments: HirVec::from_vec(vec![
-                        hir::PathSegment::from_name(param.name.as_symbol())
+                        hir::PathSegment::from_ident(Ident::from_interned_str(param.name))
                     ]),
                 }),
             )),
index ad7389db729ec7f3151300a6ecc5f24c933a23e1..7677ccf8bf40c4ffd2007e71c69ef0d38a10a483 100644 (file)
@@ -391,7 +391,7 @@ pub fn build_impl(cx: &DocContext, did: DefId, ret: &mut Vec<clean::Item>) {
     let provided = trait_.def_id().map(|did| {
         tcx.provided_trait_methods(did)
            .into_iter()
-           .map(|meth| meth.name.to_string())
+           .map(|meth| meth.ident.to_string())
            .collect()
     }).unwrap_or(FxHashSet());
 
index 057e7f3ab843354d88dc3d4f15fe261cee302087..8cdffcd558d75cff4c1c5ab6e2ab82945fe776fa 100644 (file)
@@ -29,7 +29,7 @@
 use syntax::symbol::{Symbol, InternedString};
 use syntax_pos::{self, DUMMY_SP, Pos, FileName};
 
-use rustc::middle::const_val::ConstVal;
+use rustc::mir::interpret::ConstValue;
 use rustc::middle::privacy::AccessLevels;
 use rustc::middle::resolve_lifetime as rl;
 use rustc::ty::fold::TypeFolder;
@@ -1146,7 +1146,7 @@ fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<(Def, Option
             Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
                 let item = cx.tcx.inherent_impls(did).iter()
                                  .flat_map(|imp| cx.tcx.associated_items(*imp))
-                                 .find(|item| item.name == item_name);
+                                 .find(|item| item.ident.name == item_name);
                 if let Some(item) = item {
                     let out = match item.kind {
                         ty::AssociatedKind::Method if is_val => "method",
@@ -1181,7 +1181,7 @@ fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<(Def, Option
             Def::Trait(did) => {
                 let item = cx.tcx.associated_item_def_ids(did).iter()
                              .map(|item| cx.tcx.associated_item(*item))
-                             .find(|item| item.name == item_name);
+                             .find(|item| item.ident.name == item_name);
                 if let Some(item) = item {
                     let kind = match item.kind {
                         ty::AssociatedKind::Const if is_val => "associatedconstant",
@@ -1664,7 +1664,7 @@ fn clean(&self, cx: &DocContext) -> Lifetime {
                 _ => {}
             }
         }
-        Lifetime(self.name.name().to_string())
+        Lifetime(self.name.ident().to_string())
     }
 }
 
@@ -1677,14 +1677,14 @@ fn clean(&self, _: &DocContext) -> Lifetime {
                         hir::GenericBound::Outlives(lt) => lt,
                         _ => panic!(),
                     });
-                    let name = bounds.next().unwrap().name.name();
-                    let mut s = format!("{}: {}", self.name.name(), name);
+                    let name = bounds.next().unwrap().name.ident();
+                    let mut s = format!("{}: {}", self.name.ident(), name);
                     for bound in bounds {
-                        s.push_str(&format!(" + {}", bound.name.name()));
+                        s.push_str(&format!(" + {}", bound.name.ident()));
                     }
                     Lifetime(s)
                 } else {
-                    Lifetime(self.name.name().to_string())
+                    Lifetime(self.name.ident().to_string())
                 }
             }
             _ => panic!(),
@@ -1823,7 +1823,7 @@ fn clean(&self, cx: &DocContext) -> Type {
             GenericBound::Outlives(_) => panic!("cleaning a trait got a lifetime"),
         };
         Type::QPath {
-            name: cx.tcx.associated_item(self.item_def_id).name.clean(cx),
+            name: cx.tcx.associated_item(self.item_def_id).ident.name.clean(cx),
             self_type: box self.self_ty().clean(cx),
             trait_: box trait_
         }
@@ -1896,19 +1896,19 @@ fn clean(&self, cx: &DocContext) -> GenericParamDef {
                         hir::GenericBound::Outlives(lt) => lt,
                         _ => panic!(),
                     });
-                    let name = bounds.next().unwrap().name.name();
-                    let mut s = format!("{}: {}", self.name.name(), name);
+                    let name = bounds.next().unwrap().name.ident();
+                    let mut s = format!("{}: {}", self.name.ident(), name);
                     for bound in bounds {
-                        s.push_str(&format!(" + {}", bound.name.name()));
+                        s.push_str(&format!(" + {}", bound.name.ident()));
                     }
                     s
                 } else {
-                    self.name.name().to_string()
+                    self.name.ident().to_string()
                 };
                 (name, GenericParamDefKind::Lifetime)
             }
             hir::GenericParamKind::Type { ref default, synthetic, .. } => {
-                (self.name.name().clean(cx), GenericParamDefKind::Type {
+                (self.name.ident().name.clean(cx), GenericParamDefKind::Type {
                     did: cx.tcx.hir.local_def_id(self.id),
                     bounds: self.bounds.clean(cx),
                     default: default.clean(cx),
@@ -2150,11 +2150,11 @@ pub struct Arguments {
     pub values: Vec<Argument>,
 }
 
-impl<'a> Clean<Arguments> for (&'a [P<hir::Ty>], &'a [Spanned<ast::Name>]) {
+impl<'a> Clean<Arguments> for (&'a [hir::Ty], &'a [ast::Ident]) {
     fn clean(&self, cx: &DocContext) -> Arguments {
         Arguments {
             values: self.0.iter().enumerate().map(|(i, ty)| {
-                let mut name = self.1.get(i).map(|n| n.node.to_string())
+                let mut name = self.1.get(i).map(|ident| ident.to_string())
                                             .unwrap_or(String::new());
                 if name.is_empty() {
                     name = "_".to_string();
@@ -2168,7 +2168,7 @@ fn clean(&self, cx: &DocContext) -> Arguments {
     }
 }
 
-impl<'a> Clean<Arguments> for (&'a [P<hir::Ty>], hir::BodyId) {
+impl<'a> Clean<Arguments> for (&'a [hir::Ty], hir::BodyId) {
     fn clean(&self, cx: &DocContext) -> Arguments {
         let body = cx.tcx.hir.body(self.1);
 
@@ -2184,7 +2184,7 @@ fn clean(&self, cx: &DocContext) -> Arguments {
 }
 
 impl<'a, A: Copy> Clean<FnDecl> for (&'a hir::FnDecl, A)
-    where (&'a [P<hir::Ty>], A): Clean<Arguments>
+    where (&'a [hir::Ty], A): Clean<Arguments>
 {
     fn clean(&self, cx: &DocContext) -> FnDecl {
         FnDecl {
@@ -2360,7 +2360,7 @@ fn clean(&self, cx: &DocContext) -> Item {
             }
         };
         Item {
-            name: Some(self.name.clean(cx)),
+            name: Some(self.ident.name.clean(cx)),
             attrs: self.attrs.clean(cx),
             source: self.span.clean(cx),
             def_id: cx.tcx.hir.local_def_id(self.id),
@@ -2388,7 +2388,7 @@ fn clean(&self, cx: &DocContext) -> Item {
             }, true),
         };
         Item {
-            name: Some(self.name.clean(cx)),
+            name: Some(self.ident.name.clean(cx)),
             source: self.span.clean(cx),
             attrs: self.attrs.clean(cx),
             def_id: cx.tcx.hir.local_def_id(self.id),
@@ -2474,7 +2474,7 @@ fn clean(&self, cx: &DocContext) -> Item {
                 }
             }
             ty::AssociatedKind::Type => {
-                let my_name = self.name.clean(cx);
+                let my_name = self.ident.name.clean(cx);
 
                 if let ty::TraitContainer(did) = self.container {
                     // When loading a cross-crate associated type, the bounds for this type
@@ -2537,7 +2537,7 @@ fn clean(&self, cx: &DocContext) -> Item {
         };
 
         Item {
-            name: Some(self.name.clean(cx)),
+            name: Some(self.ident.name.clean(cx)),
             visibility,
             stability: get_stability(cx, self.def_id),
             deprecation: get_deprecation(cx, self.def_id),
@@ -2926,7 +2926,7 @@ fn clean(&self, cx: &DocContext) -> Type {
                                         }
                                     });
                                     if let Some(ty) = type_.cloned() {
-                                        ty_substs.insert(ty_param_def, ty.into_inner().clean(cx));
+                                        ty_substs.insert(ty_param_def, ty.clean(cx));
                                     } else if let Some(default) = default.clone() {
                                         ty_substs.insert(ty_param_def,
                                                          default.into_inner().clean(cx));
@@ -2949,7 +2949,7 @@ fn clean(&self, cx: &DocContext) -> Type {
                     segments: segments.into(),
                 };
                 Type::QPath {
-                    name: p.segments.last().unwrap().name.clean(cx),
+                    name: p.segments.last().unwrap().ident.name.clean(cx),
                     self_type: box qself.clean(cx),
                     trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
                 }
@@ -2966,7 +2966,7 @@ fn clean(&self, cx: &DocContext) -> Type {
                     segments: vec![].into(),
                 };
                 Type::QPath {
-                    name: segment.name.clean(cx),
+                    name: segment.ident.name.clean(cx),
                     self_type: box qself.clean(cx),
                     trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
                 }
@@ -3014,7 +3014,7 @@ fn clean(&self, cx: &DocContext) -> Type {
             ty::TySlice(ty) => Slice(box ty.clean(cx)),
             ty::TyArray(ty, n) => {
                 let mut n = cx.tcx.lift(&n).unwrap();
-                if let ConstVal::Unevaluated(def_id, substs) = n.val {
+                if let ConstValue::Unevaluated(def_id, substs) = n.val {
                     let param_env = cx.tcx.param_env(def_id);
                     let cid = GlobalId {
                         instance: ty::Instance::new(def_id, substs),
@@ -3099,7 +3099,7 @@ fn clean(&self, cx: &DocContext) -> Type {
                     let mut bindings = vec![];
                     for pb in obj.projection_bounds() {
                         bindings.push(TypeBinding {
-                            name: cx.tcx.associated_item(pb.item_def_id()).name.clean(cx),
+                            name: cx.tcx.associated_item(pb.item_def_id()).ident.name.clean(cx),
                             ty: pb.skip_binder().ty.clean(cx)
                         });
                     }
@@ -3156,7 +3156,7 @@ fn clean(&self, cx: &DocContext) -> Type {
                             if proj.projection_ty.trait_ref(cx.tcx) == *trait_ref.skip_binder() {
                                 Some(TypeBinding {
                                     name: cx.tcx.associated_item(proj.projection_ty.item_def_id)
-                                                .name.clean(cx),
+                                                .ident.name.clean(cx),
                                     ty: proj.ty.clean(cx),
                                 })
                             } else {
@@ -3575,7 +3575,7 @@ pub struct PathSegment {
 impl Clean<PathSegment> for hir::PathSegment {
     fn clean(&self, cx: &DocContext) -> PathSegment {
         PathSegment {
-            name: self.name.clean(cx),
+            name: self.ident.name.clean(cx),
             args: self.with_generic_args(|generic_args| generic_args.clean(cx))
         }
     }
@@ -3628,7 +3628,7 @@ fn strip_path(path: &Path) -> Path {
 fn qpath_to_string(p: &hir::QPath) -> String {
     let segments = match *p {
         hir::QPath::Resolved(_, ref path) => &path.segments,
-        hir::QPath::TypeRelative(_, ref segment) => return segment.name.to_string(),
+        hir::QPath::TypeRelative(_, ref segment) => return segment.ident.to_string(),
     };
 
     let mut s = String::new();
@@ -3636,8 +3636,8 @@ fn qpath_to_string(p: &hir::QPath) -> String {
         if i > 0 {
             s.push_str("::");
         }
-        if seg.name != keywords::CrateRoot.name() {
-            s.push_str(&*seg.name.as_str());
+        if seg.ident.name != keywords::CrateRoot.name() {
+            s.push_str(&*seg.ident.as_str());
         }
     }
     s
@@ -3823,7 +3823,7 @@ fn clean(&self, cx: &DocContext) -> Vec<Item> {
         let provided = trait_.def_id().map(|did| {
             cx.tcx.provided_trait_methods(did)
                   .into_iter()
-                  .map(|meth| meth.name.to_string())
+                  .map(|meth| meth.ident.to_string())
                   .collect()
         }).unwrap_or(FxHashSet());
 
@@ -4064,7 +4064,7 @@ fn name_from_pat(p: &hir::Pat) -> String {
 
     match p.node {
         PatKind::Wild => "_".to_string(),
-        PatKind::Binding(_, _, ref p, _) => p.node.to_string(),
+        PatKind::Binding(_, _, ident, _) => ident.to_string(),
         PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
         PatKind::Struct(ref name, ref fields, etc) => {
             format!("{} {{ {}{} }}", qpath_to_string(name),
@@ -4096,14 +4096,14 @@ fn name_from_pat(p: &hir::Pat) -> String {
 
 fn print_const(cx: &DocContext, n: &ty::Const) -> String {
     match n.val {
-        ConstVal::Unevaluated(def_id, _) => {
+        ConstValue::Unevaluated(def_id, _) => {
             if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
                 print_const_expr(cx, cx.tcx.hir.body_owned_by(node_id))
             } else {
                 inline::print_inlined_const(cx, def_id)
             }
         },
-        ConstVal::Value(..) => {
+        _ => {
             let mut s = String::new();
             ::rustc::mir::fmt_const_val(&mut s, n).unwrap();
             // array lengths are obviously usize
@@ -4293,7 +4293,7 @@ pub struct TypeBinding {
 impl Clean<TypeBinding> for hir::TypeBinding {
     fn clean(&self, cx: &DocContext) -> TypeBinding {
         TypeBinding {
-            name: self.name.clean(cx),
+            name: self.ident.name.clean(cx),
             ty: self.ty.clean(cx)
         }
     }
@@ -4427,7 +4427,7 @@ fn push(&mut self, text: &str) {
         span: DUMMY_SP,
         def: def_ctor(def_id),
         segments: hir::HirVec::from_vec(apb.names.iter().map(|s| hir::PathSegment {
-            name: ast::Name::intern(&s),
+            ident: ast::Ident::from_str(&s),
             args: None,
             infer_types: false,
         }).collect())
index fe992169ca934552f49e08970fa94034a70e8ca6..53032b9b98c5d79fa7305720637c76025a310071 100644 (file)
@@ -718,13 +718,13 @@ fn visit_item(&mut self, item: &'hir hir::Item) {
     }
 
     fn visit_trait_item(&mut self, item: &'hir hir::TraitItem) {
-        self.visit_testable(item.name.to_string(), &item.attrs, |this| {
+        self.visit_testable(item.ident.to_string(), &item.attrs, |this| {
             intravisit::walk_trait_item(this, item);
         });
     }
 
     fn visit_impl_item(&mut self, item: &'hir hir::ImplItem) {
-        self.visit_testable(item.name.to_string(), &item.attrs, |this| {
+        self.visit_testable(item.ident.to_string(), &item.attrs, |this| {
             intravisit::walk_impl_item(this, item);
         });
     }
index 1786e2960825dfb7b6340c8c8a84872fc2c21a2c..ae7f25c7fedbe43ecfbe5c82de45b8ab4d8c0e85 100644 (file)
@@ -9,12 +9,8 @@
 // except according to those terms.
 
 #[inline]
-pub fn write_to_vec(vec: &mut Vec<u8>, position: usize, byte: u8) {
-    if position == vec.len() {
-        vec.push(byte);
-    } else {
-        vec[position] = byte;
-    }
+pub fn write_to_vec(vec: &mut Vec<u8>, byte: u8) {
+    vec.push(byte);
 }
 
 #[cfg(target_pointer_width = "32")]
@@ -33,8 +29,7 @@ macro_rules! leb128_size {
 macro_rules! impl_write_unsigned_leb128 {
     ($fn_name:ident, $int_ty:ident) => (
         #[inline]
-        pub fn $fn_name(out: &mut Vec<u8>, start_position: usize, mut value: $int_ty) -> usize {
-            let mut position = start_position;
+        pub fn $fn_name(out: &mut Vec<u8>, mut value: $int_ty) {
             for _ in 0 .. leb128_size!($int_ty) {
                 let mut byte = (value & 0x7F) as u8;
                 value >>= 7;
@@ -42,15 +37,12 @@ pub fn $fn_name(out: &mut Vec<u8>, start_position: usize, mut value: $int_ty) ->
                     byte |= 0x80;
                 }
 
-                write_to_vec(out, position, byte);
-                position += 1;
+                write_to_vec(out, byte);
 
                 if value == 0 {
                     break;
                 }
             }
-
-            position - start_position
         }
     )
 }
@@ -105,11 +97,9 @@ pub fn $fn_name(slice: &[u8]) -> ($int_ty, usize) {
 /// The callback `write` is called once for each position
 /// that is to be written to with the byte to be encoded
 /// at that position.
-pub fn write_signed_leb128_to<W>(mut value: i128, mut write: W) -> usize
-    where W: FnMut(usize, u8)
+pub fn write_signed_leb128_to<W>(mut value: i128, mut write: W)
+    where W: FnMut(u8)
 {
-    let mut position = 0;
-
     loop {
         let mut byte = (value as u8) & 0x7f;
         value >>= 7;
@@ -120,18 +110,16 @@ pub fn write_signed_leb128_to<W>(mut value: i128, mut write: W) -> usize
             byte |= 0x80; // Mark this byte to show that more bytes will follow.
         }
 
-        write(position, byte);
-        position += 1;
+        write(byte);
 
         if !more {
             break;
         }
     }
-    position
 }
 
-pub fn write_signed_leb128(out: &mut Vec<u8>, start_position: usize, value: i128) -> usize {
-    write_signed_leb128_to(value, |i, v| write_to_vec(out, start_position+i, v))
+pub fn write_signed_leb128(out: &mut Vec<u8>, value: i128) {
+    write_signed_leb128_to(value, |v| write_to_vec(out, v))
 }
 
 #[inline]
@@ -167,9 +155,7 @@ fn $test_name() {
             let mut stream = Vec::new();
 
             for x in 0..62 {
-                let pos = stream.len();
-                let bytes_written = $write_fn_name(&mut stream, pos, (3u64 << x) as $int_ty);
-                assert_eq!(stream.len(), pos + bytes_written);
+                $write_fn_name(&mut stream, (3u64 << x) as $int_ty);
             }
 
             let mut position = 0;
@@ -195,9 +181,7 @@ fn test_signed_leb128() {
     let values: Vec<_> = (-500..500).map(|i| i * 0x12345789ABCDEF).collect();
     let mut stream = Vec::new();
     for &x in &values {
-        let pos = stream.len();
-        let bytes_written = write_signed_leb128(&mut stream, pos, x);
-        assert_eq!(stream.len(), pos + bytes_written);
+        write_signed_leb128(&mut stream, x);
     }
     let mut pos = 0;
     for &x in &values {
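
The rewritten write path above never seeks back into the buffer: each encoded byte is simply pushed onto the end of the output vector. A rough, self-contained sketch of that push-only unsigned LEB128 scheme (illustrative only; the real functions are generated per integer type by the macro above):

// Sketch of push-only unsigned LEB128 encoding plus a matching decoder.
fn write_unsigned_leb128(out: &mut Vec<u8>, mut value: u64) {
    loop {
        let mut byte = (value & 0x7F) as u8;
        value >>= 7;
        if value != 0 {
            byte |= 0x80; // set the continuation bit: more bytes follow
        }
        out.push(byte);
        if value == 0 {
            break;
        }
    }
}

fn read_unsigned_leb128(slice: &[u8]) -> (u64, usize) {
    let mut result = 0u64;
    let mut shift = 0;
    let mut position = 0;
    loop {
        let byte = slice[position];
        position += 1;
        result |= u64::from(byte & 0x7F) << shift;
        if byte & 0x80 == 0 {
            break;
        }
        shift += 7;
    }
    (result, position)
}

fn main() {
    let mut buf = Vec::new();
    write_unsigned_leb128(&mut buf, 624_485);
    assert_eq!(buf, [0xE5, 0x8E, 0x26]);
    assert_eq!(read_unsigned_leb128(&buf), (624_485, 3));
}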
index 22d27b6697a7c8973257ba0c4de88528971f82d4..a5f4b32b329e7c22a51c791035a9cb05d168076d 100644 (file)
@@ -23,6 +23,7 @@
 #![feature(box_syntax)]
 #![feature(core_intrinsics)]
 #![feature(specialization)]
+#![feature(never_type)]
 #![cfg_attr(test, feature(test))]
 
 pub use self::serialize::{Decoder, Encoder, Decodable, Encodable};
index 077efadd60af88823876634ce0524ee0311f060a..a77d1d9b88b89a0e048c6f8123de8386ef727a98 100644 (file)
 
 use leb128::{self, read_signed_leb128, write_signed_leb128};
 use std::borrow::Cow;
-use std::io::{self, Write};
 use serialize;
 
 // -----------------------------------------------------------------------------
 // Encoder
 // -----------------------------------------------------------------------------
 
-pub type EncodeResult = io::Result<()>;
+pub type EncodeResult = Result<(), !>;
 
-pub struct Encoder<'a> {
-    pub cursor: &'a mut io::Cursor<Vec<u8>>,
+pub struct Encoder {
+    pub data: Vec<u8>,
 }
 
-impl<'a> Encoder<'a> {
-    pub fn new(cursor: &'a mut io::Cursor<Vec<u8>>) -> Encoder<'a> {
-        Encoder { cursor: cursor }
+impl Encoder {
+    pub fn new(data: Vec<u8>) -> Encoder {
+        Encoder { data }
     }
 
-    pub fn emit_raw_bytes(&mut self, s: &[u8]) -> EncodeResult {
-        self.cursor.write_all(s)
+    pub fn into_inner(self) -> Vec<u8> {
+        self.data
     }
-}
 
+    pub fn emit_raw_bytes(&mut self, s: &[u8]) {
+        self.data.extend_from_slice(s);
+    }
+}
 
 macro_rules! write_uleb128 {
     ($enc:expr, $value:expr, $fun:ident) => {{
-        let pos = $enc.cursor.position() as usize;
-        let bytes_written = leb128::$fun($enc.cursor.get_mut(), pos, $value);
-        $enc.cursor.set_position((pos + bytes_written) as u64);
+        leb128::$fun(&mut $enc.data, $value);
         Ok(())
     }}
 }
 
 macro_rules! write_sleb128 {
     ($enc:expr, $value:expr) => {{
-        let pos = $enc.cursor.position() as usize;
-        let bytes_written = write_signed_leb128($enc.cursor.get_mut(), pos, $value as i128);
-        $enc.cursor.set_position((pos + bytes_written) as u64);
+        write_signed_leb128(&mut $enc.data, $value as i128);
         Ok(())
     }}
 }
 
-impl<'a> serialize::Encoder for Encoder<'a> {
-    type Error = io::Error;
+impl serialize::Encoder for Encoder {
+    type Error = !;
 
     #[inline]
     fn emit_nil(&mut self) -> EncodeResult {
@@ -87,9 +85,7 @@ fn emit_u16(&mut self, v: u16) -> EncodeResult {
 
     #[inline]
     fn emit_u8(&mut self, v: u8) -> EncodeResult {
-        let pos = self.cursor.position() as usize;
-        leb128::write_to_vec(self.cursor.get_mut(), pos, v);
-        self.cursor.set_position((pos + 1) as u64);
+        self.data.push(v);
         Ok(())
     }
 
@@ -153,15 +149,15 @@ fn emit_char(&mut self, v: char) -> EncodeResult {
     #[inline]
     fn emit_str(&mut self, v: &str) -> EncodeResult {
         self.emit_usize(v.len())?;
-        let _ = self.cursor.write_all(v.as_bytes());
+        self.emit_raw_bytes(v.as_bytes());
         Ok(())
     }
 }
 
-impl<'a> Encoder<'a> {
+impl Encoder {
     #[inline]
     pub fn position(&self) -> usize {
-        self.cursor.position() as usize
+        self.data.len()
     }
 }
 
@@ -339,7 +335,6 @@ fn error(&mut self, err: &str) -> Self::Error {
 #[cfg(test)]
 mod tests {
     use serialize::{Encodable, Decodable};
-    use std::io::Cursor;
     use std::fmt::Debug;
     use super::{Encoder, Decoder};
 
@@ -368,14 +363,13 @@ struct Struct {
 
 
     fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
-        let mut cursor = Cursor::new(Vec::new());
+        let mut encoder = Encoder::new(Vec::new());
 
         for value in &values {
-            let mut encoder = Encoder::new(&mut cursor);
             Encodable::encode(&value, &mut encoder).unwrap();
         }
 
-        let data = cursor.into_inner();
+        let data = encoder.into_inner();
         let mut decoder = Decoder::new(&data[..], 0);
 
         for value in values {
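
For orientation, the encoder above reduces to an append-only byte buffer whose position is just its current length, which is why its error type can become uninhabited. A simplified standalone sketch of that shape (struct and method names are placeholders; the one-byte length prefix is a simplification of the real emit_str/emit_usize pair):

// Sketch of a push-based, infallible byte encoder backed by a plain Vec<u8>.
struct ByteEncoder {
    data: Vec<u8>,
}

impl ByteEncoder {
    fn new() -> ByteEncoder {
        ByteEncoder { data: Vec::new() }
    }

    fn emit_u8(&mut self, v: u8) {
        self.data.push(v);
    }

    fn emit_raw_bytes(&mut self, s: &[u8]) {
        self.data.extend_from_slice(s);
    }

    fn emit_str(&mut self, s: &str) {
        // length prefix followed by the UTF-8 bytes (sketch: a single byte,
        // whereas the real encoder emits a LEB128-encoded usize)
        self.emit_u8(s.len() as u8);
        self.emit_raw_bytes(s.as_bytes());
    }

    fn position(&self) -> usize {
        self.data.len()
    }

    fn into_inner(self) -> Vec<u8> {
        self.data
    }
}

fn main() {
    let mut enc = ByteEncoder::new();
    enc.emit_str("hi");
    assert_eq!(enc.position(), 3);
    assert_eq!(enc.into_inner(), vec![2, b'h', b'i']);
}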
index ae30321f46dfca4ed02c039e88b7ce28028c62d5..8e8340b3ed901b557e23e7111bb54e236060cbc7 100644 (file)
@@ -254,7 +254,14 @@ pub fn div_euc(self, rhs: f32) -> f32 {
 
     /// Calculates the Euclidean modulo (self mod rhs), which is never negative.
     ///
-    /// In particular, the result `n` satisfies `0 <= n < rhs.abs()`.
+    /// In particular, the return value `r` satisfies `0.0 <= r < rhs.abs()` in
+    /// most cases. However, due to floating-point round-off error, the result
+    /// can be `r == rhs.abs()`, violating the mathematical definition, if
+    /// `self` is much smaller than `rhs.abs()` in magnitude and `self < 0.0`.
+    /// Such a result is not an element of the function's codomain, but it is
+    /// the closest representable floating-point number, so the property
+    /// `self == self.div_euc(rhs) * rhs + self.mod_euc(rhs)` still holds
+    /// approximately.
     ///
     /// # Examples
     ///
@@ -266,6 +273,8 @@ pub fn div_euc(self, rhs: f32) -> f32 {
     /// assert_eq!((-a).mod_euc(b), 1.0);
     /// assert_eq!(a.mod_euc(-b), 3.0);
     /// assert_eq!((-a).mod_euc(-b), 1.0);
+    /// // limitation due to round-off error
+    /// assert!((-std::f32::EPSILON).mod_euc(3.0) != 0.0);
     /// ```
     #[inline]
     #[unstable(feature = "euclidean_division", issue = "49048")]
index 7950d434b77e67a5c7302fdafc42c855965e0527..6880294afcaafd4f41c40f6d0ef448f3b5bd14fd 100644 (file)
@@ -230,7 +230,14 @@ pub fn div_euc(self, rhs: f64) -> f64 {
 
     /// Calculates the Euclidean modulo (self mod rhs), which is never negative.
     ///
-    /// In particular, the result `n` satisfies `0 <= n < rhs.abs()`.
+    /// In particular, the return value `r` satisfies `0.0 <= r < rhs.abs()` in
+    /// most cases. However, due to floating-point round-off error, the result
+    /// can be `r == rhs.abs()`, violating the mathematical definition, if
+    /// `self` is much smaller than `rhs.abs()` in magnitude and `self < 0.0`.
+    /// Such a result is not an element of the function's codomain, but it is
+    /// the closest representable floating-point number, so the property
+    /// `self == self.div_euc(rhs) * rhs + self.mod_euc(rhs)` still holds
+    /// approximately.
     ///
     /// # Examples
     ///
@@ -242,6 +249,8 @@ pub fn div_euc(self, rhs: f64) -> f64 {
     /// assert_eq!((-a).mod_euc(b), 1.0);
     /// assert_eq!(a.mod_euc(-b), 3.0);
     /// assert_eq!((-a).mod_euc(-b), 1.0);
+    /// // limitation due to round-off error
+    /// assert!((-std::f64::EPSILON).mod_euc(3.0) != 0.0);
     /// ```
     #[inline]
     #[unstable(feature = "euclidean_division", issue = "49048")]
index 3f7f06714908428ee990b89fe15c483fe890d8ec..01c0ada4ffbe531f227fbaec9858831bb56b6367 100644 (file)
@@ -183,34 +183,10 @@ pub fn fill_bytes(v: &mut [u8]) {
 mod imp {
     #[link(name = "zircon")]
     extern {
-        fn zx_cprng_draw_new(buffer: *mut u8, len: usize) -> i32;
-    }
-
-    fn getrandom(buf: &mut [u8]) -> Result<usize, i32> {
-        unsafe {
-            let status = zx_cprng_draw_new(buf.as_mut_ptr(), buf.len());
-            if status == 0 {
-                Ok(buf.len())
-            } else {
-                Err(status)
-            }
-        }
+        fn zx_cprng_draw(buffer: *mut u8, len: usize);
     }
 
     pub fn fill_bytes(v: &mut [u8]) {
-        let mut buf = v;
-        while !buf.is_empty() {
-            let ret = getrandom(buf);
-            match ret {
-                Err(err) => {
-                    panic!("kernel zx_cprng_draw call failed! (returned {}, buf.len() {})",
-                        err, buf.len())
-                }
-                Ok(actual) => {
-                    let move_buf = buf;
-                    buf = &mut move_buf[(actual as usize)..];
-                }
-            }
-        }
+        unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) }
     }
 }
index 40d3280baa687da385fe43d2f36b52962f67ffc7..a170abb2628e521eafaf0f4f4050c813d82bd039 100644 (file)
@@ -276,7 +276,7 @@ unsafe fn init(&self, slot: &UnsafeCell<Option<T>>) -> &T {
     ///
     /// This will lazily initialize the value if this thread has not referenced
     /// this key yet. If the key has been destroyed (which may happen if this is called
-    /// in a destructor), this function will return a `ThreadLocalError`.
+    /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
     ///
     /// # Panics
     ///
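
A brief sketch of the behaviour that doc comment describes, presumably for the try_with accessor (the thread-local name here is made up for the example):

use std::cell::Cell;

thread_local!(static COUNTER: Cell<u32> = Cell::new(0));

fn main() {
    // try_with returns Err(AccessError) instead of panicking if the key has
    // already been destroyed, e.g. when called from another TLS destructor.
    let value = COUNTER.try_with(|c| {
        c.set(c.get() + 1);
        c.get()
    });
    assert_eq!(value.unwrap(), 1);
}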
index 8e4b7660a1ccef89c20739d2134588f25e7852bb..1d5429bdf8f7d124d498d9f0243b2ce6e49aa8e3 100644 (file)
@@ -211,8 +211,7 @@ fn next_start_pos(&self) -> usize {
         }
     }
 
-    /// Creates a new filemap without setting its line information. If you don't
-    /// intend to set the line information yourself, you should use new_filemap_and_lines.
+    /// Creates a new filemap.
     /// This does not ensure that only one FileMap exists per file name.
     pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc<FileMap> {
         let start_pos = self.next_start_pos();
@@ -247,22 +246,6 @@ pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc<FileMap> {
         filemap
     }
 
-    /// Creates a new filemap and sets its line information.
-    /// This does not ensure that only one FileMap exists per file name.
-    pub fn new_filemap_and_lines(&self, filename: &Path, src: &str) -> Lrc<FileMap> {
-        let fm = self.new_filemap(filename.to_owned().into(), src.to_owned());
-        let mut byte_pos: u32 = fm.start_pos.0;
-        for line in src.lines() {
-            // register the start of this line
-            fm.next_line(BytePos(byte_pos));
-
-            // update byte_pos to include this line and the \n at the end
-            byte_pos += line.len() as u32 + 1;
-        }
-        fm
-    }
-
-
     /// Allocates a new FileMap representing a source file from an external
     /// crate. The source code of such an "imported filemap" is not available,
     /// but we still know enough to generate accurate debuginfo location
@@ -305,9 +288,9 @@ pub fn new_imported_filemap(&self,
             external_src: Lock::new(ExternalSource::AbsentOk),
             start_pos,
             end_pos,
-            lines: Lock::new(file_local_lines),
-            multibyte_chars: Lock::new(file_local_multibyte_chars),
-            non_narrow_chars: Lock::new(file_local_non_narrow_chars),
+            lines: file_local_lines,
+            multibyte_chars: file_local_multibyte_chars,
+            non_narrow_chars: file_local_non_narrow_chars,
             name_hash,
         });
 
@@ -345,21 +328,22 @@ pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
         match self.lookup_line(pos) {
             Ok(FileMapAndLine { fm: f, line: a }) => {
                 let line = a + 1; // Line numbers start at 1
-                let linebpos = (*f.lines.borrow())[a];
+                let linebpos = f.lines[a];
                 let linechpos = self.bytepos_to_file_charpos(linebpos);
                 let col = chpos - linechpos;
 
                 let col_display = {
-                    let non_narrow_chars = f.non_narrow_chars.borrow();
-                    let start_width_idx = non_narrow_chars
+                    let start_width_idx = f
+                        .non_narrow_chars
                         .binary_search_by_key(&linebpos, |x| x.pos())
                         .unwrap_or_else(|x| x);
-                    let end_width_idx = non_narrow_chars
+                    let end_width_idx = f
+                        .non_narrow_chars
                         .binary_search_by_key(&pos, |x| x.pos())
                         .unwrap_or_else(|x| x);
                     let special_chars = end_width_idx - start_width_idx;
-                    let non_narrow: usize =
-                        non_narrow_chars[start_width_idx..end_width_idx]
+                    let non_narrow: usize = f
+                        .non_narrow_chars[start_width_idx..end_width_idx]
                         .into_iter()
                         .map(|x| x.width())
                         .sum();
@@ -380,12 +364,12 @@ pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
             }
             Err(f) => {
                 let col_display = {
-                    let non_narrow_chars = f.non_narrow_chars.borrow();
-                    let end_width_idx = non_narrow_chars
+                    let end_width_idx = f
+                        .non_narrow_chars
                         .binary_search_by_key(&pos, |x| x.pos())
                         .unwrap_or_else(|x| x);
-                    let non_narrow: usize =
-                        non_narrow_chars[0..end_width_idx]
+                    let non_narrow: usize = f
+                        .non_narrow_chars[0..end_width_idx]
                         .into_iter()
                         .map(|x| x.width())
                         .sum();
@@ -830,22 +814,22 @@ pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
         // The number of extra bytes due to multibyte chars in the FileMap
         let mut total_extra_bytes = 0;
 
-        for mbc in map.multibyte_chars.borrow().iter() {
+        for mbc in map.multibyte_chars.iter() {
             debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
             if mbc.pos < bpos {
                 // every character is at least one byte, so we only
                 // count the actual extra bytes.
-                total_extra_bytes += mbc.bytes - 1;
+                total_extra_bytes += mbc.bytes as u32 - 1;
                 // We should never see a byte position in the middle of a
                 // character
-                assert!(bpos.to_usize() >= mbc.pos.to_usize() + mbc.bytes);
+                assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
             } else {
                 break;
             }
         }
 
-        assert!(map.start_pos.to_usize() + total_extra_bytes <= bpos.to_usize());
-        CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes)
+        assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
+        CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize)
     }
 
     // Return the index of the filemap (in self.files) which contains pos.
@@ -1028,51 +1012,16 @@ pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use std::borrow::Cow;
     use rustc_data_structures::sync::Lrc;
 
-    #[test]
-    fn t1 () {
-        let cm = CodeMap::new(FilePathMapping::empty());
-        let fm = cm.new_filemap(PathBuf::from("blork.rs").into(),
-                                "first line.\nsecond line".to_string());
-        fm.next_line(BytePos(0));
-        // Test we can get lines with partial line info.
-        assert_eq!(fm.get_line(0), Some(Cow::from("first line.")));
-        // TESTING BROKEN BEHAVIOR: line break declared before actual line break.
-        fm.next_line(BytePos(10));
-        assert_eq!(fm.get_line(1), Some(Cow::from(".")));
-        fm.next_line(BytePos(12));
-        assert_eq!(fm.get_line(2), Some(Cow::from("second line")));
-    }
-
-    #[test]
-    #[should_panic]
-    fn t2 () {
-        let cm = CodeMap::new(FilePathMapping::empty());
-        let fm = cm.new_filemap(PathBuf::from("blork.rs").into(),
-                                "first line.\nsecond line".to_string());
-        // TESTING *REALLY* BROKEN BEHAVIOR:
-        fm.next_line(BytePos(0));
-        fm.next_line(BytePos(10));
-        fm.next_line(BytePos(2));
-    }
-
     fn init_code_map() -> CodeMap {
         let cm = CodeMap::new(FilePathMapping::empty());
-        let fm1 = cm.new_filemap(PathBuf::from("blork.rs").into(),
-                                 "first line.\nsecond line".to_string());
-        let fm2 = cm.new_filemap(PathBuf::from("empty.rs").into(),
-                                 "".to_string());
-        let fm3 = cm.new_filemap(PathBuf::from("blork2.rs").into(),
-                                 "first line.\nsecond line".to_string());
-
-        fm1.next_line(BytePos(0));
-        fm1.next_line(BytePos(12));
-        fm2.next_line(fm2.start_pos);
-        fm3.next_line(fm3.start_pos);
-        fm3.next_line(fm3.start_pos + BytePos(12));
-
+        cm.new_filemap(PathBuf::from("blork.rs").into(),
+                       "first line.\nsecond line".to_string());
+        cm.new_filemap(PathBuf::from("empty.rs").into(),
+                       "".to_string());
+        cm.new_filemap(PathBuf::from("blork2.rs").into(),
+                       "first line.\nsecond line".to_string());
         cm
     }
 
@@ -1125,26 +1074,10 @@ fn t5() {
     fn init_code_map_mbc() -> CodeMap {
         let cm = CodeMap::new(FilePathMapping::empty());
         // € is a three byte utf8 char.
-        let fm1 =
-            cm.new_filemap(PathBuf::from("blork.rs").into(),
-                           "fir€st €€€€ line.\nsecond line".to_string());
-        let fm2 = cm.new_filemap(PathBuf::from("blork2.rs").into(),
-                                 "first line€€.\n€ second line".to_string());
-
-        fm1.next_line(BytePos(0));
-        fm1.next_line(BytePos(28));
-        fm2.next_line(fm2.start_pos);
-        fm2.next_line(fm2.start_pos + BytePos(20));
-
-        fm1.record_multibyte_char(BytePos(3), 3);
-        fm1.record_multibyte_char(BytePos(9), 3);
-        fm1.record_multibyte_char(BytePos(12), 3);
-        fm1.record_multibyte_char(BytePos(15), 3);
-        fm1.record_multibyte_char(BytePos(18), 3);
-        fm2.record_multibyte_char(fm2.start_pos + BytePos(10), 3);
-        fm2.record_multibyte_char(fm2.start_pos + BytePos(13), 3);
-        fm2.record_multibyte_char(fm2.start_pos + BytePos(18), 3);
-
+        cm.new_filemap(PathBuf::from("blork.rs").into(),
+                       "fir€st €€€€ line.\nsecond line".to_string());
+        cm.new_filemap(PathBuf::from("blork2.rs").into(),
+                       "first line€€.\n€ second line".to_string());
         cm
     }
 
@@ -1196,7 +1129,7 @@ fn span_to_snippet_and_lines_spanning_multiple_lines() {
         let cm = CodeMap::new(FilePathMapping::empty());
         let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
         let selection = "     \n    ~~\n~~~\n~~~~~     \n   \n";
-        cm.new_filemap_and_lines(Path::new("blork.rs"), inputtext);
+        cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
         let span = span_from_selection(inputtext, selection);
 
         // check that we are extracting the text we thought we were extracting
@@ -1239,7 +1172,7 @@ fn span_merging_fail() {
         let inputtext  = "bbbb BB\ncc CCC\n";
         let selection1 = "     ~~\n      \n";
         let selection2 = "       \n   ~~~\n";
-        cm.new_filemap_and_lines(Path::new("blork.rs"), inputtext);
+        cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
         let span1 = span_from_selection(inputtext, selection1);
         let span2 = span_from_selection(inputtext, selection2);
 
index 16d786dd6cad2f76450618c0c917a80991d12e36..9afce74f53cc4001fcccb6b1635e244061163467 100644 (file)
@@ -621,6 +621,9 @@ pub enum SyntaxExtension {
         /// Whether the contents of the macro can use `unsafe`
         /// without triggering the `unsafe_code` lint.
         allow_internal_unsafe: bool,
+        /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
+        /// for a given macro.
+        local_inner_macros: bool,
         /// The macro's feature name if it is unstable, and the stability feature
         unstable_feature: Option<(Symbol, u32)>,
         /// Edition of the crate in which the macro is defined
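
The new flag corresponds to the #[macro_export(local_inner_macros)] attribute; a rough sketch of the pattern it enables (hypothetical macro names, not taken from the patch):

// `local_inner_macros` rewrites `helper!` inside `public_api!` to
// `$crate::helper!`, so users of the exporting crate do not need to have
// `helper` in scope themselves.
#[macro_export]
macro_rules! helper {
    () => { 42 };
}

#[macro_export(local_inner_macros)]
macro_rules! public_api {
    () => { helper!() };
}

fn main() {
    assert_eq!(public_api!(), 42);
}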
index 940fb6405f1b6065c170dfd7d7abc5ec9fdfd006..32ace937ac0067c1b424c04d548a91223f169b81 100644 (file)
@@ -64,6 +64,7 @@ pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path]
         format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)),
         allow_internal_unstable: true,
         allow_internal_unsafe: false,
+        local_inner_macros: false,
         edition: hygiene::default_edition(),
     });
 
index 69c99c63aafe3e55692d41ec91785063c00879c4..9cd410d424394643bafb156d6f7c44a613b2b80c 100644 (file)
@@ -542,6 +542,7 @@ fn expand_attr_invoc(&mut self,
             format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))),
             allow_internal_unstable: false,
             allow_internal_unsafe: false,
+            local_inner_macros: false,
             edition: ext.edition(),
         });
 
@@ -695,6 +696,7 @@ fn expand_bang_invoc(&mut self,
                                           def_site_span: Option<Span>,
                                           allow_internal_unstable,
                                           allow_internal_unsafe,
+                                          local_inner_macros,
                                           // can't infer this type
                                           unstable_feature: Option<(Symbol, u32)>,
                                           edition| {
@@ -729,6 +731,7 @@ fn expand_bang_invoc(&mut self,
                 format: macro_bang_format(path),
                 allow_internal_unstable,
                 allow_internal_unsafe,
+                local_inner_macros,
                 edition,
             });
             Ok(())
@@ -737,7 +740,7 @@ fn expand_bang_invoc(&mut self,
         let opt_expanded = match *ext {
             DeclMacro(ref expand, def_span, edition) => {
                 if let Err(dummy_span) = validate_and_set_expn_info(self, def_span.map(|(_, s)| s),
-                                                                    false, false, None,
+                                                                    false, false, false, None,
                                                                     edition) {
                     dummy_span
                 } else {
@@ -750,12 +753,14 @@ fn expand_bang_invoc(&mut self,
                 def_info,
                 allow_internal_unstable,
                 allow_internal_unsafe,
+                local_inner_macros,
                 unstable_feature,
                 edition,
             } => {
                 if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
                                                                     allow_internal_unstable,
                                                                     allow_internal_unsafe,
+                                                                    local_inner_macros,
                                                                     unstable_feature,
                                                                     edition) {
                     dummy_span
@@ -777,6 +782,7 @@ fn expand_bang_invoc(&mut self,
                         format: macro_bang_format(path),
                         allow_internal_unstable,
                         allow_internal_unsafe: false,
+                        local_inner_macros: false,
                         edition: hygiene::default_edition(),
                     });
 
@@ -816,6 +822,7 @@ fn expand_bang_invoc(&mut self,
                         // FIXME probably want to follow macro_rules macros here.
                         allow_internal_unstable,
                         allow_internal_unsafe: false,
+                        local_inner_macros: false,
                         edition,
                     });
 
@@ -890,6 +897,7 @@ fn expand_derive_invoc(&mut self,
             format: MacroAttribute(pretty_name),
             allow_internal_unstable: false,
             allow_internal_unsafe: false,
+            local_inner_macros: false,
             edition: ext.edition(),
         };
 
@@ -1487,9 +1495,11 @@ fn fold_attribute(&mut self, at: ast::Attribute) -> Option<ast::Attribute> {
 
                     match String::from_utf8(buf) {
                         Ok(src) => {
+                            let src_interned = Symbol::intern(&src);
+
                             // Add this input file to the code map to make it available as
                             // dependency information
-                            self.cx.codemap().new_filemap_and_lines(&filename, &src);
+                            self.cx.codemap().new_filemap(filename.into(), src);
 
                             let include_info = vec![
                                 dummy_spanned(ast::NestedMetaItemKind::MetaItem(
@@ -1497,7 +1507,7 @@ fn fold_attribute(&mut self, at: ast::Attribute) -> Option<ast::Attribute> {
                                                                      dummy_spanned(file)))),
                                 dummy_spanned(ast::NestedMetaItemKind::MetaItem(
                                         attr::mk_name_value_item_str(Ident::from_str("contents"),
-                                                            dummy_spanned(Symbol::intern(&src))))),
+                                                            dummy_spanned(src_interned)))),
                             ];
 
                             let include_ident = Ident::from_str("include");
index d6dce63ea5e4b8d63c16fd2f9ef58529835a20ed..669536f519ce31274453123b7ea42d384851921d 100644 (file)
@@ -150,11 +150,13 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
     };
     match String::from_utf8(bytes) {
         Ok(src) => {
+            let interned_src = Symbol::intern(&src);
+
             // Add this input file to the code map to make it available as
             // dependency information
-            cx.codemap().new_filemap_and_lines(&file, &src);
+            cx.codemap().new_filemap(file.into(), src);
 
-            base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
+            base::MacEager::expr(cx.expr_str(sp, interned_src))
         }
         Err(_) => {
             cx.span_err(sp,
@@ -182,7 +184,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
         Ok(..) => {
             // Add this input file to the code map to make it available as
             // dependency information, but don't enter its contents
-            cx.codemap().new_filemap_and_lines(&file, "");
+            cx.codemap().new_filemap(file.into(), "".to_string());
 
             base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
         }
index 4ee5357f4766cc1186d36299a0e78f19e2d47d5e..3b3892729d93cdcc0315b36b7f8229e6b615642d 100644 (file)
@@ -286,6 +286,12 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
     if body.legacy {
         let allow_internal_unstable = attr::contains_name(&def.attrs, "allow_internal_unstable");
         let allow_internal_unsafe = attr::contains_name(&def.attrs, "allow_internal_unsafe");
+        let mut local_inner_macros = false;
+        if let Some(macro_export) = attr::find_by_name(&def.attrs, "macro_export") {
+            if let Some(l) = macro_export.meta_item_list() {
+                local_inner_macros = attr::list_contains_name(&l, "local_inner_macros");
+            }
+        }
 
         let unstable_feature = attr::find_stability(&sess.span_diagnostic,
                                                     &def.attrs, def.span).and_then(|stability| {
@@ -301,6 +307,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
             def_info: Some((def.id, def.span)),
             allow_internal_unstable,
             allow_internal_unsafe,
+            local_inner_macros,
             unstable_feature,
             edition,
         }
index c813ec1977b881bc00ee8cb3f6379af7f89c9c79..6d55b3de75d2e69090247064b5bf505a4082539b 100644 (file)
@@ -1359,17 +1359,17 @@ fn leveled_feature_err<'a>(sess: &'a ParseSess, feature: &str, span: Span, issue
     ":literal fragment specifier is experimental and subject to change";
 
 pub const EXPLAIN_UNSIZED_TUPLE_COERCION: &'static str =
-    "Unsized tuple coercion is not stable enough for use and is subject to change";
+    "unsized tuple coercion is not stable enough for use and is subject to change";
 
 pub const EXPLAIN_MACRO_AT_MOST_ONCE_REP: &'static str =
-    "Using the `?` macro Kleene operator for \"at most one\" repetition is unstable";
+    "using the `?` macro Kleene operator for \"at most one\" repetition is unstable";
 
 pub const EXPLAIN_MACROS_IN_EXTERN: &'static str =
-    "Macro invocations in `extern {}` blocks are experimental.";
+    "macro invocations in `extern {}` blocks are experimental.";
 
 // mention proc-macros when enabled
 pub const EXPLAIN_PROC_MACROS_IN_EXTERN: &'static str =
-    "Macro and proc-macro invocations in `extern {}` blocks are experimental.";
+    "macro and proc-macro invocations in `extern {}` blocks are experimental.";
 
 struct PostExpansionVisitor<'a> {
     context: &'a Context<'a>,
index 7da0d816d0f7aeb39afeb67c587d999604ea521b..3995a9b8689e77e7a4b2f139ee019cef55ccc8f7 100644 (file)
@@ -240,9 +240,11 @@ fn read_block_comment(rdr: &mut StringReader,
     let mut lines: Vec<String> = Vec::new();
 
     // Count the number of chars since the start of the line by rescanning.
-    let mut src_index = rdr.src_index(rdr.filemap.line_begin_pos());
+    let mut src_index = rdr.src_index(rdr.filemap.line_begin_pos(rdr.pos));
     let end_src_index = rdr.src_index(rdr.pos);
-    assert!(src_index <= end_src_index);
+    assert!(src_index <= end_src_index,
+        "src_index={}, end_src_index={}, line_begin_pos={}",
+        src_index, end_src_index, rdr.filemap.line_begin_pos(rdr.pos).to_u32());
     let mut n = 0;
     while src_index < end_src_index {
         let c = char_at(&rdr.src, src_index);
index c09cfd910d207adf8efba17bf785781876fdc5c4..dcc71e787785289558e18a8872aac5dc23a15c4d 100644 (file)
@@ -51,11 +51,7 @@ pub struct StringReader<'a> {
     pub ch: Option<char>,
     pub filemap: Lrc<syntax_pos::FileMap>,
     /// Stop reading src at this index.
-    end_src_index: usize,
-    /// Whether to record new-lines and multibyte chars in filemap.
-    /// This is only necessary the first time a filemap is lexed.
-    /// If part of a filemap is being re-lexed, this should be set to false.
-    save_new_lines_and_multibyte: bool,
+    pub end_src_index: usize,
     // cached:
     peek_tok: token::Token,
     peek_span: Span,
@@ -188,7 +184,6 @@ fn new_raw_internal(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>,
             ch: Some('\n'),
             filemap,
             end_src_index: src.len(),
-            save_new_lines_and_multibyte: true,
             // dummy values; not read
             peek_tok: token::Eof,
             peek_span: syntax_pos::DUMMY_SP,
@@ -225,7 +220,6 @@ pub fn retokenize(sess: &'a ParseSess, mut span: Span) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, begin.fm, None);
 
         // Seek the lexer to the right byte range.
-        sr.save_new_lines_and_multibyte = false;
         sr.next_pos = span.lo();
         sr.end_src_index = sr.src_index(span.hi());
 
@@ -458,18 +452,6 @@ fn translate_crlf_(rdr: &StringReader,
             let next_ch = char_at(&self.src, next_src_index);
             let next_ch_len = next_ch.len_utf8();
 
-            if self.ch.unwrap() == '\n' {
-                if self.save_new_lines_and_multibyte {
-                    self.filemap.next_line(self.next_pos);
-                }
-            }
-            if next_ch_len > 1 {
-                if self.save_new_lines_and_multibyte {
-                    self.filemap.record_multibyte_char(self.next_pos, next_ch_len);
-                }
-            }
-            self.filemap.record_width(self.next_pos, next_ch);
-
             self.ch = Some(next_ch);
             self.pos = self.next_pos;
             self.next_pos = self.next_pos + Pos::from_usize(next_ch_len);
index 21bd6c083244d0f8050d67dcf29bd313d5dee087..faf2cf64e1d9f05aa374e6ed8b9e7b0ea90864be 100644 (file)
@@ -7325,7 +7325,7 @@ fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
             match self.token {
                 token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
                     self.bump(); // `_`
-                    Ok(Some(Ident::new(ident.name.gensymed(), ident.span)))
+                    Ok(Some(ident.gensym()))
                 }
                 _ => self.parse_ident().map(Some),
             }
index 66e8e0d7a9c6c76e52dcaa0b0b850d62cce71ba9..68121d42b69c64698bb4dd07a2684b85fb25b075 100644 (file)
@@ -29,6 +29,7 @@ fn ignored_span(sp: Span) -> Span {
         format: MacroAttribute(Symbol::intern("std_inject")),
         allow_internal_unstable: true,
         allow_internal_unsafe: false,
+        local_inner_macros: false,
         edition: hygiene::default_edition(),
     });
     sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
index 141fd122ff57b6647cea156804d3da4b15322f8b..51fbe34028e8f66b8af6cdb48eadef73bf684bde 100644 (file)
@@ -311,6 +311,7 @@ fn generate_test_harness(sess: &ParseSess,
         format: MacroAttribute(Symbol::intern("test")),
         allow_internal_unstable: true,
         allow_internal_unsafe: false,
+        local_inner_macros: false,
         edition: hygiene::default_edition(),
     });
 
index 81dcc1998edd1c7a6b43b7fb0d7011fae9a77d0c..c7e4fbd1073d7c06cb8657c0cd12f4d0b07a2b54 100644 (file)
@@ -51,7 +51,7 @@ fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &
         let output = Arc::new(Mutex::new(Vec::new()));
 
         let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty()));
-        code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);
+        code_map.new_filemap(Path::new("test.rs").to_owned().into(), file_text.to_owned());
 
         let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
         let mut msp = MultiSpan::from_span(primary_span);
index af3ef181c59dc9a7808e97cf198b6f769b258d84..311251832664e960e25285aee14d6c6bb80c3233 100644 (file)
@@ -76,6 +76,7 @@ macro_rules! register {
                         def_info: None,
                         allow_internal_unstable: false,
                         allow_internal_unsafe: false,
+                        local_inner_macros: false,
                         unstable_feature: None,
                         edition: hygiene::default_edition(),
                     });
@@ -132,6 +133,7 @@ macro_rules! register {
                 def_info: None,
                 allow_internal_unstable: true,
                 allow_internal_unsafe: false,
+                local_inner_macros: false,
                 unstable_feature: None,
                 edition: hygiene::default_edition(),
             });
index ee343e47bd8905c5d44c4ce6bee4bdc892df6879..ef29e5a6b022b624498854b93e13b4d81be48a22 100644 (file)
@@ -368,6 +368,7 @@ fn mk_registrar(cx: &mut ExtCtxt,
         format: MacroAttribute(Symbol::intern("proc_macro")),
         allow_internal_unstable: true,
         allow_internal_unsafe: false,
+        local_inner_macros: false,
         edition: hygiene::default_edition(),
     });
     let span = DUMMY_SP.apply_mark(mark);
index a9147b394f7a498406baec5c22449ed2943c1c16..08ee2e0f3762647939d95879baf4251efd979d18 100644 (file)
@@ -14,3 +14,4 @@ rustc_data_structures = { path = "../librustc_data_structures" }
 arena = { path = "../libarena" }
 scoped-tls = { version = "0.1.1", features = ["nightly"] }
 unicode-width = "0.1.4"
+cfg-if = "0.1.2"
diff --git a/src/libsyntax_pos/analyze_filemap.rs b/src/libsyntax_pos/analyze_filemap.rs
new file mode 100644 (file)
index 0000000..c7c0263
--- /dev/null
@@ -0,0 +1,436 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use unicode_width::UnicodeWidthChar;
+use super::*;
+
+/// Find all newlines, multi-byte characters, and non-narrow characters in a
+/// FileMap.
+///
+/// This function will use an SSE2-enhanced implementation if hardware support
+/// is detected at runtime.
+pub fn analyze_filemap(
+    src: &str,
+    filemap_start_pos: BytePos)
+    -> (Vec<BytePos>, Vec<MultiByteChar>, Vec<NonNarrowChar>)
+{
+    let mut lines = vec![filemap_start_pos];
+    let mut multi_byte_chars = vec![];
+    let mut non_narrow_chars = vec![];
+
+    // Calls the right implementation, depending on hardware support available.
+    analyze_filemap_dispatch(src,
+                             filemap_start_pos,
+                             &mut lines,
+                             &mut multi_byte_chars,
+                             &mut non_narrow_chars);
+
+    // The code above optimistically registers a new line *after* each \n
+    // it encounters. If that point is already outside the filemap, remove
+    // it again.
+    if let Some(&last_line_start) = lines.last() {
+        let file_map_end = filemap_start_pos + BytePos::from_usize(src.len());
+        assert!(file_map_end >= last_line_start);
+        if last_line_start == file_map_end {
+            lines.pop();
+        }
+    }
+
+    (lines, multi_byte_chars, non_narrow_chars)
+}
+
+cfg_if! {
+    if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"),
+                 not(stage0)))] {
+        fn analyze_filemap_dispatch(src: &str,
+                                    filemap_start_pos: BytePos,
+                                    lines: &mut Vec<BytePos>,
+                                    multi_byte_chars: &mut Vec<MultiByteChar>,
+                                    non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            if is_x86_feature_detected!("sse2") {
+                unsafe {
+                    analyze_filemap_sse2(src,
+                                         filemap_start_pos,
+                                         lines,
+                                         multi_byte_chars,
+                                         non_narrow_chars);
+                }
+            } else {
+                analyze_filemap_generic(src,
+                                        src.len(),
+                                        filemap_start_pos,
+                                        lines,
+                                        multi_byte_chars,
+                                        non_narrow_chars);
+
+            }
+        }
+
+        /// Check 16-byte chunks of text at a time. If the chunk contains
+        /// something other than printable ASCII characters and newlines, the
+        /// function falls back to the generic implementation. Otherwise it uses
+        /// SSE2 intrinsics to quickly find all newlines.
+        #[target_feature(enable = "sse2")]
+        unsafe fn analyze_filemap_sse2(src: &str,
+                                       output_offset: BytePos,
+                                       lines: &mut Vec<BytePos>,
+                                       multi_byte_chars: &mut Vec<MultiByteChar>,
+                                       non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            #[cfg(target_arch = "x86")]
+            use std::arch::x86::*;
+            #[cfg(target_arch = "x86_64")]
+            use std::arch::x86_64::*;
+
+            const CHUNK_SIZE: usize = 16;
+
+            let src_bytes = src.as_bytes();
+
+            let chunk_count = src.len() / CHUNK_SIZE;
+
+            // This variable keeps track of where we should start decoding a
+            // chunk. If a multi-byte character spans across chunk boundaries,
+            // we need to skip that part in the next chunk because we already
+            // handled it.
+            let mut intra_chunk_offset = 0;
+
+            for chunk_index in 0 .. chunk_count {
+                let ptr = src_bytes.as_ptr() as *const __m128i;
+                // We don't know if the pointer is aligned to 16 bytes, so we
+                // use `loadu`, which supports unaligned loading.
+                let chunk = _mm_loadu_si128(ptr.offset(chunk_index as isize));
+
+                // For each character in the chunk, see if its byte value is < 0, which
+                // indicates that it's part of a multi-byte UTF-8 char.
+                let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0));
+                // Create a bit mask from the comparison results.
+                let multibyte_mask = _mm_movemask_epi8(multibyte_test);
+
+                // If the bit mask is all zero, we only have ASCII chars here:
+                if multibyte_mask == 0 {
+                    assert!(intra_chunk_offset == 0);
+
+                    // Check if there are any control characters in the chunk. All
+                    // control characters that we can encounter at this point have a
+                    // byte value less than 32 or ...
+                    let control_char_test0 = _mm_cmplt_epi8(chunk, _mm_set1_epi8(32));
+                    let control_char_mask0 = _mm_movemask_epi8(control_char_test0);
+
+                    // ... it's the ASCII 'DEL' character with a value of 127.
+                    let control_char_test1 = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127));
+                    let control_char_mask1 = _mm_movemask_epi8(control_char_test1);
+
+                    let control_char_mask = control_char_mask0 | control_char_mask1;
+
+                    if control_char_mask != 0 {
+                        // Check for newlines in the chunk
+                        let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8));
+                        let newlines_mask = _mm_movemask_epi8(newlines_test);
+
+                        if control_char_mask == newlines_mask {
+                            // All control characters are newlines, record them
+                            let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32;
+                            let output_offset = output_offset +
+                                BytePos::from_usize(chunk_index * CHUNK_SIZE + 1);
+
+                            loop {
+                                let index = newlines_mask.trailing_zeros();
+
+                                if index >= CHUNK_SIZE as u32 {
+                                    // We have arrived at the end of the chunk.
+                                    break
+                                }
+
+                                lines.push(BytePos(index) + output_offset);
+
+                                // Clear the bit, so we can find the next one.
+                                newlines_mask &= (!1) << index;
+                            }
+
+                            // We are done for this chunk. All control characters were
+                            // newlines and we took care of those.
+                            continue
+                        } else {
+                            // Some of the control characters are not newlines,
+                            // fall through to the slow path below.
+                        }
+                    } else {
+                        // No control characters, nothing to record for this chunk
+                        continue
+                    }
+                }
+
+                // The slow path.
+                // There are control chars in here, fallback to generic decoding.
+                let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset;
+                intra_chunk_offset = analyze_filemap_generic(
+                    &src[scan_start .. ],
+                    CHUNK_SIZE - intra_chunk_offset,
+                    BytePos::from_usize(scan_start) + output_offset,
+                    lines,
+                    multi_byte_chars,
+                    non_narrow_chars
+                );
+            }
+
+            // There might still be a tail left to analyze
+            let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset;
+            if tail_start < src.len() {
+                analyze_filemap_generic(&src[tail_start as usize ..],
+                                        src.len() - tail_start,
+                                        output_offset + BytePos::from_usize(tail_start),
+                                        lines,
+                                        multi_byte_chars,
+                                        non_narrow_chars);
+            }
+        }
+    } else {
+
+        // The target (or compiler version) does not support SSE2 ...
+        fn analyze_filemap_dispatch(src: &str,
+                                    filemap_start_pos: BytePos,
+                                    lines: &mut Vec<BytePos>,
+                                    multi_byte_chars: &mut Vec<MultiByteChar>,
+                                    non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            analyze_filemap_generic(src,
+                                    src.len(),
+                                    filemap_start_pos,
+                                    lines,
+                                    multi_byte_chars,
+                                    non_narrow_chars);
+        }
+    }
+}
+
+// `scan_len` determines the number of bytes in `src` to scan. Note that the
+// function can read past `scan_len` if a multi-byte character starts within the
+// range but extends past it. The overflow is returned by the function.
+fn analyze_filemap_generic(src: &str,
+                           scan_len: usize,
+                           output_offset: BytePos,
+                           lines: &mut Vec<BytePos>,
+                           multi_byte_chars: &mut Vec<MultiByteChar>,
+                           non_narrow_chars: &mut Vec<NonNarrowChar>)
+                           -> usize
+{
+    assert!(src.len() >= scan_len);
+    let mut i = 0;
+    let src_bytes = src.as_bytes();
+
+    while i < scan_len {
+        let byte = unsafe {
+            // We verified that i < scan_len <= src.len()
+            *src_bytes.get_unchecked(i as usize)
+        };
+
+        // How much to advance in order to get to the next UTF-8 char in the
+        // string.
+        let mut char_len = 1;
+
+        if byte < 32 {
+            // This is an ASCII control character; it could be one of the cases
+            // that are interesting to us.
+
+            let pos = BytePos::from_usize(i) + output_offset;
+
+            match byte {
+                b'\n' => {
+                    lines.push(pos + BytePos(1));
+                }
+                b'\t' => {
+                    non_narrow_chars.push(NonNarrowChar::Tab(pos));
+                }
+                _ => {
+                    non_narrow_chars.push(NonNarrowChar::ZeroWidth(pos));
+                }
+            }
+        } else if byte >= 127 {
+            // The slow path:
+            // This is either the ASCII control character "DEL" or the beginning of
+            // a multibyte char. Just decode to `char`.
+            let c = (&src[i..]).chars().next().unwrap();
+            char_len = c.len_utf8();
+
+            let pos = BytePos::from_usize(i) + output_offset;
+
+            if char_len > 1 {
+                assert!(char_len >=2 && char_len <= 4);
+                let mbc = MultiByteChar {
+                    pos,
+                    bytes: char_len as u8,
+                };
+                multi_byte_chars.push(mbc);
+            }
+
+            // Assume control characters are zero width.
+            // FIXME: How can we decide between `width` and `width_cjk`?
+            let char_width = UnicodeWidthChar::width(c).unwrap_or(0);
+
+            if char_width != 1 {
+                non_narrow_chars.push(NonNarrowChar::new(pos, char_width));
+            }
+        }
+
+        i += char_len;
+    }
+
+    i - scan_len
+}
+
+
+
+macro_rules! test {
+    (case: $test_name:ident,
+     text: $text:expr,
+     filemap_start_pos: $filemap_start_pos:expr,
+     lines: $lines:expr,
+     multi_byte_chars: $multi_byte_chars:expr,
+     non_narrow_chars: $non_narrow_chars:expr,) => (
+
+    #[test]
+    fn $test_name() {
+
+        let (lines, multi_byte_chars, non_narrow_chars) =
+            analyze_filemap($text, BytePos($filemap_start_pos));
+
+        let expected_lines: Vec<BytePos> = $lines
+            .into_iter()
+            .map(|pos| BytePos(pos))
+            .collect();
+
+        assert_eq!(lines, expected_lines);
+
+        let expected_mbcs: Vec<MultiByteChar> = $multi_byte_chars
+            .into_iter()
+            .map(|(pos, bytes)| MultiByteChar {
+                pos: BytePos(pos),
+                bytes,
+            })
+            .collect();
+
+        assert_eq!(multi_byte_chars, expected_mbcs);
+
+        let expected_nncs: Vec<NonNarrowChar> = $non_narrow_chars
+            .into_iter()
+            .map(|(pos, width)| {
+                NonNarrowChar::new(BytePos(pos), width)
+            })
+            .collect();
+
+        assert_eq!(non_narrow_chars, expected_nncs);
+    })
+}
+
+test!(
+    case: empty_text,
+    text: "",
+    filemap_start_pos: 0,
+    lines: vec![],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: newlines_short,
+    text: "a\nc",
+    filemap_start_pos: 0,
+    lines: vec![0, 2],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: newlines_long,
+    text: "012345678\nabcdef012345678\na",
+    filemap_start_pos: 0,
+    lines: vec![0, 10, 26],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: newline_and_multi_byte_char_in_same_chunk,
+    text: "01234β789\nbcdef0123456789abcdef",
+    filemap_start_pos: 0,
+    lines: vec![0, 11],
+    multi_byte_chars: vec![(5, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: newline_and_control_char_in_same_chunk,
+    text: "01234\u{07}6789\nbcdef0123456789abcdef",
+    filemap_start_pos: 0,
+    lines: vec![0, 11],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![(5, 0)],
+);
+
+test!(
+    case: multi_byte_char_short,
+    text: "aβc",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![(1, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: multi_byte_char_long,
+    text: "0123456789abcΔf012345β",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![(13, 2), (22, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: multi_byte_char_across_chunk_boundary,
+    text: "0123456789abcdeΔ123456789abcdef01234",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![(15, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: multi_byte_char_across_chunk_boundary_tail,
+    text: "0123456789abcdeΔ....",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![(15, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: non_narrow_short,
+    text: "0\t2",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![(1, 4)],
+);
+
+test!(
+    case: non_narrow_long,
+    text: "01\t3456789abcdef01234567\u{07}9",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![(2, 4), (24, 0)],
+);
+
+test!(
+    case: output_offset_all,
+    text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
+    filemap_start_pos: 1000,
+    lines: vec![0 + 1000, 7 + 1000, 27 + 1000],
+    multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)],
+    non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)],
+);
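
The SSE2 fast path in the new analyze_filemap.rs above reduces each 16-byte chunk to a bit mask (`_mm_cmpeq_epi8` against `b'\n'`, then `_mm_movemask_epi8`) and recovers the newline offsets by repeatedly taking `trailing_zeros` and clearing the bit it just handled. A minimal, safe-Rust sketch of just that bit walk follows; the mask value and the helper name `newline_offsets` are illustrative stand-ins, not part of the patch.

    // Walk the set bits of a 16-bit "newline mask", as the SSE2 path above does.
    // `mask` stands in for what `_mm_movemask_epi8` would return; the upper 16 bits
    // are forced to 1 so `trailing_zeros` ends the loop once the real bits run out.
    fn newline_offsets(mask: u16) -> Vec<usize> {
        let mut mask = 0xFFFF_0000u32 | mask as u32;
        let mut offsets = Vec::new();
        loop {
            let index = mask.trailing_zeros();
            if index >= 16 {
                break; // past the end of the 16-byte chunk
            }
            offsets.push(index as usize);
            // Clear everything up to and including the bit we just handled.
            mask &= (!1) << index;
        }
        offsets
    }

    fn main() {
        // "a\nc\n............" -> newlines at byte offsets 1 and 3 within the chunk.
        let chunk = b"a\nc\n............";
        let mut mask = 0u16;
        for (i, &byte) in chunk.iter().enumerate().take(16) {
            if byte == b'\n' {
                mask |= 1u16 << i;
            }
        }
        assert_eq!(newline_offsets(mask), vec![1, 3]);
    }

The real code also adds the chunk's base offset plus one to each index, since line starts are recorded one byte *after* each newline.
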
index 3365d5954033fd6ea73e95461eff85ab9f93d793..99d8b1b172d24505aaa6cc54c29a0d0c6774b306 100644 (file)
@@ -18,7 +18,7 @@
 use GLOBALS;
 use Span;
 use edition::Edition;
-use symbol::{Ident, Symbol};
+use symbol::Symbol;
 
 use serialize::{Encodable, Decodable, Encoder, Decoder};
 use std::collections::HashMap;
@@ -190,7 +190,6 @@ pub struct HygieneData {
     marks: Vec<MarkData>,
     syntax_contexts: Vec<SyntaxContextData>,
     markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
-    gensym_to_ctxt: HashMap<Symbol, Span>,
     default_edition: Edition,
 }
 
@@ -211,7 +210,6 @@ pub fn new() -> Self {
                 modern: SyntaxContext(0),
             }],
             markings: HashMap::new(),
-            gensym_to_ctxt: HashMap::new(),
             default_edition: Edition::Edition2015,
         }
     }
@@ -496,6 +494,9 @@ pub struct ExpnInfo {
     /// Whether the macro is allowed to use `unsafe` internally
     /// even if the user crate has `#![forbid(unsafe_code)]`.
     pub allow_internal_unsafe: bool,
+    /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
+    /// for a given macro.
+    pub local_inner_macros: bool,
     /// Edition of the crate in which the macro is defined.
     pub edition: Edition,
 }
@@ -556,22 +557,3 @@ fn decode<D: Decoder>(_: &mut D) -> Result<SyntaxContext, D::Error> {
         Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene
     }
 }
-
-impl Symbol {
-    pub fn from_ident(ident: Ident) -> Symbol {
-        HygieneData::with(|data| {
-            let gensym = ident.name.gensymed();
-            data.gensym_to_ctxt.insert(gensym, ident.span);
-            gensym
-        })
-    }
-
-    pub fn to_ident(self) -> Ident {
-        HygieneData::with(|data| {
-            match data.gensym_to_ctxt.get(&self) {
-                Some(&span) => Ident::new(self.interned(), span),
-                None => Ident::with_empty_ctxt(self),
-            }
-        })
-    }
-}
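
The `local_inner_macros` flag added throughout this commit backs the `#[macro_export(local_inner_macros)]` attribute: single-identifier macro calls made from inside such a macro are rewritten to `$crate::ident!(...)`, so a `macro_rules` helper can call its sibling helpers across crates without the caller importing them. A hedged sketch of the pattern, assuming a toolchain where the attribute is available; the macro names are made up:

    // In the library crate:
    #[macro_export(local_inner_macros)]
    macro_rules! outer {
        // `inner!()` is expanded as `$crate::inner!()` in downstream crates,
        // so callers of `outer!` do not need to import `inner!` themselves.
        () => { inner!() };
    }

    #[macro_export]
    #[doc(hidden)]
    macro_rules! inner {
        () => { 42 };
    }

    // In a downstream crate:
    // assert_eq!(library_crate::outer!(), 42);
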
index 756e0c059a729a57ddf00b52c78a43cbfc31901d..55dec31511c37f506825d111b00dffe50e051c62 100644 (file)
@@ -24,6 +24,7 @@
 #![feature(optin_builtin_traits)]
 #![allow(unused_attributes)]
 #![feature(specialization)]
+#![feature(stdsimd)]
 
 use std::borrow::Cow;
 use std::cell::Cell;
@@ -47,6 +48,9 @@
 extern crate serialize;
 extern crate serialize as rustc_serialize; // used by deriving
 
+#[macro_use]
+extern crate cfg_if;
+
 extern crate unicode_width;
 
 pub mod edition;
@@ -58,6 +62,8 @@
 
 pub mod symbol;
 
+mod analyze_filemap;
+
 pub struct Globals {
     symbol_interner: Lock<symbol::Interner>,
     span_interner: Lock<span_encoding::SpanInterner>,
@@ -652,16 +658,16 @@ fn from(spans: Vec<Span>) -> MultiSpan {
 pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty();
 
 /// Identifies an offset of a multi-byte character in a FileMap
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
 pub struct MultiByteChar {
     /// The absolute offset of the character in the CodeMap
     pub pos: BytePos,
     /// The number of bytes, >=2
-    pub bytes: usize,
+    pub bytes: u8,
 }
 
 /// Identifies an offset of a non-narrow character in a FileMap
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
 pub enum NonNarrowChar {
     /// Represents a zero-width character
     ZeroWidth(BytePos),
@@ -779,11 +785,11 @@ pub struct FileMap {
     /// The end position of this source in the CodeMap
     pub end_pos: BytePos,
     /// Locations of lines beginnings in the source code
-    pub lines: Lock<Vec<BytePos>>,
+    pub lines: Vec<BytePos>,
     /// Locations of multi-byte characters in the source code
-    pub multibyte_chars: Lock<Vec<MultiByteChar>>,
+    pub multibyte_chars: Vec<MultiByteChar>,
     /// Width of characters that are not narrow in the source code
-    pub non_narrow_chars: Lock<Vec<NonNarrowChar>>,
+    pub non_narrow_chars: Vec<NonNarrowChar>,
     /// A hash of the filename, used for speeding up the incr. comp. hashing.
     pub name_hash: u128,
 }
@@ -797,7 +803,7 @@ fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
             s.emit_struct_field("start_pos", 4, |s| self.start_pos.encode(s))?;
             s.emit_struct_field("end_pos", 5, |s| self.end_pos.encode(s))?;
             s.emit_struct_field("lines", 6, |s| {
-                let lines = self.lines.borrow();
+                let lines = &self.lines[..];
                 // store the length
                 s.emit_u32(lines.len() as u32)?;
 
@@ -843,10 +849,10 @@ fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
                 Ok(())
             })?;
             s.emit_struct_field("multibyte_chars", 7, |s| {
-                (*self.multibyte_chars.borrow()).encode(s)
+                self.multibyte_chars.encode(s)
             })?;
             s.emit_struct_field("non_narrow_chars", 8, |s| {
-                (*self.non_narrow_chars.borrow()).encode(s)
+                self.non_narrow_chars.encode(s)
             })?;
             s.emit_struct_field("name_hash", 9, |s| {
                 self.name_hash.encode(s)
@@ -914,9 +920,9 @@ fn decode<D: Decoder>(d: &mut D) -> Result<FileMap, D::Error> {
                 src: None,
                 src_hash,
                 external_src: Lock::new(ExternalSource::AbsentOk),
-                lines: Lock::new(lines),
-                multibyte_chars: Lock::new(multibyte_chars),
-                non_narrow_chars: Lock::new(non_narrow_chars),
+                lines,
+                multibyte_chars,
+                non_narrow_chars,
                 name_hash,
             })
         })
@@ -949,6 +955,9 @@ pub fn new(name: FileName,
         };
         let end_pos = start_pos.to_usize() + src.len();
 
+        let (lines, multibyte_chars, non_narrow_chars) =
+            analyze_filemap::analyze_filemap(&src[..], start_pos);
+
         FileMap {
             name,
             name_was_remapped,
@@ -959,37 +968,17 @@ pub fn new(name: FileName,
             external_src: Lock::new(ExternalSource::Unneeded),
             start_pos,
             end_pos: Pos::from_usize(end_pos),
-            lines: Lock::new(Vec::new()),
-            multibyte_chars: Lock::new(Vec::new()),
-            non_narrow_chars: Lock::new(Vec::new()),
+            lines,
+            multibyte_chars,
+            non_narrow_chars,
             name_hash,
         }
     }
 
-    /// EFFECT: register a start-of-line offset in the
-    /// table of line-beginnings.
-    /// UNCHECKED INVARIANT: these offsets must be added in the right
-    /// order and must be in the right places; there is shared knowledge
-    /// about what ends a line between this file and parse.rs
-    /// WARNING: pos param here is the offset relative to start of CodeMap,
-    /// and CodeMap will append a newline when adding a filemap without a newline at the end,
-    /// so the safe way to call this is with value calculated as
-    /// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
-    pub fn next_line(&self, pos: BytePos) {
-        // the new charpos must be > the last one (or it's the first one).
-        let mut lines = self.lines.borrow_mut();
-        let line_len = lines.len();
-        assert!(line_len == 0 || ((*lines)[line_len - 1] < pos));
-        lines.push(pos);
-    }
-
     /// Return the BytePos of the beginning of the current line.
-    pub fn line_begin_pos(&self) -> BytePos {
-        let lines = self.lines.borrow();
-        match lines.last() {
-            Some(&line_pos) => line_pos,
-            None => self.start_pos,
-        }
+    pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
+        let line_index = self.lookup_line(pos).unwrap();
+        self.lines[line_index]
     }
 
     /// Add externally loaded source.
@@ -1040,8 +1029,7 @@ fn get_until_newline(src: &str, begin: usize) -> &str {
         }
 
         let begin = {
-            let lines = self.lines.borrow();
-            let line = if let Some(line) = lines.get(line_number) {
+            let line = if let Some(line) = self.lines.get(line_number) {
                 line
             } else {
                 return None;
@@ -1059,35 +1047,6 @@ fn get_until_newline(src: &str, begin: usize) -> &str {
         }
     }
 
-    pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
-        assert!(bytes >=2 && bytes <= 4);
-        let mbc = MultiByteChar {
-            pos,
-            bytes,
-        };
-        self.multibyte_chars.borrow_mut().push(mbc);
-    }
-
-    #[inline]
-    pub fn record_width(&self, pos: BytePos, ch: char) {
-        let width = match ch {
-            '\t' =>
-                // Tabs will consume 4 columns.
-                4,
-            '\n' =>
-                // Make newlines take one column so that displayed spans can point them.
-                1,
-            ch =>
-                // Assume control characters are zero width.
-                // FIXME: How can we decide between `width` and `width_cjk`?
-                unicode_width::UnicodeWidthChar::width(ch).unwrap_or(0),
-        };
-        // Only record non-narrow characters.
-        if width != 1 {
-            self.non_narrow_chars.borrow_mut().push(NonNarrowChar::new(pos, width));
-        }
-    }
-
     pub fn is_real_file(&self) -> bool {
         self.name.is_real()
     }
@@ -1100,7 +1059,7 @@ pub fn byte_length(&self) -> u32 {
         self.end_pos.0 - self.start_pos.0
     }
     pub fn count_lines(&self) -> usize {
-        self.lines.borrow().len()
+        self.lines.len()
     }
 
     /// Find the line containing the given position. The return value is the
@@ -1108,13 +1067,12 @@ pub fn count_lines(&self) -> usize {
     /// number. If the filemap is empty or the position is located before the
     /// first line, None is returned.
     pub fn lookup_line(&self, pos: BytePos) -> Option<usize> {
-        let lines = self.lines.borrow();
-        if lines.len() == 0 {
+        if self.lines.len() == 0 {
             return None;
         }
 
-        let line_index = lookup_line(&lines[..], pos);
-        assert!(line_index < lines.len() as isize);
+        let line_index = lookup_line(&self.lines[..], pos);
+        assert!(line_index < self.lines.len() as isize);
         if line_index >= 0 {
             Some(line_index as usize)
         } else {
@@ -1127,12 +1085,11 @@ pub fn line_bounds(&self, line_index: usize) -> (BytePos, BytePos) {
             return (self.start_pos, self.end_pos);
         }
 
-        let lines = self.lines.borrow();
-        assert!(line_index < lines.len());
-        if line_index == (lines.len() - 1) {
-            (lines[line_index], self.end_pos)
+        assert!(line_index < self.lines.len());
+        if line_index == (self.lines.len() - 1) {
+            (self.lines[line_index], self.end_pos)
         } else {
-            (lines[line_index], lines[line_index + 1])
+            (self.lines[line_index], self.lines[line_index + 1])
         }
     }
 
@@ -1156,6 +1113,8 @@ fn remove_bom(src: &mut String) {
 pub trait Pos {
     fn from_usize(n: usize) -> Self;
     fn to_usize(&self) -> usize;
+    fn from_u32(n: u32) -> Self;
+    fn to_u32(&self) -> u32;
 }
 
 /// A byte offset. Keep this small (currently 32-bits), as AST contains
@@ -1177,7 +1136,13 @@ impl Pos for BytePos {
     fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
 
     #[inline(always)]
-    fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize }
+    fn to_usize(&self) -> usize { self.0 as usize }
+
+    #[inline(always)]
+    fn from_u32(n: u32) -> BytePos { BytePos(n) }
+
+    #[inline(always)]
+    fn to_u32(&self) -> u32 { self.0 }
 }
 
 impl Add for BytePos {
@@ -1215,7 +1180,13 @@ impl Pos for CharPos {
     fn from_usize(n: usize) -> CharPos { CharPos(n) }
 
     #[inline(always)]
-    fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
+    fn to_usize(&self) -> usize { self.0 }
+
+    #[inline(always)]
+    fn from_u32(n: u32) -> CharPos { CharPos(n as usize) }
+
+    #[inline(always)]
+    fn to_u32(&self) -> u32 { self.0 as u32}
 }
 
 impl Add for CharPos {
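
With the change above, a FileMap's line table is computed once, up front, by `analyze_filemap`, and `lookup_line` reduces to a search over an immutable `Vec<BytePos>`. A minimal sketch of that lookup over plain `u32` offsets; the function and variable names are illustrative, not the crate's API:

    // Given the sorted byte offsets at which lines begin, find the index of the
    // line containing `pos`. Mirrors what `FileMap::lookup_line` does with the
    // now-immutable `lines` vector.
    fn line_index(line_starts: &[u32], pos: u32) -> Option<usize> {
        if line_starts.is_empty() || pos < line_starts[0] {
            return None;
        }
        match line_starts.binary_search(&pos) {
            Ok(i) => Some(i),      // `pos` is exactly a line start
            Err(i) => Some(i - 1), // `pos` falls inside line `i - 1`
        }
    }

    fn main() {
        // "ab\ncd\n" -> lines begin at offsets 0 and 3; the start registered
        // after the trailing newline (offset 6) is popped again, as above.
        let line_starts = [0u32, 3];
        assert_eq!(line_index(&line_starts, 0), Some(0));
        assert_eq!(line_index(&line_starts, 2), Some(0)); // the '\n' itself
        assert_eq!(line_index(&line_starts, 4), Some(1));
        assert_eq!(line_index(&line_starts, 100), Some(1)); // past the end: last line
    }
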
index abe738d751c133871d560a6c1836e7aae6198f8d..bb64dad12085dfaa0e18e8674efef1a10c7c060d 100644 (file)
@@ -59,6 +59,11 @@ pub fn without_first_quote(self) -> Ident {
         Ident::new(Symbol::intern(self.as_str().trim_left_matches('\'')), self.span)
     }
 
+    /// "Normalize" ident for use in comparisons using "item hygiene".
+    /// Identifiers with same string value become same if they came from the same "modern" macro
+    /// (e.g. `macro` item, but not `macro_rules` item) and stay different if they came from
+    /// different "modern" macros.
+    /// Technically, this operation strips all non-opaque marks from ident's syntactic context.
     pub fn modern(self) -> Ident {
         Ident::new(self.name, self.span.modern())
     }
@@ -70,6 +75,10 @@ pub fn gensym(self) -> Ident {
     pub fn as_str(self) -> LocalInternedString {
         self.name.as_str()
     }
+
+    pub fn as_interned_str(self) -> InternedString {
+        self.name.as_interned_str()
+    }
 }
 
 impl PartialEq for Ident {
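
The new documentation on `Ident::modern` describes the "item hygiene" used for comparisons: only opaque marks from "modern" (`macro`-item) expansions are kept. A small nightly-only illustration of the behaviour this enables, assuming `#![feature(decl_macro)]`; the macro and function names are made up:

    #![feature(decl_macro)]

    macro define_helper() {
        // `helper` carries an opaque mark from the `macro` expansion, so after
        // normalization it is not the same identifier as a `helper` written at
        // the call site.
        #[allow(dead_code)]
        fn helper() -> u32 { 1 }
    }

    define_helper!();

    fn main() {
        // helper(); // would not resolve: the call-site `helper` and the
        //           // macro-defined `helper` differ under item hygiene
    }
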
index 4c88df332460b39a368f82c05f1da1496ef55d5b..75d5ac6495ae8dbd24e56cfd21e6c0bf23117110 100644 (file)
@@ -26,13 +26,13 @@ fn main() {
 #[link(name = "rust_test_helpers", kind = "static")]
 extern {
     #[no_output]
-    //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
     fn some_definitely_unknown_symbol_which_should_be_removed();
 
     #[nop_attr]
-    //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
     fn rust_get_test_int() -> isize;
 
     emit_input!(fn rust_dbg_extern_identity_u32(arg: u32) -> u32;);
-    //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
 }
index 92c6b1fd0b5822a0125c3d8e6bf7dad3f08821dc..39b1b342eac3149752718d55ad5d48e24e3669c9 100644 (file)
 #![deny(const_err)]
 
 pub const A: i8 = -std::i8::MIN; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
-//~| ERROR this expression will panic at runtime
 pub const B: u8 = 200u8 + 200u8; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
 pub const C: u8 = 200u8 * 4; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
 pub const D: u8 = 42u8 - (42u8 + 1); //~ ERROR const_err
-//~^ ERROR this constant cannot be used
 pub const E: u8 = [5u8][1]; //~ ERROR const_err
-//~| ERROR this constant cannot be used
 
 fn main() {
     let _a = A;
index 4a5e78b381ed049bdace21dc842ded1f758f6ed9..f2ee8a7078c7c0d9f94dbeb3a2c874be32da4055 100644 (file)
 #![deny(const_err)]
 
 pub const A: i8 = -std::i8::MIN;
-//~^ ERROR E0080
-//~| ERROR attempt to negate with overflow
-//~| ERROR this expression will panic at runtime
-//~| ERROR this constant cannot be used
+//~^ ERROR this constant cannot be used
 pub const B: i8 = A;
 //~^ ERROR const_err
 //~| ERROR const_err
index f77603b3ebafcf83a884e12316dec5e278220fd5..8683f6a0231921c0e89d84a9dc36fe68edfc6e58 100644 (file)
@@ -23,9 +23,10 @@ fn black_box<T>(_: T) {
 // Make sure that the two uses get two errors.
 const FOO: u8 = [5u8][1];
 //~^ ERROR constant evaluation error
-//~| ERROR constant evaluation error
 //~| index out of bounds: the len is 1 but the index is 1
 
 fn main() {
     black_box((FOO, FOO));
+    //~^ ERROR referenced constant has errors
+    //~| ERROR could not evaluate constant
 }
index 7b5db7a4f6db07923a9b71b0ee23114a20a6f0d9..88fc51827753532d7107dc574df8d298f8b086fa 100644 (file)
      //~^ ERROR this constant cannot be used
     (
      i8::MIN - 1,
-     //~^ ERROR attempt to subtract with overflow
      );
 
 const VALS_I16: (i16,) =
      //~^ ERROR this constant cannot be used
     (
      i16::MIN - 1,
-     //~^ ERROR attempt to subtract with overflow
      );
 
 const VALS_I32: (i32,) =
      //~^ ERROR this constant cannot be used
     (
      i32::MIN - 1,
-     //~^ ERROR attempt to subtract with overflow
      );
 
 const VALS_I64: (i64,) =
      //~^ ERROR this constant cannot be used
     (
      i64::MIN - 1,
-     //~^ ERROR attempt to subtract with overflow
      );
 
 const VALS_U8: (u8,) =
      //~^ ERROR this constant cannot be used
     (
      u8::MIN - 1,
-     //~^ ERROR attempt to subtract with overflow
      );
 
 const VALS_U16: (u16,) = (
      //~^ ERROR this constant cannot be used
      u16::MIN - 1,
-     //~^ ERROR attempt to subtract with overflow
      );
 
 const VALS_U32: (u32,) = (
      //~^ ERROR this constant cannot be used
      u32::MIN - 1,
-     //~^ ERROR attempt to subtract with overflow
      );
 
 const VALS_U64: (u64,) =
      //~^ ERROR this constant cannot be used
     (
      u64::MIN - 1,
-     //~^ ERROR attempt to subtract with overflow
      );
 
 fn main() {
index ce4dc72555dc0172afcbe78078a113f23f90f17e..1878daea93198a40cf169e407b43f3c33fd2cc2a 100644 (file)
      //~^ ERROR this constant cannot be used
     (
      i8::MAX + 1,
-     //~^ ERROR attempt to add with overflow
      );
 
 const VALS_I16: (i16,) =
      //~^ ERROR this constant cannot be used
     (
      i16::MAX + 1,
-     //~^ ERROR attempt to add with overflow
      );
 
 const VALS_I32: (i32,) =
      //~^ ERROR this constant cannot be used
     (
      i32::MAX + 1,
-     //~^ ERROR attempt to add with overflow
      );
 
 const VALS_I64: (i64,) =
      //~^ ERROR this constant cannot be used
     (
      i64::MAX + 1,
-     //~^ ERROR attempt to add with overflow
      );
 
 const VALS_U8: (u8,) =
      //~^ ERROR this constant cannot be used
     (
      u8::MAX + 1,
-     //~^ ERROR attempt to add with overflow
      );
 
 const VALS_U16: (u16,) = (
      //~^ ERROR this constant cannot be used
      u16::MAX + 1,
-     //~^ ERROR attempt to add with overflow
      );
 
 const VALS_U32: (u32,) = (
      //~^ ERROR this constant cannot be used
      u32::MAX + 1,
-     //~^ ERROR attempt to add with overflow
      );
 
 const VALS_U64: (u64,) =
      //~^ ERROR this constant cannot be used
     (
      u64::MAX + 1,
-     //~^ ERROR attempt to add with overflow
      );
 
 fn main() {
index 88eb14a133019a33659cd2d699396ce7f8c376e7..f3d28295bf81ce19a0116590d5d796c3884aad62 100644 (file)
      //~^ ERROR this constant cannot be used
     (
      i8::MIN * 2,
-     //~^ ERROR attempt to multiply with overflow
      );
 
 const VALS_I16: (i16,) =
      //~^ ERROR this constant cannot be used
     (
      i16::MIN * 2,
-     //~^ ERROR attempt to multiply with overflow
      );
 
 const VALS_I32: (i32,) =
      //~^ ERROR this constant cannot be used
     (
      i32::MIN * 2,
-     //~^ ERROR attempt to multiply with overflow
      );
 
 const VALS_I64: (i64,) =
      //~^ ERROR this constant cannot be used
     (
      i64::MIN * 2,
-     //~^ ERROR attempt to multiply with overflow
      );
 
 const VALS_U8: (u8,) =
      //~^ ERROR this constant cannot be used
     (
      u8::MAX * 2,
-     //~^ ERROR attempt to multiply with overflow
      );
 
 const VALS_U16: (u16,) = (
      //~^ ERROR this constant cannot be used
      u16::MAX * 2,
-     //~^ ERROR attempt to multiply with overflow
      );
 
 const VALS_U32: (u32,) = (
      //~^ ERROR this constant cannot be used
      u32::MAX * 2,
-     //~^ ERROR attempt to multiply with overflow
      );
 
 const VALS_U64: (u64,) =
      //~^ ERROR this constant cannot be used
     (
      u64::MAX * 2,
-     //~^ ERROR attempt to multiply with overflow
      );
 
 fn main() {
index b304443f63145701d2294af7713e685cb6c25fc4..4ba44fa54d8cd79678864fc0a8d4cee4d8169d0f 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 // ignore-tidy-linelength
-// compile-flags: --edition=2015 -Zunstable-options
+// edition:2015
 
 // tests that editions work with the tyvar warning-turned-error
 
index d0cf81d59cf370e81b042a8b2bfaeccf0c9d4fbc..c8548ed35b972765fe9b8988213ec1a87915c26b 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 // ignore-tidy-linelength
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
 
 // tests that editions work with the tyvar warning-turned-error
 
index 7d7f95cbbf5cbc8635b2da7166994eca88c72b94..b6e273881ccd62636a50a64d95e1314ca4b5d0ed 100644 (file)
@@ -34,9 +34,9 @@ fn main() {
 #[link(name = "rust_test_helpers", kind = "static")]
 extern {
     returns_isize!(rust_get_test_int);
-    //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro invocations in `extern {}` blocks are experimental.
     takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
-    //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro invocations in `extern {}` blocks are experimental.
     emits_nothing!();
-    //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro invocations in `extern {}` blocks are experimental.
 }
index d750851b719e4e691a7691c24255a2480503c6df..5490acf4bd6c1424d065ebc0479361526ace3fd6 100644 (file)
@@ -19,5 +19,5 @@ fn assert<T: UnwindSafe + ?Sized>() {}
 fn main() {
     assert::<Rc<RefCell<i32>>>();
     //~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
-    //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+    //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
 }
index cd27b274258ea3d49619ef0622576c6cf22a003e..0fac395a115828fe1cb716482bde512e2042bd71 100644 (file)
@@ -19,5 +19,5 @@ fn assert<T: UnwindSafe + ?Sized>() {}
 fn main() {
     assert::<Arc<RefCell<i32>>>();
     //~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
-    //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+    //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
 }
index 956eca432c5f06f1602ca0e1a86723c62cce462c..bf0392018b5dda96659993bbfe740efc1faabe90 100644 (file)
@@ -18,5 +18,5 @@ fn assert<T: UnwindSafe + ?Sized>() {}
 fn main() {
     assert::<&RefCell<i32>>();
     //~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
-    //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+    //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
 }
index d0ca1db52120480d29447df6491d6f3190516b26..950f0a0b53aecaa5d4aa942f03a770053b65bd01 100644 (file)
@@ -18,5 +18,5 @@ fn assert<T: UnwindSafe + ?Sized>() {}
 fn main() {
     assert::<*mut RefCell<i32>>();
     //~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
-    //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+    //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
 }
index fcf4714ba9695ff3c4a40a67290072ee34d57a16..e04bb27f43500723d39886c97e8a74507a1df05b 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
 
 #![feature(extern_absolute_paths)]
 
index c256c5592c2695a1d95975db26f189cee04d9ab7..bebf0236bb4d03e7b1f377a0719e8ec94a027927 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
 
 #![feature(extern_absolute_paths)]
 
index 837dc617b3ad056cb539711b3659ddb0f6ffd3bf..5906a0719c842cfd1305b49a122db46c627579d1 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
 
 #![feature(extern_absolute_paths)]
 
index 9b7baa0016344221d9de93fc676af7d0e3a727f2..7111176dbd9af8e68ae6369315487515899d72b8 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 // aux-build:xcrate.rs
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
 
 #![feature(crate_in_paths)]
 #![feature(extern_absolute_paths)]
diff --git a/src/test/run-make-fulldeps/issue-51671/Makefile b/src/test/run-make-fulldeps/issue-51671/Makefile
new file mode 100644 (file)
index 0000000..bdb5ca8
--- /dev/null
@@ -0,0 +1,13 @@
+-include ../tools.mk
+
+ifdef IS_WINDOWS
+# Do nothing on MSVC.
+all:
+       exit 0
+else
+all:
+       $(RUSTC) --emit=obj app.rs
+       nm $(TMPDIR)/app.o | $(CGREP) rust_begin_unwind
+       nm $(TMPDIR)/app.o | $(CGREP) rust_eh_personality
+       nm $(TMPDIR)/app.o | $(CGREP) rust_oom
+endif
diff --git a/src/test/run-make-fulldeps/issue-51671/app.rs b/src/test/run-make-fulldeps/issue-51671/app.rs
new file mode 100644 (file)
index 0000000..720ce15
--- /dev/null
@@ -0,0 +1,28 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "bin"]
+#![feature(lang_items)]
+#![feature(panic_implementation)]
+#![no_main]
+#![no_std]
+
+use core::panic::PanicInfo;
+
+#[panic_implementation]
+fn panic(_: &PanicInfo) -> ! {
+    loop {}
+}
+
+#[lang = "eh_personality"]
+fn eh() {}
+
+#[lang = "oom"]
+fn oom() {}
index d7ede763838687f79a620e7f4982329364bce61c..ac39118c5f1e01e0b96295e2207f319f5ae67ddd 100644 (file)
@@ -54,6 +54,7 @@ pub fn plugin_registrar(reg: &mut Registry) {
             def_info: None,
             allow_internal_unstable: false,
             allow_internal_unsafe: false,
+            local_inner_macros: false,
             unstable_feature: None,
             edition: hygiene::default_edition(),
         });
index 914e3dd4932488b7f8c3dbb49e58b50e4a83c3aa..4f419e70074fc673a329cecb20e89b9f859b2612 100644 (file)
@@ -41,17 +41,16 @@ enum WireProtocol {
 fn encode_json<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
     write!(wr, "{}", json::as_json(val));
 }
-fn encode_opaque<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
+fn encode_opaque<T: Encodable>(val: &T, wr: Vec<u8>) {
     let mut encoder = opaque::Encoder::new(wr);
     val.encode(&mut encoder);
 }
 
 pub fn main() {
     let target = Foo{baz: false,};
-    let mut wr = Cursor::new(Vec::new());
     let proto = WireProtocol::JSON;
     match proto {
-        WireProtocol::JSON => encode_json(&target, &mut wr),
-        WireProtocol::Opaque => encode_opaque(&target, &mut wr)
+        WireProtocol::JSON => encode_json(&target, &mut Cursor::new(Vec::new())),
+        WireProtocol::Opaque => encode_opaque(&target, Vec::new())
     }
 }
index 817db4bb79ec3a4a586b49cea88bef37731fc259..8b649f6ef7bbdc548924c8dfcfc840831c6fdfeb 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 
 #![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
 
index 9127c8e350a8cc2bb2e98d3e909fa1d89ce6cdd8..69952e9f90af6a4949bd9f8f9da2528356c35a02 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 
 #![feature(raw_identifiers)]
 
index 4fef77d67ea71de1d1371cf271773540e8453a26..415988586a066f18f379df16afda7dbf54b363a5 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 
 #![feature(raw_identifiers)]
 
index 41480bb978ec2f9d30bfde1700c2dae192960094..73869e63de7c4bfc879ff160230bade1da85bbe3 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 // aux-build:edition-kw-macro-2015.rs
 
 #![feature(raw_identifiers)]
index 78835d510639f06481a65d409e515a09a399d13c..0a1c6505854c99cacdf044efed2eed888f0316c3 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 // aux-build:edition-kw-macro-2018.rs
 
 #![feature(raw_identifiers)]
index 46d5f222cbb3179ec566611f1ef6399df79f9186..4c22667d0bf4302e9cf3de452cf3f592c404ea0f 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 // aux-build:edition-kw-macro-2015.rs
 
 #![feature(raw_identifiers)]
index 06482988937b667e88feed720588ac0d36cfcb09..2a98b904da5dbedb716e7c4e2208bf5eb0343166 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 // aux-build:edition-kw-macro-2018.rs
 
 #![feature(raw_identifiers)]
index bbe066481a8b15d313c807e0a691b334a2fcdbf7..dfa583415f7b64cab475b27d851fbc3b142bc72e 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 // aux-build:xcrate.rs
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
 
 #![feature(extern_absolute_paths)]
 
index ead462cf0d2cacc09754c6718616cbaffd30e185..6317dc17652cd0b58ababe49baa858ae8190e3c9 100644 (file)
@@ -12,7 +12,8 @@
 //
 // Regression test for #47075.
 
-// compile-flags: --test --edition=2018 -Zunstable-options
+// edition:2018
+// compile-flags: --test
 
 #![feature(extern_absolute_paths)]
 
index ce062ded7f73f03ddb7f9a619823424a23ac3d8f..aab4974e2e7a1c9ff6376c19f96ed33f754a4e76 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 
 #![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
 
index 9127c8e350a8cc2bb2e98d3e909fa1d89ce6cdd8..69952e9f90af6a4949bd9f8f9da2528356c35a02 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 
 #![feature(raw_identifiers)]
 
index 4fef77d67ea71de1d1371cf271773540e8453a26..415988586a066f18f379df16afda7dbf54b363a5 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 
 #![feature(raw_identifiers)]
 
index 3580950854dc7f087b90990b6719d34d07a28cb4..8bc302a2befa4fca221547969703380a6e194c38 100644 (file)
@@ -1,8 +1,10 @@
-warning: attempt to subtract with overflow
-  --> $DIR/conditional_array_execution.rs:15:19
+warning: this constant cannot be used
+  --> $DIR/conditional_array_execution.rs:15:1
    |
 LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
-   |                   ^^^^^
+   | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                   |
+   |                   attempt to subtract with overflow
    |
 note: lint level defined here
   --> $DIR/conditional_array_execution.rs:11:9
@@ -10,16 +12,8 @@ note: lint level defined here
 LL | #![warn(const_err)]
    |         ^^^^^^^^^
 
-warning: this constant cannot be used
-  --> $DIR/conditional_array_execution.rs:15:1
-   |
-LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
-   | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |                   |
-   |                   attempt to subtract with overflow
-
-warning: referenced constant
-  --> $DIR/conditional_array_execution.rs:20:20
+warning: referenced constant has errors
+  --> $DIR/conditional_array_execution.rs:19:20
    |
 LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
    |                   ----- attempt to subtract with overflow
@@ -28,13 +22,13 @@ LL |     println!("{}", FOO);
    |                    ^^^
 
 warning: this expression will panic at runtime
-  --> $DIR/conditional_array_execution.rs:20:20
+  --> $DIR/conditional_array_execution.rs:19:20
    |
 LL |     println!("{}", FOO);
    |                    ^^^ referenced constant has errors
 
-error[E0080]: referenced constant
-  --> $DIR/conditional_array_execution.rs:20:5
+error[E0080]: referenced constant has errors
+  --> $DIR/conditional_array_execution.rs:19:5
    |
 LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
    |                   ----- attempt to subtract with overflow
@@ -45,7 +39,7 @@ LL |     println!("{}", FOO);
    = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
 
 error[E0080]: erroneous constant used
-  --> $DIR/conditional_array_execution.rs:20:5
+  --> $DIR/conditional_array_execution.rs:19:5
    |
 LL |     println!("{}", FOO);
    |     ^^^^^^^^^^^^^^^---^^
@@ -54,8 +48,8 @@ LL |     println!("{}", FOO);
    |
    = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
 
-error[E0080]: referenced constant
-  --> $DIR/conditional_array_execution.rs:20:20
+error[E0080]: referenced constant has errors
+  --> $DIR/conditional_array_execution.rs:19:20
    |
 LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
    |                   ----- attempt to subtract with overflow
@@ -64,7 +58,7 @@ LL |     println!("{}", FOO);
    |                    ^^^
 
 error[E0080]: erroneous constant used
-  --> $DIR/conditional_array_execution.rs:20:20
+  --> $DIR/conditional_array_execution.rs:19:20
    |
 LL |     println!("{}", FOO);
    |                    ^^^ referenced constant has errors
index ac555b25afdcd60647c1ec6200190777c544fa0a..99487eb49792324137d03fea2eae59ee674e757a 100644 (file)
@@ -13,8 +13,7 @@
 const X: u32 = 5;
 const Y: u32 = 6;
 const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
 
 fn main() {
     println!("{}", FOO);
index 64010c946a7f1de863670d6d429a076f2b1f8846..649da03a5e7fae60d713e0f1eb96805432233e56 100644 (file)
@@ -1,8 +1,10 @@
-warning: attempt to subtract with overflow
-  --> $DIR/conditional_array_execution.rs:15:19
+warning: this constant cannot be used
+  --> $DIR/conditional_array_execution.rs:15:1
    |
 LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
-   |                   ^^^^^
+   | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
+   |                   |
+   |                   attempt to subtract with overflow
    |
 note: lint level defined here
   --> $DIR/conditional_array_execution.rs:11:9
@@ -10,16 +12,8 @@ note: lint level defined here
 LL | #![warn(const_err)]
    |         ^^^^^^^^^
 
-warning: this constant cannot be used
-  --> $DIR/conditional_array_execution.rs:15:1
-   |
-LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
-   | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
-   |                   |
-   |                   attempt to subtract with overflow
-
-warning: referenced constant
-  --> $DIR/conditional_array_execution.rs:20:20
+warning: referenced constant has errors
+  --> $DIR/conditional_array_execution.rs:19:20
    |
 LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
    |                   ----- attempt to subtract with overflow
@@ -28,13 +22,13 @@ LL |     println!("{}", FOO);
    |                    ^^^
 
 warning: this expression will panic at runtime
-  --> $DIR/conditional_array_execution.rs:20:20
+  --> $DIR/conditional_array_execution.rs:19:20
    |
 LL |     println!("{}", FOO);
    |                    ^^^ referenced constant has errors
 
-error[E0080]: referenced constant
-  --> $DIR/conditional_array_execution.rs:20:20
+error[E0080]: referenced constant has errors
+  --> $DIR/conditional_array_execution.rs:19:20
    |
 LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
    |                   ----- attempt to subtract with overflow
@@ -43,7 +37,7 @@ LL |     println!("{}", FOO);
    |                    ^^^
 
 error[E0080]: erroneous constant used
-  --> $DIR/conditional_array_execution.rs:20:20
+  --> $DIR/conditional_array_execution.rs:19:20
    |
 LL |     println!("{}", FOO);
    |                    ^^^ referenced constant has errors
diff --git a/src/test/ui/const-eval/ice-generic-assoc-const.rs b/src/test/ui/const-eval/ice-generic-assoc-const.rs
new file mode 100644 (file)
index 0000000..31e056b
--- /dev/null
@@ -0,0 +1,28 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+pub trait Nullable {
+    const NULL: Self;
+
+    fn is_null(&self) -> bool;
+}
+
+impl<T> Nullable for *const T {
+    const NULL: Self = 0 as *const T;
+
+    fn is_null(&self) -> bool {
+        *self == Self::NULL
+    }
+}
+
+fn main() {
+}
index 3bde12ade90c3c2ff7ca8ccb95cd1c0b77963e95..5819e6a9254a759bcb63a8c2d0cdf89740824b02 100644 (file)
@@ -1,15 +1,3 @@
-warning: attempt to subtract with overflow
-  --> $DIR/issue-43197.rs:20:20
-   |
-LL |     const X: u32 = 0-1;
-   |                    ^^^
-   |
-note: lint level defined here
-  --> $DIR/issue-43197.rs:11:9
-   |
-LL | #![warn(const_err)]
-   |         ^^^^^^^^^
-
 warning: this constant cannot be used
   --> $DIR/issue-43197.rs:20:5
    |
@@ -17,23 +5,23 @@ LL |     const X: u32 = 0-1;
    |     ^^^^^^^^^^^^^^^---^
    |                    |
    |                    attempt to subtract with overflow
-
-warning: attempt to subtract with overflow
-  --> $DIR/issue-43197.rs:23:24
    |
-LL |     const Y: u32 = foo(0-1);
-   |                        ^^^
+note: lint level defined here
+  --> $DIR/issue-43197.rs:11:9
+   |
+LL | #![warn(const_err)]
+   |         ^^^^^^^^^
 
 warning: this constant cannot be used
-  --> $DIR/issue-43197.rs:23:5
+  --> $DIR/issue-43197.rs:22:5
    |
 LL |     const Y: u32 = foo(0-1);
    |     ^^^^^^^^^^^^^^^^^^^---^^
    |                        |
    |                        attempt to subtract with overflow
 
-warning: referenced constant
-  --> $DIR/issue-43197.rs:26:23
+warning: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:23
    |
 LL |     const X: u32 = 0-1;
    |                    --- attempt to subtract with overflow
@@ -42,28 +30,28 @@ LL |     println!("{} {}", X, Y);
    |                       ^
 
 warning: this expression will panic at runtime
-  --> $DIR/issue-43197.rs:26:23
+  --> $DIR/issue-43197.rs:24:23
    |
 LL |     println!("{} {}", X, Y);
    |                       ^ referenced constant has errors
 
-warning: referenced constant
-  --> $DIR/issue-43197.rs:26:26
+warning: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:26
    |
 LL |     const Y: u32 = foo(0-1);
    |                        --- attempt to subtract with overflow
-...
+LL |     //~^ WARN this constant cannot be used
 LL |     println!("{} {}", X, Y);
    |                          ^
 
 warning: this expression will panic at runtime
-  --> $DIR/issue-43197.rs:26:26
+  --> $DIR/issue-43197.rs:24:26
    |
 LL |     println!("{} {}", X, Y);
    |                          ^ referenced constant has errors
 
-error[E0080]: referenced constant
-  --> $DIR/issue-43197.rs:26:5
+error[E0080]: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:5
    |
 LL |     const X: u32 = 0-1;
    |                    --- attempt to subtract with overflow
@@ -74,7 +62,7 @@ LL |     println!("{} {}", X, Y);
    = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
 
 error[E0080]: erroneous constant used
-  --> $DIR/issue-43197.rs:26:5
+  --> $DIR/issue-43197.rs:24:5
    |
 LL |     println!("{} {}", X, Y);
    |     ^^^^^^^^^^^^^^^^^^-^^^^^
@@ -83,23 +71,23 @@ LL |     println!("{} {}", X, Y);
    |
    = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
 
-error[E0080]: referenced constant
-  --> $DIR/issue-43197.rs:26:26
+error[E0080]: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:26
    |
 LL |     const Y: u32 = foo(0-1);
    |                        --- attempt to subtract with overflow
-...
+LL |     //~^ WARN this constant cannot be used
 LL |     println!("{} {}", X, Y);
    |                          ^
 
 error[E0080]: erroneous constant used
-  --> $DIR/issue-43197.rs:26:26
+  --> $DIR/issue-43197.rs:24:26
    |
 LL |     println!("{} {}", X, Y);
    |                          ^ referenced constant has errors
 
-error[E0080]: referenced constant
-  --> $DIR/issue-43197.rs:26:23
+error[E0080]: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:23
    |
 LL |     const X: u32 = 0-1;
    |                    --- attempt to subtract with overflow
@@ -108,7 +96,7 @@ LL |     println!("{} {}", X, Y);
    |                       ^
 
 error[E0080]: erroneous constant used
-  --> $DIR/issue-43197.rs:26:23
+  --> $DIR/issue-43197.rs:24:23
    |
 LL |     println!("{} {}", X, Y);
    |                       ^ referenced constant has errors
index 03aa65eb274671716088717e03ab1db03b76693e..9304af7b811cf7832ce7f494a96f63140fe878c2 100644 (file)
@@ -18,11 +18,9 @@ const fn foo(x: u32) -> u32 {
 
 fn main() {
     const X: u32 = 0-1;
-    //~^ WARN attempt to subtract with overflow
-    //~| WARN this constant cannot be used
+    //~^ WARN this constant cannot be used
     const Y: u32 = foo(0-1);
-    //~^ WARN attempt to subtract with overflow
-    //~| WARN this constant cannot be used
+    //~^ WARN this constant cannot be used
     println!("{} {}", X, Y);
     //~^ WARN this expression will panic at runtime
     //~| WARN this expression will panic at runtime
index 071d878730744b409a2b47cb32040ef5ab4a91eb..bf864d81ea3ca179f34825c5b01ab59f13daf2de 100644 (file)
@@ -1,15 +1,3 @@
-warning: attempt to subtract with overflow
-  --> $DIR/issue-43197.rs:20:20
-   |
-LL |     const X: u32 = 0-1;
-   |                    ^^^
-   |
-note: lint level defined here
-  --> $DIR/issue-43197.rs:11:9
-   |
-LL | #![warn(const_err)]
-   |         ^^^^^^^^^
-
 warning: this constant cannot be used
   --> $DIR/issue-43197.rs:20:5
    |
@@ -17,23 +5,23 @@ LL |     const X: u32 = 0-1;
    |     ^^^^^^^^^^^^^^^---^
    |                    |
    |                    attempt to subtract with overflow
-
-warning: attempt to subtract with overflow
-  --> $DIR/issue-43197.rs:23:24
    |
-LL |     const Y: u32 = foo(0-1);
-   |                        ^^^
+note: lint level defined here
+  --> $DIR/issue-43197.rs:11:9
+   |
+LL | #![warn(const_err)]
+   |         ^^^^^^^^^
 
 warning: this constant cannot be used
-  --> $DIR/issue-43197.rs:23:5
+  --> $DIR/issue-43197.rs:22:5
    |
 LL |     const Y: u32 = foo(0-1);
    |     ^^^^^^^^^^^^^^^^^^^---^^
    |                        |
    |                        attempt to subtract with overflow
 
-warning: referenced constant
-  --> $DIR/issue-43197.rs:26:23
+warning: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:23
    |
 LL |     const X: u32 = 0-1;
    |                    --- attempt to subtract with overflow
@@ -42,43 +30,43 @@ LL |     println!("{} {}", X, Y);
    |                       ^
 
 warning: this expression will panic at runtime
-  --> $DIR/issue-43197.rs:26:23
+  --> $DIR/issue-43197.rs:24:23
    |
 LL |     println!("{} {}", X, Y);
    |                       ^ referenced constant has errors
 
-warning: referenced constant
-  --> $DIR/issue-43197.rs:26:26
+warning: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:26
    |
 LL |     const Y: u32 = foo(0-1);
    |                        --- attempt to subtract with overflow
-...
+LL |     //~^ WARN this constant cannot be used
 LL |     println!("{} {}", X, Y);
    |                          ^
 
 warning: this expression will panic at runtime
-  --> $DIR/issue-43197.rs:26:26
+  --> $DIR/issue-43197.rs:24:26
    |
 LL |     println!("{} {}", X, Y);
    |                          ^ referenced constant has errors
 
-error[E0080]: referenced constant
-  --> $DIR/issue-43197.rs:26:26
+error[E0080]: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:26
    |
 LL |     const Y: u32 = foo(0-1);
    |                        --- attempt to subtract with overflow
-...
+LL |     //~^ WARN this constant cannot be used
 LL |     println!("{} {}", X, Y);
    |                          ^
 
 error[E0080]: erroneous constant used
-  --> $DIR/issue-43197.rs:26:26
+  --> $DIR/issue-43197.rs:24:26
    |
 LL |     println!("{} {}", X, Y);
    |                          ^ referenced constant has errors
 
-error[E0080]: referenced constant
-  --> $DIR/issue-43197.rs:26:23
+error[E0080]: referenced constant has errors
+  --> $DIR/issue-43197.rs:24:23
    |
 LL |     const X: u32 = 0-1;
    |                    --- attempt to subtract with overflow
@@ -87,7 +75,7 @@ LL |     println!("{} {}", X, Y);
    |                       ^
 
 error[E0080]: erroneous constant used
-  --> $DIR/issue-43197.rs:26:23
+  --> $DIR/issue-43197.rs:24:23
    |
 LL |     println!("{} {}", X, Y);
    |                       ^ referenced constant has errors
index ad4f08966c01220560ac2b8cbdcac60a82d9436f..eeb152e00ea477d2c5d5ea7095acfe69648e824a 100644 (file)
@@ -1,4 +1,4 @@
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
   --> $DIR/issue-44578.rs:35:5
    |
 LL |     const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
@@ -19,7 +19,7 @@ LL |     println!("{}", <Bar<u16, u8> as Foo>::AMT);
    |
    = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
 
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
   --> $DIR/issue-44578.rs:35:20
    |
 LL |     const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
index 28a723a069edf5eda39331c0c3c14158d6198571..06174f37dcad1145896f92a9d2fa78e3e68a5c7f 100644 (file)
@@ -1,4 +1,4 @@
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
   --> $DIR/issue-44578.rs:35:20
    |
 LL |     const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
index 3c59cb0e2bc727c03145f5f639a2755fc189f7fd..1e37e9498120fb64da6ba57d8feb4f03c1381103 100644 (file)
@@ -1,4 +1,4 @@
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
   --> $DIR/issue-50814-2.rs:26:5
    |
 LL |     const BAR: usize = [5, 6, 7][T::BOO];
index 145279ccc033d71d77f70f9513d9ca8eb0a57b5f..16160207c573ba20d39f923db33cf0d30d687d6d 100644 (file)
@@ -1,4 +1,4 @@
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
   --> $DIR/issue-50814.rs:27:5
    |
 LL |     const MAX: u8 = A::MAX + B::MAX;
index ef8fdb33d748aa578dc66abfc348905e45716c81..b7cfa949bac957f76432f4be129acd12838f1bad 100644 (file)
@@ -14,8 +14,7 @@
 #![crate_type = "lib"]
 
 pub const Z: u32 = 0 - 1;
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
 
 pub type Foo = [i32; 0 - 1];
 //~^ WARN attempt to subtract with overflow
index 352289417547e20a562e339df3b2460bb93e2c96..fa3a79a5f1790b09de082fbcb3e3b21d5c8875dc 100644 (file)
@@ -1,8 +1,10 @@
-warning: attempt to subtract with overflow
-  --> $DIR/pub_const_err.rs:16:20
+warning: this constant cannot be used
+  --> $DIR/pub_const_err.rs:16:1
    |
 LL | pub const Z: u32 = 0 - 1;
-   |                    ^^^^^
+   | ^^^^^^^^^^^^^^^^^^^-----^
+   |                    |
+   |                    attempt to subtract with overflow
    |
 note: lint level defined here
   --> $DIR/pub_const_err.rs:12:9
@@ -10,22 +12,14 @@ note: lint level defined here
 LL | #![warn(const_err)]
    |         ^^^^^^^^^
 
-warning: this constant cannot be used
-  --> $DIR/pub_const_err.rs:16:1
-   |
-LL | pub const Z: u32 = 0 - 1;
-   | ^^^^^^^^^^^^^^^^^^^-----^
-   |                    |
-   |                    attempt to subtract with overflow
-
 warning: attempt to subtract with overflow
-  --> $DIR/pub_const_err.rs:20:22
+  --> $DIR/pub_const_err.rs:19:22
    |
 LL | pub type Foo = [i32; 0 - 1];
    |                      ^^^^^
 
 warning: this array length cannot be used
-  --> $DIR/pub_const_err.rs:20:22
+  --> $DIR/pub_const_err.rs:19:22
    |
 LL | pub type Foo = [i32; 0 - 1];
    |                      ^^^^^ attempt to subtract with overflow
index f65da1d8674a29105b1a1d6b0af4d9a6ae9327ac..bafa5b2f4da12ffa0efd556ac84d7bff591170ef 100644 (file)
@@ -12,8 +12,7 @@
 #![warn(const_err)]
 
 pub const Z: u32 = 0 - 1;
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
 
 pub type Foo = [i32; 0 - 1];
 //~^ WARN attempt to subtract with overflow
index a6db2176011d598e61ba6147d5350c8b4aa220ea..73229c60d14dbe45a07d7d2be07afc3b25ac6ed8 100644 (file)
@@ -1,8 +1,10 @@
-warning: attempt to subtract with overflow
-  --> $DIR/pub_const_err_bin.rs:14:20
+warning: this constant cannot be used
+  --> $DIR/pub_const_err_bin.rs:14:1
    |
 LL | pub const Z: u32 = 0 - 1;
-   |                    ^^^^^
+   | ^^^^^^^^^^^^^^^^^^^-----^
+   |                    |
+   |                    attempt to subtract with overflow
    |
 note: lint level defined here
   --> $DIR/pub_const_err_bin.rs:12:9
@@ -10,22 +12,14 @@ note: lint level defined here
 LL | #![warn(const_err)]
    |         ^^^^^^^^^
 
-warning: this constant cannot be used
-  --> $DIR/pub_const_err_bin.rs:14:1
-   |
-LL | pub const Z: u32 = 0 - 1;
-   | ^^^^^^^^^^^^^^^^^^^-----^
-   |                    |
-   |                    attempt to subtract with overflow
-
 warning: attempt to subtract with overflow
-  --> $DIR/pub_const_err_bin.rs:18:22
+  --> $DIR/pub_const_err_bin.rs:17:22
    |
 LL | pub type Foo = [i32; 0 - 1];
    |                      ^^^^^
 
 warning: this array length cannot be used
-  --> $DIR/pub_const_err_bin.rs:18:22
+  --> $DIR/pub_const_err_bin.rs:17:22
    |
 LL | pub type Foo = [i32; 0 - 1];
    |                      ^^^^^ attempt to subtract with overflow
index 20b8865767459793351b132fe896878a2543496c..8db1411005e5772daa8548a395adc18fd16205f2 100644 (file)
 const ONE: usize = 1;
 const TWO: usize = 2;
 const LEN: usize = ONE - TWO;
-//~^ ERROR E0080
-//~| ERROR attempt to subtract with overflow
 
 fn main() {
     let a: [i8; LEN] = unimplemented!();
 //~^ ERROR E0080
 //~| ERROR E0080
+//~| ERROR E0080
+//~| ERROR E0080
 }
index 630828ef8f5178adbacc2bc7a9b4d55c4069e824..cf97a0dc5557866a674705f84fffc56b41ce1dac 100644 (file)
@@ -1,19 +1,20 @@
-error: attempt to subtract with overflow
-  --> $DIR/const-len-underflow-separate-spans.rs:17:20
+error[E0080]: referenced constant has errors
+  --> $DIR/const-len-underflow-separate-spans.rs:20:17
    |
 LL | const LEN: usize = ONE - TWO;
-   |                    ^^^^^^^^^
-   |
-   = note: #[deny(const_err)] on by default
+   |                    --------- attempt to subtract with overflow
+...
+LL |     let a: [i8; LEN] = unimplemented!();
+   |                 ^^^
 
-error[E0080]: constant evaluation error
-  --> $DIR/const-len-underflow-separate-spans.rs:17:20
+error[E0080]: could not evaluate constant
+  --> $DIR/const-len-underflow-separate-spans.rs:20:17
    |
-LL | const LEN: usize = ONE - TWO;
-   |                    ^^^^^^^^^ attempt to subtract with overflow
+LL |     let a: [i8; LEN] = unimplemented!();
+   |                 ^^^ referenced constant has errors
 
-error[E0080]: referenced constant
-  --> $DIR/const-len-underflow-separate-spans.rs:22:12
+error[E0080]: referenced constant has errors
+  --> $DIR/const-len-underflow-separate-spans.rs:20:12
    |
 LL | const LEN: usize = ONE - TWO;
    |                    --------- attempt to subtract with overflow
@@ -22,7 +23,7 @@ LL |     let a: [i8; LEN] = unimplemented!();
    |            ^^^^^^^^^
 
 error[E0080]: could not evaluate constant expression
-  --> $DIR/const-len-underflow-separate-spans.rs:22:12
+  --> $DIR/const-len-underflow-separate-spans.rs:20:12
    |
 LL |     let a: [i8; LEN] = unimplemented!();
    |            ^^^^^---^
index b8a1994a10575fff29a8491788aacfda3e43669a..349ab3e27ad32bf8a149d08371d1d0ecf0939c34 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 // aux-build:edition-kw-macro-2015.rs
 // compile-pass
 
index 1fb91ca006cc7ba2a3e6a910415e08d6ecedf53a..08cba2d2908a65c36cd3ce8f2d44a7baf03a464b 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 // aux-build:edition-kw-macro-2015.rs
 
 #![feature(raw_identifiers)]
index bc14c104c49fccbd2b014c865ad3e19afbd4856c..082eb8d89f9f1bb028e1344963218649c4cda01e 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 // aux-build:edition-kw-macro-2018.rs
 
 #![feature(raw_identifiers)]
index 0b680eb16c7f458795e571f0ef3ca14bd96dbf3d..337d6be6bbcd88d0ba3101b85a7865a6b650501b 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 // aux-build:edition-kw-macro-2018.rs
 
 #![feature(raw_identifiers)]
index 6f85f427eb0543dda421dfa48b36d2c635a9503e..6e2073e0e494ae1b858459140d373922394e2c2a 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 // aux-build:edition-kw-macro-2015.rs
 // compile-pass
 
index 02dc8c8795675c34313c00a516449075e0eb0a20..713da57f7e2c7f58f50832862ab67a9b37a16eba 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 // aux-build:edition-kw-macro-2015.rs
 
 #![feature(raw_identifiers)]
index ef7f63e225ce159aa3d86f187a6c7cd23ff7238e..50db4202e98d3eb477b2f418e359221f56d6c8c5 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 // aux-build:edition-kw-macro-2018.rs
 
 #![feature(raw_identifiers)]
index f9b4d0e18c14b3e9c6d954a43287e3af3d3ab113..263ec95caa7d6a3d31b5d4ddcd201e644ea644ab 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 // aux-build:edition-kw-macro-2018.rs
 
 #![feature(raw_identifiers)]
index b1dd6a77e976a740bfa7ee3146adbed10918ae77..5b865e9c1c7cbe21b7c94661012e2f3946b08098 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2015
+// edition:2015
 
 #![feature(futures_api)]
 
index 971b75c6dd03653f286792e674e6297549301905..be34842dea3a68898f1b0705c33c4fc5e70aec22 100644 (file)
@@ -8,7 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
+
 #![feature(futures_api)]
 
 async fn foo() {} //~ ERROR async fn is unstable
index fdee6e205750b0f39384d8b9944d782212cd1bb4..79ed5c4d008b13ef1f6c473fa01bed9289b47841 100644 (file)
@@ -1,5 +1,5 @@
 error[E0658]: async fn is unstable (see issue #50547)
-  --> $DIR/feature-gate-async-await.rs:14:1
+  --> $DIR/feature-gate-async-await.rs:15:1
    |
 LL | async fn foo() {} //~ ERROR async fn is unstable
    | ^^^^^^^^^^^^^^^^^
@@ -7,7 +7,7 @@ LL | async fn foo() {} //~ ERROR async fn is unstable
    = help: add #![feature(async_await)] to the crate attributes to enable
 
 error[E0658]: async blocks are unstable (see issue #50547)
-  --> $DIR/feature-gate-async-await.rs:17:13
+  --> $DIR/feature-gate-async-await.rs:18:13
    |
 LL |     let _ = async {}; //~ ERROR async blocks are unstable
    |             ^^^^^^^^
@@ -15,7 +15,7 @@ LL |     let _ = async {}; //~ ERROR async blocks are unstable
    = help: add #![feature(async_await)] to the crate attributes to enable
 
 error[E0658]: async closures are unstable (see issue #50547)
-  --> $DIR/feature-gate-async-await.rs:18:13
+  --> $DIR/feature-gate-async-await.rs:19:13
    |
 LL |     let _ = async || {}; //~ ERROR async closures are unstable
    |             ^^^^^^^^^^^
index 19f5aca5730e1b18d0f2b4ae83462de391378d25..bdce1952a9a2148f2cd0d128aa3658abadd889d1 100644 (file)
@@ -12,7 +12,7 @@
 // gate is not used.
 
 macro_rules! m { ($(a)?) => {} }
-//~^ ERROR Using the `?` macro Kleene operator for "at most one" repetition is unstable
+//~^ ERROR using the `?` macro Kleene operator for "at most one" repetition is unstable
 
 fn main() {
     m!();
index 8eba07e6c088bcc5f8393944bba4eb355084f881..9ca71d937f8598dadfd363d0af6e11731b42df55 100644 (file)
@@ -1,4 +1,4 @@
-error[E0658]: Using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075)
+error[E0658]: using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075)
   --> $DIR/feature-gate-macro_at_most_once_rep.rs:14:20
    |
 LL | macro_rules! m { ($(a)?) => {} }
index 9c758241ea1b871ae3d07c6f29e9bcabd871aaa2..5271f75b6328cd895de19e060f2abaaff2815838 100644 (file)
@@ -27,9 +27,9 @@ macro_rules! emits_nothing(
 #[link(name = "rust_test_helpers", kind = "static")]
 extern {
     returns_isize!(rust_get_test_int);
-    //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro invocations in `extern {}` blocks are experimental.
     takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
-    //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro invocations in `extern {}` blocks are experimental.
     emits_nothing!();
-    //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+    //~^ ERROR macro invocations in `extern {}` blocks are experimental.
 }
index 49aca0db2d46c1c8f737e5fd0b1808de5461302d..748adc390d8bbf3025e911461c388a5ffe7db99c 100644 (file)
@@ -1,4 +1,4 @@
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
   --> $DIR/feature-gate-macros_in_extern.rs:29:5
    |
 LL |     returns_isize!(rust_get_test_int);
@@ -6,7 +6,7 @@ LL |     returns_isize!(rust_get_test_int);
    |
    = help: add #![feature(macros_in_extern)] to the crate attributes to enable
 
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
   --> $DIR/feature-gate-macros_in_extern.rs:31:5
    |
 LL |     takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
@@ -14,7 +14,7 @@ LL |     takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
    |
    = help: add #![feature(macros_in_extern)] to the crate attributes to enable
 
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
   --> $DIR/feature-gate-macros_in_extern.rs:33:5
    |
 LL |     emits_nothing!();
index 4ddde01126363524b22040d662bd5d792d28a411..8a43e75494d8175c729c448c07c85079a691a353 100644 (file)
@@ -10,5 +10,5 @@
 
 fn main() {
     let _ : &(Send,) = &((),);
-    //~^ ERROR Unsized tuple coercion is not stable enough
+    //~^ ERROR unsized tuple coercion is not stable enough
 }
index bf790a3b003902bcbc3b58d13a9b93bb2c3956eb..08c15855a6a42393cf88b039ea27c072fe310b22 100644 (file)
@@ -1,4 +1,4 @@
-error[E0658]: Unsized tuple coercion is not stable enough for use and is subject to change (see issue #42877)
+error[E0658]: unsized tuple coercion is not stable enough for use and is subject to change (see issue #42877)
   --> $DIR/feature-gate-unsized_tuple_coercion.rs:12:24
    |
 LL |     let _ : &(Send,) = &((),);
diff --git a/src/test/ui/hygiene/auxiliary/local_inner_macros.rs b/src/test/ui/hygiene/auxiliary/local_inner_macros.rs
new file mode 100644 (file)
index 0000000..caa2903
--- /dev/null
@@ -0,0 +1,29 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[macro_export]
+macro_rules! helper1 {
+    () => ( struct S; )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! helper2 {
+    () => ( helper1!(); )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! public_macro {
+    () => ( helper2!(); )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! public_macro_dynamic {
+    ($helper: ident) => ( $helper!(); )
+}
diff --git a/src/test/ui/hygiene/local_inner_macros.rs b/src/test/ui/hygiene/local_inner_macros.rs
new file mode 100644 (file)
index 0000000..787e2df
--- /dev/null
@@ -0,0 +1,31 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+// aux-build:local_inner_macros.rs
+
+#![feature(use_extern_macros)]
+
+extern crate local_inner_macros;
+
+use local_inner_macros::{public_macro, public_macro_dynamic};
+
+public_macro!();
+
+macro_rules! local_helper {
+    () => ( struct Z; )
+}
+
+public_macro_dynamic!(local_helper);
+
+fn main() {
+    let s = S;
+    let z = Z;
+}
diff --git a/src/test/ui/hygiene/local_inner_macros_disabled.rs b/src/test/ui/hygiene/local_inner_macros_disabled.rs
new file mode 100644 (file)
index 0000000..00b3878
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// `local_inner_macros` has no effect if `feature(use_extern_macros)` is not enabled
+
+// aux-build:local_inner_macros.rs
+// error-pattern: cannot find macro `helper2!` in this scope
+
+#[macro_use(public_macro)]
+extern crate local_inner_macros;
+
+public_macro!();
+
+fn main() {}
diff --git a/src/test/ui/hygiene/local_inner_macros_disabled.stderr b/src/test/ui/hygiene/local_inner_macros_disabled.stderr
new file mode 100644 (file)
index 0000000..64cb6c4
--- /dev/null
@@ -0,0 +1,10 @@
+error: cannot find macro `helper2!` in this scope
+  --> $DIR/local_inner_macros_disabled.rs:19:1
+   |
+LL | public_macro!();
+   | ^^^^^^^^^^^^^^^^
+   |
+   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
+
+error: aborting due to previous error
+
diff --git a/src/test/ui/impl-trait/static-return-lifetime-infered.nll.stderr b/src/test/ui/impl-trait/static-return-lifetime-infered.nll.stderr
new file mode 100644 (file)
index 0000000..7099316
--- /dev/null
@@ -0,0 +1,26 @@
+warning: not reporting region error due to nll
+  --> $DIR/static-return-lifetime-infered.rs:17:16
+   |
+LL |         self.x.iter().map(|a| a.0)
+   |                ^^^^
+
+warning: not reporting region error due to nll
+  --> $DIR/static-return-lifetime-infered.rs:21:16
+   |
+LL |         self.x.iter().map(|a| a.0)
+   |                ^^^^
+
+error: free region `` does not outlive free region `'static`
+  --> $DIR/static-return-lifetime-infered.rs:17:9
+   |
+LL |         self.x.iter().map(|a| a.0)
+   |         ^^^^^^^^^^^^^
+
+error: free region `'a` does not outlive free region `'static`
+  --> $DIR/static-return-lifetime-infered.rs:21:9
+   |
+LL |         self.x.iter().map(|a| a.0)
+   |         ^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
diff --git a/src/test/ui/impl-trait/static-return-lifetime-infered.rs b/src/test/ui/impl-trait/static-return-lifetime-infered.rs
new file mode 100644 (file)
index 0000000..a05c889
--- /dev/null
@@ -0,0 +1,26 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct A {
+    x: [(u32, u32); 10]
+}
+
+impl A {
+    fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+        self.x.iter().map(|a| a.0)
+    }
+    //~^^ ERROR cannot infer an appropriate lifetime
+    fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+        self.x.iter().map(|a| a.0)
+    }
+    //~^^ ERROR cannot infer an appropriate lifetime
+}
+
+fn main() {}
diff --git a/src/test/ui/impl-trait/static-return-lifetime-infered.stderr b/src/test/ui/impl-trait/static-return-lifetime-infered.stderr
new file mode 100644 (file)
index 0000000..2795bb9
--- /dev/null
@@ -0,0 +1,44 @@
+error: cannot infer an appropriate lifetime
+  --> $DIR/static-return-lifetime-infered.rs:17:16
+   |
+LL |     fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+   |                                   ----------------------- this return type evaluates to the `'static` lifetime...
+LL |         self.x.iter().map(|a| a.0)
+   |         ------ ^^^^
+   |         |
+   |         ...but this borrow...
+   |
+note: ...can't outlive the anonymous lifetime #1 defined on the method body at 16:5
+  --> $DIR/static-return-lifetime-infered.rs:16:5
+   |
+LL | /     fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+LL | |         self.x.iter().map(|a| a.0)
+LL | |     }
+   | |_____^
+help: you can add a constraint to the return type to make it last less than `'static` and match the anonymous lifetime #1 defined on the method body at 16:5
+   |
+LL |     fn iter_values_anon(&self) -> impl Iterator<Item=u32> + '_ {
+   |                                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: cannot infer an appropriate lifetime
+  --> $DIR/static-return-lifetime-infered.rs:21:16
+   |
+LL |     fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+   |                                     ----------------------- this return type evaluates to the `'static` lifetime...
+LL |         self.x.iter().map(|a| a.0)
+   |         ------ ^^^^
+   |         |
+   |         ...but this borrow...
+   |
+note: ...can't outlive the lifetime 'a as defined on the method body at 20:5
+  --> $DIR/static-return-lifetime-infered.rs:20:5
+   |
+LL |     fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime 'a as defined on the method body at 20:5
+   |
+LL |     fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> + 'a {
+   |                                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
index de223d9ccf7b6b46e1f3624b4810f13bf43e61c5..c4379b496f8d4fd1a112fa70243099799fab93c6 100644 (file)
@@ -11,7 +11,7 @@
 // tests that the anonymous_parameters lint is warn-by-default on the 2018 edition
 
 // compile-pass
-// compile-flags: --edition=2018
+// edition:2018
 // run-rustfix
 
 trait Foo {
index 35406806df99c13a6f37694dc483495aa6b06cb5..13eb5dfd816ab0e18860b4ccb9871e9712c99d6e 100644 (file)
@@ -11,7 +11,7 @@
 // tests that the anonymous_parameters lint is warn-by-default on the 2018 edition
 
 // compile-pass
-// compile-flags: --edition=2018
+// edition:2018
 // run-rustfix
 
 trait Foo {
index 93a7c1a0c6c33a0c3165040276dbeae87f7d6bd9..122e393f97a5367b7a4b34fba17bf5d361727373 100644 (file)
@@ -24,15 +24,16 @@ LL | |     });
    = note: where '_#1r: '_#0r
 
 error: free region `ReFree(DefId(0/0:6 ~ propagate_approximated_shorter_to_static_no_bound[317d]::supply[0]), BrNamed(crate0:DefIndex(1:16), 'a))` does not outlive free region `ReStatic`
-  --> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:45:5
+  --> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:45:47
    |
-LL | /     establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
+LL |       establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
+   |  _______________________________________________^
 LL | |         //~^ ERROR does not outlive free region
 LL | |
 LL | |         // Only works if 'x: 'y:
 LL | |         demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
 LL | |     });
-   | |______^
+   | |_____^
 
 note: No external requirements
   --> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:44:1
index c62f62efda361795c70e2426b4f14881f3eac9ce..8cdbc26458150140ee75b5e47e553d2b2c088b68 100644 (file)
@@ -24,15 +24,16 @@ LL | |     });
    = note: where '_#1r: '_#0r
 
 error: free region `ReFree(DefId(0/0:6 ~ propagate_approximated_shorter_to_static_wrong_bound[317d]::supply[0]), BrNamed(crate0:DefIndex(1:16), 'a))` does not outlive free region `ReStatic`
-  --> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:48:5
+  --> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:48:47
    |
-LL | /     establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
+LL |       establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
+   |  _______________________________________________^
 LL | |         //~^ ERROR does not outlive free region
 LL | |         // Only works if 'x: 'y:
 LL | |         demand_y(x, y, x.get())
 LL | |         //~^ WARNING not reporting region error due to nll
 LL | |     });
-   | |______^
+   | |_____^
 
 note: No external requirements
   --> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:47:1
diff --git a/src/test/ui/nll/issue-50716-1.rs b/src/test/ui/nll/issue-50716-1.rs
new file mode 100644 (file)
index 0000000..ced9b1c
--- /dev/null
@@ -0,0 +1,23 @@
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+//
+// An additional regression test for the issue #50716 “NLL ignores lifetimes
+// bounds derived from `Sized` requirements” that checks that the fixed compiler
+// accepts this code fragment with both AST and MIR borrow checkers.
+//
+// revisions: ast mir
+//
+// compile-pass
+
+#![cfg_attr(mir, feature(nll))]
+
+struct Qey<Q: ?Sized>(Q);
+
+fn main() {}
diff --git a/src/test/ui/nll/issue-50716.rs b/src/test/ui/nll/issue-50716.rs
new file mode 100644 (file)
index 0000000..310600a
--- /dev/null
@@ -0,0 +1,28 @@
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+//
+// Regression test for the issue #50716: NLL ignores lifetimes bounds
+// derived from `Sized` requirements
+
+#![feature(nll)]
+
+trait A {
+    type X: ?Sized;
+}
+
+fn foo<'a, T: 'static>(s: Box<<&'a T as A>::X>)
+where
+    for<'b> &'b T: A,
+    <&'static T as A>::X: Sized
+{
+    let _x = *s; //~ ERROR free region `'a` does not outlive free region `'static`
+}
+
+fn main() {}
diff --git a/src/test/ui/nll/issue-50716.stderr b/src/test/ui/nll/issue-50716.stderr
new file mode 100644 (file)
index 0000000..20b03d6
--- /dev/null
@@ -0,0 +1,8 @@
+error: free region `'a` does not outlive free region `'static`
+  --> $DIR/issue-50716.rs:25:14
+   |
+LL |     let _x = *s; //~ ERROR free region `'a` does not outlive free region `'static`
+   |              ^^
+
+error: aborting due to previous error
+
index f2ecc44771883752d6296d7bbd38a82facf371e9..9cb754167cfe5f1e2a0b213725dc3a548696c3df 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// compile-flags: --edition=2018
+// edition:2018
 
 #![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
 
index 12699d8b25f81750393da26779a59a5d70d020e1..041bae4a421080644c088473bf386227040f847c 100644 (file)
@@ -14,7 +14,10 @@ LL | fn main() {
    |           - expected `()` because of default return type
 ...
 LL |   let u: &str = if true { s[..2] } else { s };
-   |                           ^^^^^^ expected &str, found str
+   |                           ^^^^^^
+   |                           |
+   |                           expected &str, found str
+   |                           help: consider borrowing here: `&s[..2]`
    |
    = note: expected type `&str`
               found type `str`
index f66f5c5b70e7ed38c213989043b456ceb19f397a..1dd7fe7f0cb39d5fbdd2414677aef566eb81c3de 100644 (file)
@@ -298,6 +298,10 @@ fn load_from(&mut self, testfile: &Path, cfg: Option<&str>, config: &Config) {
                     .extend(flags.split_whitespace().map(|s| s.to_owned()));
             }
 
+            if let Some(edition) = config.parse_edition(ln) {
+                self.compile_flags.push(format!("--edition={}", edition));
+            }
+
             if let Some(r) = config.parse_revisions(ln) {
                 self.revisions.extend(r);
             }
@@ -371,9 +375,9 @@ fn load_from(&mut self, testfile: &Path, cfg: Option<&str>, config: &Config) {
                 self.compile_pass = config.parse_compile_pass(ln) || self.run_pass;
             }
 
-                        if !self.skip_codegen {
-                            self.skip_codegen = config.parse_skip_codegen(ln);
-                        }
+            if !self.skip_codegen {
+                self.skip_codegen = config.parse_skip_codegen(ln);
+            }
 
             if !self.disable_ui_testing_normalization {
                 self.disable_ui_testing_normalization =
@@ -647,6 +651,10 @@ pub fn find_rust_src_root(&self) -> Option<PathBuf> {
     fn parse_run_rustfix(&self, line: &str) -> bool {
         self.parse_name_directive(line, "run-rustfix")
     }
+
+    fn parse_edition(&self, line: &str) -> Option<String> {
+        self.parse_name_value_directive(line, "edition")
+    }
 }
 
 pub fn lldb_version_to_int(version_string: &str) -> isize {
index caf73f4f68b036d912a9933b5e651cb6c4d36764..408eda5ba5bb58aa6d19848ac6be270c2a15ee4d 100644 (file)
@@ -1368,6 +1368,7 @@ fn document(&self, out_dir: &Path) -> ProcRes {
             .arg(out_dir)
             .arg(&self.testpaths.file)
             .args(&self.props.compile_flags);
+
         if let Some(ref linker) = self.config.linker {
             rustdoc
                 .arg("--linker")