version = "0.0.0"
dependencies = [
"arena 0.0.0",
+ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_data_structures 0.0.0",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
#![cfg_attr(stage0, feature(repr_transparent))]
#![feature(rustc_attrs)]
#![feature(specialization)]
+#![feature(split_ascii_whitespace)]
#![feature(staged_api)]
#![feature(str_internals)]
#![feature(trusted_len)]
pub use core::str::pattern;
#[stable(feature = "encode_utf16", since = "1.8.0")]
pub use core::str::EncodeUtf16;
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+pub use core::str::SplitAsciiWhitespace;
#[unstable(feature = "slice_concat_ext",
reason = "trait should not have to exist",
/// or returns `LayoutErr` if either of the following conditions
/// are not met:
///
+ /// * `align` must not be zero,
+ ///
/// * `align` must be a power of two,
///
/// * `size`, when rounded up to the nearest multiple of `align`,
}
}
-// Values [1, MIN_WRITING-1] represent the number of `Ref` active. Values in
-// [MIN_WRITING, MAX-1] represent the number of `RefMut` active. Multiple
-// `RefMut`s can only be active at a time if they refer to distinct,
-// nonoverlapping components of a `RefCell` (e.g., different ranges of a slice).
+// Positive values represent the number of `Ref` active. Negative values
+// represent the number of `RefMut` active. Multiple `RefMut`s can only be
+// active at a time if they refer to distinct, nonoverlapping components of a
+// `RefCell` (e.g., different ranges of a slice).
//
// `Ref` and `RefMut` are both two words in size, and so there will likely never
// be enough `Ref`s or `RefMut`s in existence to overflow half of the `usize`
-// range. Thus, a `BorrowFlag` will probably never overflow. However, this is
-// not a guarantee, as a pathological program could repeatedly create and then
-// mem::forget `Ref`s or `RefMut`s. Thus, all code must explicitly check for
-// overflow in order to avoid unsafety.
-type BorrowFlag = usize;
+// range. Thus, a `BorrowFlag` will probably never overflow or underflow.
+// However, this is not a guarantee, as a pathological program could repeatedly
+// create and then mem::forget `Ref`s or `RefMut`s. Thus, all code must
+// explicitly check for overflow and underflow in order to avoid unsafety, or at
+// least behave correctly in the event that overflow or underflow happens (e.g.,
+// see BorrowRef::new).
+type BorrowFlag = isize;
const UNUSED: BorrowFlag = 0;
-const MIN_WRITING: BorrowFlag = (!0)/2 + 1; // 0b1000...
+
+#[inline(always)]
+fn is_writing(x: BorrowFlag) -> bool {
+ x < UNUSED
+}
+
+#[inline(always)]
+fn is_reading(x: BorrowFlag) -> bool {
+ x > UNUSED
+}
impl<T> RefCell<T> {
/// Creates a new `RefCell` containing `value`.
#[inline]
fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRef<'b>> {
let b = borrow.get();
- if b >= MIN_WRITING {
+ if is_writing(b) || b == isize::max_value() {
+ // If there's currently a writing borrow, or if incrementing the
+ // refcount would overflow into a writing borrow.
None
} else {
- // Prevent the borrow counter from overflowing into
- // a writing borrow.
- assert!(b < MIN_WRITING - 1);
borrow.set(b + 1);
Some(BorrowRef { borrow })
}
#[inline]
fn drop(&mut self) {
let borrow = self.borrow.get();
- debug_assert!(borrow < MIN_WRITING && borrow != UNUSED);
+ debug_assert!(is_reading(borrow));
self.borrow.set(borrow - 1);
}
}
#[inline]
fn clone(&self) -> BorrowRef<'b> {
// Since this Ref exists, we know the borrow flag
- // is not set to WRITING.
+ // is a reading borrow.
let borrow = self.borrow.get();
- debug_assert!(borrow != UNUSED);
+ debug_assert!(is_reading(borrow));
// Prevent the borrow counter from overflowing into
// a writing borrow.
- assert!(borrow < MIN_WRITING - 1);
+ assert!(borrow != isize::max_value());
self.borrow.set(borrow + 1);
BorrowRef { borrow: self.borrow }
}
#[inline]
fn drop(&mut self) {
let borrow = self.borrow.get();
- debug_assert!(borrow >= MIN_WRITING);
- self.borrow.set(if borrow == MIN_WRITING {
- UNUSED
- } else {
- borrow - 1
- });
+ debug_assert!(is_writing(borrow));
+ self.borrow.set(borrow + 1);
}
}
// NOTE: Unlike BorrowRefMut::clone, new is called to create the initial
// mutable reference, and so there must currently be no existing
// references. Thus, while clone increments the mutable refcount, here
- // we simply go directly from UNUSED to MIN_WRITING.
+ // we explicitly only allow going from UNUSED to UNUSED - 1.
match borrow.get() {
UNUSED => {
- borrow.set(MIN_WRITING);
+ borrow.set(UNUSED - 1);
Some(BorrowRefMut { borrow: borrow })
},
_ => None,
#[inline]
fn clone(&self) -> BorrowRefMut<'b> {
let borrow = self.borrow.get();
- debug_assert!(borrow >= MIN_WRITING);
- // Prevent the borrow counter from overflowing.
- assert!(borrow != !0);
- self.borrow.set(borrow + 1);
+ debug_assert!(is_writing(borrow));
+ // Prevent the borrow counter from underflowing.
+ assert!(borrow != isize::min_value());
+ self.borrow.set(borrow - 1);
BorrowRefMut { borrow: self.borrow }
}
}
/// # Examples
///
/// ```
- /// #![feature(int_to_from_bytes)]
- ///
/// let bytes = i32::min_value().to_be().to_bytes();
/// assert_eq!(bytes, [0x80, 0, 0, 0]);
/// ```
- #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+ #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
#[inline]
pub fn to_bytes(self) -> [u8; mem::size_of::<Self>()] {
unsafe { mem::transmute(self) }
/// # Examples
///
/// ```
- /// #![feature(int_to_from_bytes)]
- ///
/// let int = i32::from_be(i32::from_bytes([0x80, 0, 0, 0]));
/// assert_eq!(int, i32::min_value());
/// ```
- #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+ #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
#[inline]
pub fn from_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
unsafe { mem::transmute(bytes) }
/// # Examples
///
/// ```
- /// #![feature(int_to_from_bytes)]
- ///
/// let bytes = 0x1234_5678_u32.to_be().to_bytes();
/// assert_eq!(bytes, [0x12, 0x34, 0x56, 0x78]);
/// ```
- #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+ #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
#[inline]
pub fn to_bytes(self) -> [u8; mem::size_of::<Self>()] {
unsafe { mem::transmute(self) }
/// # Examples
///
/// ```
- /// #![feature(int_to_from_bytes)]
- ///
/// let int = u32::from_be(u32::from_bytes([0x12, 0x34, 0x56, 0x78]));
/// assert_eq!(int, 0x1234_5678_u32);
/// ```
- #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+ #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
#[inline]
pub fn from_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
unsafe { mem::transmute(bytes) }
/// [`split_at_mut`]: #method.split_at_mut
#[stable(feature = "copy_from_slice", since = "1.9.0")]
pub fn copy_from_slice(&mut self, src: &[T]) where T: Copy {
- assert!(self.len() == src.len(),
- "destination and source slices have different lengths");
+ assert_eq!(self.len(), src.len(),
+ "destination and source slices have different lengths");
unsafe {
ptr::copy_nonoverlapping(
src.as_ptr(), self.as_mut_ptr(), self.len());
use fmt;
use iter::{Map, Cloned, FusedIterator, TrustedLen, Filter};
use iter_private::TrustedRandomAccess;
-use slice::{self, SliceIndex};
+use slice::{self, SliceIndex, Split as SliceSplit};
use mem;
pub mod pattern;
/// the original string slice, separated by any amount of whitespace.
///
/// 'Whitespace' is defined according to the terms of the Unicode Derived
- /// Core Property `White_Space`.
+ /// Core Property `White_Space`. If you only want to split on ASCII whitespace
+ /// instead, use [`split_ascii_whitespace`].
+ ///
+ /// [`split_ascii_whitespace`]: #method.split_ascii_whitespace
///
/// # Examples
///
SplitWhitespace { inner: self.split(IsWhitespace).filter(IsNotEmpty) }
}
+ /// Split a string slice by ASCII whitespace.
+ ///
+ /// The iterator returned will return string slices that are sub-slices of
+ /// the original string slice, separated by any amount of ASCII whitespace.
+ ///
+ /// To split by Unicode `Whitespace` instead, use [`split_whitespace`].
+ ///
+ /// [`split_whitespace`]: #method.split_whitespace
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(split_ascii_whitespace)]
+ /// let mut iter = "A few words".split_ascii_whitespace();
+ ///
+ /// assert_eq!(Some("A"), iter.next());
+ /// assert_eq!(Some("few"), iter.next());
+ /// assert_eq!(Some("words"), iter.next());
+ ///
+ /// assert_eq!(None, iter.next());
+ /// ```
+ ///
+ /// All kinds of ASCII whitespace are considered:
+ ///
+ /// ```
+ /// let mut iter = " Mary had\ta little \n\t lamb".split_whitespace();
+ /// assert_eq!(Some("Mary"), iter.next());
+ /// assert_eq!(Some("had"), iter.next());
+ /// assert_eq!(Some("a"), iter.next());
+ /// assert_eq!(Some("little"), iter.next());
+ /// assert_eq!(Some("lamb"), iter.next());
+ ///
+ /// assert_eq!(None, iter.next());
+ /// ```
+ #[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+ #[inline]
+ pub fn split_ascii_whitespace(&self) -> SplitAsciiWhitespace {
+ let inner = self
+ .as_bytes()
+ .split(IsAsciiWhitespace)
+ .filter(IsNotEmpty)
+ .map(UnsafeBytesToStr);
+ SplitAsciiWhitespace { inner }
+ }
+
/// An iterator over the lines of a string, as string slices.
///
/// Lines are ended with either a newline (`\n`) or a carriage return with
inner: Filter<Split<'a, IsWhitespace>, IsNotEmpty>,
}
+/// An iterator over the non-ASCII-whitespace substrings of a string,
+/// separated by any amount of ASCII whitespace.
+///
+/// This struct is created by the [`split_ascii_whitespace`] method on [`str`].
+/// See its documentation for more.
+///
+/// [`split_ascii_whitespace`]: ../../std/primitive.str.html#method.split_ascii_whitespace
+/// [`str`]: ../../std/primitive.str.html
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+#[derive(Clone, Debug)]
+pub struct SplitAsciiWhitespace<'a> {
+ inner: Map<Filter<SliceSplit<'a, u8, IsAsciiWhitespace>, IsNotEmpty>, UnsafeBytesToStr>,
+}
+
#[derive(Clone)]
struct IsWhitespace;
}
}
+#[derive(Clone)]
+struct IsAsciiWhitespace;
+
+impl<'a> FnOnce<(&'a u8, )> for IsAsciiWhitespace {
+ type Output = bool;
+
+ #[inline]
+ extern "rust-call" fn call_once(mut self, arg: (&u8, )) -> bool {
+ self.call_mut(arg)
+ }
+}
+
+impl<'a> FnMut<(&'a u8, )> for IsAsciiWhitespace {
+ #[inline]
+ extern "rust-call" fn call_mut(&mut self, arg: (&u8, )) -> bool {
+ arg.0.is_ascii_whitespace()
+ }
+}
+
#[derive(Clone)]
struct IsNotEmpty;
type Output = bool;
#[inline]
- extern "rust-call" fn call_once(mut self, arg: (&&str, )) -> bool {
+ extern "rust-call" fn call_once(mut self, arg: (&'a &'b str, )) -> bool {
self.call_mut(arg)
}
}
impl<'a, 'b> FnMut<(&'a &'b str, )> for IsNotEmpty {
#[inline]
- extern "rust-call" fn call_mut(&mut self, arg: (&&str, )) -> bool {
+ extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b str, )) -> bool {
+ !arg.0.is_empty()
+ }
+}
+
+impl<'a, 'b> FnOnce<(&'a &'b [u8], )> for IsNotEmpty {
+ type Output = bool;
+
+ #[inline]
+ extern "rust-call" fn call_once(mut self, arg: (&'a &'b [u8], )) -> bool {
+ self.call_mut(arg)
+ }
+}
+
+impl<'a, 'b> FnMut<(&'a &'b [u8], )> for IsNotEmpty {
+ #[inline]
+ extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b [u8], )) -> bool {
!arg.0.is_empty()
}
}
+#[derive(Clone)]
+struct UnsafeBytesToStr;
+
+impl<'a> FnOnce<(&'a [u8], )> for UnsafeBytesToStr {
+ type Output = &'a str;
+
+ #[inline]
+ extern "rust-call" fn call_once(mut self, arg: (&'a [u8], )) -> &'a str {
+ self.call_mut(arg)
+ }
+}
+
+impl<'a> FnMut<(&'a [u8], )> for UnsafeBytesToStr {
+ #[inline]
+ extern "rust-call" fn call_mut(&mut self, arg: (&'a [u8], )) -> &'a str {
+ unsafe { from_utf8_unchecked(arg.0) }
+ }
+}
+
#[stable(feature = "split_whitespace", since = "1.1.0")]
impl<'a> Iterator for SplitWhitespace<'a> {
type Item = &'a str;
+ #[inline]
fn next(&mut self) -> Option<&'a str> {
self.inner.next()
}
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
}
#[stable(feature = "split_whitespace", since = "1.1.0")]
impl<'a> DoubleEndedIterator for SplitWhitespace<'a> {
+ #[inline]
fn next_back(&mut self) -> Option<&'a str> {
self.inner.next_back()
}
#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for SplitWhitespace<'a> {}
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> Iterator for SplitAsciiWhitespace<'a> {
+ type Item = &'a str;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a str> {
+ self.inner.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> DoubleEndedIterator for SplitAsciiWhitespace<'a> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a str> {
+ self.inner.next_back()
+ }
+}
+
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> FusedIterator for SplitAsciiWhitespace<'a> {}
+
/// An iterator of [`u16`] over the string encoded as UTF-16.
///
/// [`u16`]: ../../std/primitive.u16.html
assert_eq!((-9.0 as $fty).max($nan), -9.0);
assert!(($nan as $fty).max($nan).is_nan());
}
+ #[test]
+ fn mod_euc() {
+ let a: $fty = 42.0;
+ assert!($inf.mod_euc(a).is_nan());
+ assert_eq!(a.mod_euc($inf), a);
+ assert!(a.mod_euc($nan).is_nan());
+ assert!($inf.mod_euc($inf).is_nan());
+ assert!($inf.mod_euc($nan).is_nan());
+ assert!($nan.mod_euc($inf).is_nan());
+ }
+ #[test]
+ fn div_euc() {
+ let a: $fty = 42.0;
+ assert_eq!(a.div_euc($inf), 0.0);
+ assert!(a.div_euc($nan).is_nan());
+ assert!($inf.div_euc($inf).is_nan());
+ assert!($inf.div_euc($nan).is_nan());
+ assert!($nan.div_euc($inf).is_nan());
+ }
} }
}
//! user of the `DepNode` API of having to know how to compute the expected
//! fingerprint for a given set of node parameters.
-use mir::interpret::{GlobalId, ConstValue};
+use mir::interpret::GlobalId;
use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX};
use hir::map::DefPathHash;
use hir::{HirId, ItemLocalId};
use std::fmt;
use std::hash::Hash;
use syntax_pos::symbol::InternedString;
-use traits::query::{CanonicalProjectionGoal,
- CanonicalTyGoal, CanonicalPredicateGoal};
-use ty::{TyCtxt, Instance, InstanceDef, ParamEnv, ParamEnvAnd, PolyTraitRef, Ty};
+use traits::query::{
+ CanonicalProjectionGoal, CanonicalTyGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpSubtypeGoal,
+ CanonicalPredicateGoal, CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpNormalizeGoal,
+};
+use ty::{TyCtxt, FnSig, Instance, InstanceDef,
+ ParamEnv, ParamEnvAnd, Predicate, PolyFnSig, PolyTraitRef, Ty, self};
use ty::subst::Substs;
// erase!() just makes tokens go away. It's used to specify which macro argument
// queries). Making them anonymous avoids hashing the result, which
// may save a bit of time.
[anon] EraseRegionsTy { ty: Ty<'tcx> },
- [anon] ConstValueToAllocation { val: ConstValue<'tcx>, ty: Ty<'tcx> },
+ [anon] ConstValueToAllocation { val: &'tcx ty::Const<'tcx> },
[input] Freevars(DefId),
[input] MaybeUnusedTraitImport(DefId),
[] NormalizeTyAfterErasingRegions(ParamEnvAnd<'tcx, Ty<'tcx>>),
[] DropckOutlives(CanonicalTyGoal<'tcx>),
[] EvaluateObligation(CanonicalPredicateGoal<'tcx>),
+ [] TypeOpEq(CanonicalTypeOpEqGoal<'tcx>),
+ [] TypeOpSubtype(CanonicalTypeOpSubtypeGoal<'tcx>),
+ [] TypeOpProvePredicate(CanonicalTypeOpProvePredicateGoal<'tcx>),
+ [] TypeOpNormalizeTy(CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>),
+ [] TypeOpNormalizePredicate(CanonicalTypeOpNormalizeGoal<'tcx, Predicate<'tcx>>),
+ [] TypeOpNormalizePolyFnSig(CanonicalTypeOpNormalizeGoal<'tcx, PolyFnSig<'tcx>>),
+ [] TypeOpNormalizeFnSig(CanonicalTypeOpNormalizeGoal<'tcx, FnSig<'tcx>>),
[] SubstituteNormalizeAndTestPredicates { key: (DefId, &'tcx Substs<'tcx>) },
ItemFn(Name, &'a Generics, FnHeader, &'a Visibility, &'a [Attribute]),
/// fn foo(&self)
- Method(Name, &'a MethodSig, Option<&'a Visibility>, &'a [Attribute]),
+ Method(Ident, &'a MethodSig, Option<&'a Visibility>, &'a [Attribute]),
/// |x, y| {}
Closure(&'a [Attribute]),
}
pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
- visitor.visit_name(label.span, label.name);
+ visitor.visit_ident(label.ident);
}
pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
visitor.visit_id(lifetime.id);
match lifetime.name {
- LifetimeName::Param(ParamName::Plain(name)) => {
- visitor.visit_name(lifetime.span, name);
+ LifetimeName::Param(ParamName::Plain(ident)) => {
+ visitor.visit_ident(ident);
}
LifetimeName::Param(ParamName::Fresh(_)) |
LifetimeName::Static |
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
path_span: Span,
segment: &'v PathSegment) {
- visitor.visit_name(path_span, segment.name);
+ visitor.visit_ident(segment.ident);
if let Some(ref args) = segment.args {
visitor.visit_generic_args(path_span, args);
}
pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V,
type_binding: &'v TypeBinding) {
visitor.visit_id(type_binding.id);
- visitor.visit_name(type_binding.span, type_binding.name);
+ visitor.visit_ident(type_binding.ident);
visitor.visit_ty(&type_binding.ty);
}
PatKind::Ref(ref subpattern, _) => {
visitor.visit_pat(subpattern)
}
- PatKind::Binding(_, canonical_id, ref pth1, ref optional_subpattern) => {
+ PatKind::Binding(_, canonical_id, ident, ref optional_subpattern) => {
visitor.visit_def_mention(Def::Local(canonical_id));
- visitor.visit_name(pth1.span, pth1.node);
+ visitor.visit_ident(ident);
walk_list!(visitor, visit_pat, optional_subpattern);
}
PatKind::Lit(ref expression) => visitor.visit_expr(expression),
visitor.visit_name(foreign_item.span, foreign_item.name);
match foreign_item.node {
- ForeignItemFn(ref function_declaration, ref names, ref generics) => {
+ ForeignItemFn(ref function_declaration, ref param_names, ref generics) => {
visitor.visit_generics(generics);
visitor.visit_fn_decl(function_declaration);
- for name in names {
- visitor.visit_name(name.span, name.node);
+ for &param_name in param_names {
+ visitor.visit_ident(param_name);
}
}
ForeignItemStatic(ref typ, _) => visitor.visit_ty(typ),
visitor.visit_id(param.id);
walk_list!(visitor, visit_attribute, &param.attrs);
match param.name {
- ParamName::Plain(name) => visitor.visit_name(param.span, name),
+ ParamName::Plain(ident) => visitor.visit_ident(ident),
ParamName::Fresh(_) => {}
}
match param.kind {
}
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
- visitor.visit_name(trait_item.span, trait_item.name);
+ visitor.visit_ident(trait_item.ident);
walk_list!(visitor, visit_attribute, &trait_item.attrs);
visitor.visit_generics(&trait_item.generics);
match trait_item.node {
visitor.visit_ty(ty);
walk_list!(visitor, visit_nested_body, default);
}
- TraitItemKind::Method(ref sig, TraitMethod::Required(ref names)) => {
+ TraitItemKind::Method(ref sig, TraitMethod::Required(ref param_names)) => {
visitor.visit_id(trait_item.id);
visitor.visit_fn_decl(&sig.decl);
- for name in names {
- visitor.visit_name(name.span, name.node);
+ for &param_name in param_names {
+ visitor.visit_ident(param_name);
}
}
TraitItemKind::Method(ref sig, TraitMethod::Provided(body_id)) => {
- visitor.visit_fn(FnKind::Method(trait_item.name,
+ visitor.visit_fn(FnKind::Method(trait_item.ident,
sig,
None,
&trait_item.attrs),
pub fn walk_trait_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_item_ref: &'v TraitItemRef) {
// NB: Deliberately force a compilation error if/when new fields are added.
- let TraitItemRef { id, name, ref kind, span, ref defaultness } = *trait_item_ref;
+ let TraitItemRef { id, ident, ref kind, span: _, ref defaultness } = *trait_item_ref;
visitor.visit_nested_trait_item(id);
- visitor.visit_name(span, name);
+ visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
visitor.visit_defaultness(defaultness);
}
let ImplItem {
id: _,
hir_id: _,
- name,
+ ident,
ref vis,
ref defaultness,
ref attrs,
ref generics,
ref node,
- span
+ span: _,
} = *impl_item;
- visitor.visit_name(span, name);
+ visitor.visit_ident(ident);
visitor.visit_vis(vis);
visitor.visit_defaultness(defaultness);
walk_list!(visitor, visit_attribute, attrs);
visitor.visit_nested_body(body);
}
ImplItemKind::Method(ref sig, body_id) => {
- visitor.visit_fn(FnKind::Method(impl_item.name,
+ visitor.visit_fn(FnKind::Method(impl_item.ident,
sig,
Some(&impl_item.vis),
&impl_item.attrs),
pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'v ImplItemRef) {
// NB: Deliberately force a compilation error if/when new fields are added.
- let ImplItemRef { id, name, ref kind, span, ref vis, ref defaultness } = *impl_item_ref;
+ let ImplItemRef { id, ident, ref kind, span: _, ref vis, ref defaultness } = *impl_item_ref;
visitor.visit_nested_impl_item(id);
- visitor.visit_name(span, name);
+ visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
visitor.visit_vis(vis);
visitor.visit_defaultness(defaultness);
use rustc_data_structures::indexed_vec::IndexVec;
use session::Session;
use util::common::FN_OUTPUT_NAME;
-use util::nodemap::{DefIdMap, FxHashMap, NodeMap};
+use util::nodemap::{DefIdMap, NodeMap};
use std::collections::{BTreeMap, HashSet};
use std::fmt::Debug;
cstore: &'a CrateStore,
resolver: &'a mut Resolver,
- name_map: FxHashMap<Ident, Name>,
/// The items being lowered are collected here.
items: BTreeMap<NodeId, hir::Item>,
// When `is_collectin_in_band_lifetimes` is true, each lifetime is checked
// against this list to see if it is already in-scope, or if a definition
// needs to be created for it.
- in_scope_lifetimes: Vec<Name>,
+ in_scope_lifetimes: Vec<Ident>,
type_def_lifetime_params: DefIdMap<usize>,
sess,
cstore,
resolver,
- name_map: FxHashMap(),
items: BTreeMap::new(),
trait_items: BTreeMap::new(),
impl_items: BTreeMap::new(),
self.sess.diagnostic()
}
- fn str_to_ident(&self, s: &'static str) -> Name {
- Symbol::gensym(s)
+ fn str_to_ident(&self, s: &'static str) -> Ident {
+ Ident::with_empty_ctxt(Symbol::gensym(s))
}
fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span {
format: codemap::CompilerDesugaring(reason),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: codemap::hygiene::default_edition(),
});
span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
// that collisions are ok here and this shouldn't
// really show up for end-user.
let str_name = match hir_name {
- ParamName::Plain(name) => name.as_str(),
- ParamName::Fresh(_) => keywords::UnderscoreLifetime.name().as_str(),
+ ParamName::Plain(ident) => ident.as_interned_str(),
+ ParamName::Fresh(_) => keywords::UnderscoreLifetime.name().as_interned_str(),
};
// Add a definition for the in-band lifetime def
self.resolver.definitions().create_def_with_parent(
parent_id.index,
def_node_id,
- DefPathData::LifetimeParam(str_name.as_interned_str()),
+ DefPathData::LifetimeParam(str_name),
DefIndexAddressSpace::High,
Mark::root(),
span,
/// lifetimes are enabled, then we want to push that lifetime into
/// the vector of names to define later. In that case, it will get
/// added to the appropriate generics.
- fn maybe_collect_in_band_lifetime(&mut self, span: Span, name: Name) {
+ fn maybe_collect_in_band_lifetime(&mut self, ident: Ident) {
if !self.is_collecting_in_band_lifetimes {
return;
}
- if self.in_scope_lifetimes.contains(&name) {
+ if self.in_scope_lifetimes.contains(&ident.modern()) {
return;
}
- let hir_name = ParamName::Plain(name);
+ let hir_name = ParamName::Plain(ident);
- if self.lifetimes_to_define.iter().any(|(_, lt_name)| *lt_name == hir_name) {
+ if self.lifetimes_to_define.iter()
+ .any(|(_, lt_name)| lt_name.modern() == hir_name.modern()) {
return;
}
- self.lifetimes_to_define.push((span, hir_name));
+ self.lifetimes_to_define.push((ident.span, hir_name));
}
/// When we have either an elided or `'_` lifetime in an impl
{
let old_len = self.in_scope_lifetimes.len();
let lt_def_names = params.iter().filter_map(|param| match param.kind {
- GenericParamKind::Lifetime { .. } => Some(param.ident.name),
+ GenericParamKind::Lifetime { .. } => Some(param.ident.modern()),
_ => None,
});
self.in_scope_lifetimes.extend(lt_def_names);
{
let old_len = self.in_scope_lifetimes.len();
let lt_def_names = params.iter().filter_map(|param| match param.kind {
- hir::GenericParamKind::Lifetime { .. } => Some(param.name.name()),
+ hir::GenericParamKind::Lifetime { .. } => Some(param.name.ident().modern()),
_ => None,
});
self.in_scope_lifetimes.extend(lt_def_names);
}
}
- fn lower_ident(&mut self, ident: Ident) -> Name {
- let ident = ident.modern();
- if ident.span.ctxt() == SyntaxContext::empty() {
- return ident.name;
- }
- *self.name_map
- .entry(ident)
- .or_insert_with(|| Symbol::from_ident(ident))
- }
-
fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> {
label.map(|label| hir::Label {
- name: label.ident.name,
- span: label.ident.span,
+ ident: label.ident,
})
}
fn lower_ty_binding(&mut self, b: &TypeBinding, itctx: ImplTraitContext) -> hir::TypeBinding {
hir::TypeBinding {
id: self.lower_node_id(b.id).node_id,
- name: self.lower_ident(b.ident),
+ ident: b.ident,
ty: self.lower_ty(&b.ty, itctx),
span: b.span,
}
-> hir::GenericArg {
match arg {
ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(&lt)),
- ast::GenericArg::Type(ty) => GenericArg::Type(self.lower_ty(&ty, itctx)),
+ ast::GenericArg::Type(ty) => GenericArg::Type(self.lower_ty_direct(&ty, itctx)),
}
}
fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
+ P(self.lower_ty_direct(t, itctx))
+ }
+
+ fn lower_ty_direct(&mut self, t: &Ty, itctx: ImplTraitContext) -> hir::Ty {
let kind = match t.node {
TyKind::Infer => hir::TyInfer,
TyKind::Err => hir::TyErr,
),
TyKind::Never => hir::TyNever,
TyKind::Tup(ref tys) => {
- hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty, itctx)).collect())
+ hir::TyTup(tys.iter().map(|ty| self.lower_ty_direct(ty, itctx)).collect())
}
TyKind::Paren(ref ty) => {
- return self.lower_ty(ty, itctx);
+ return self.lower_ty_direct(ty, itctx);
}
TyKind::Path(ref qself, ref path) => {
let id = self.lower_node_id(t.id);
None,
P(hir::Path {
def: self.expect_full_def(t.id),
- segments: hir_vec![hir::PathSegment::from_name(keywords::SelfType.name())],
+ segments: hir_vec![hir::PathSegment::from_ident(keywords::SelfType.ident())],
span: t.span,
}),
)),
let hir_bounds = self.lower_param_bounds(bounds, itctx);
// Set the name to `impl Bound1 + Bound2`
- let name = Symbol::intern(&pprust::ty_to_string(t));
+ let ident = Ident::from_str(&pprust::ty_to_string(t)).with_span_pos(span);
self.in_band_ty_params.push(hir::GenericParam {
id: def_node_id,
- name: ParamName::Plain(name),
- span,
+ name: ParamName::Plain(ident),
pure_wrt_drop: false,
attrs: hir_vec![],
bounds: hir_bounds,
+ span,
kind: hir::GenericParamKind::Type {
default: None,
synthetic: Some(hir::SyntheticTyParamKind::ImplTrait),
P(hir::Path {
span,
def: Def::TyParam(DefId::local(def_index)),
- segments: hir_vec![hir::PathSegment::from_name(name)],
+ segments: hir_vec![hir::PathSegment::from_ident(ident)],
}),
))
}
};
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(t.id);
- P(hir::Ty {
+ hir::Ty {
id: node_id,
node: kind,
span: t.span,
hir_id,
- })
+ }
}
fn lower_existential_impl_trait(
self.context.resolver.definitions().create_def_with_parent(
self.parent,
def_node_id,
- DefPathData::LifetimeParam(name.name().as_interned_str()),
+ DefPathData::LifetimeParam(name.ident().as_interned_str()),
DefIndexAddressSpace::High,
Mark::root(),
lifetime.span,
let name = match name {
hir::LifetimeName::Underscore => {
- hir::ParamName::Plain(keywords::UnderscoreLifetime.name())
+ hir::ParamName::Plain(keywords::UnderscoreLifetime.ident())
}
hir::LifetimeName::Param(param_name) => param_name,
_ => bug!("expected LifetimeName::Param or ParamName::Plain"),
// e.g. `Vec` in `Vec::new` or `<I as Iterator>::Item` in
// `<I as Iterator>::Item::default`.
let new_id = self.next_id();
- self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path))
+ P(self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path)))
};
// Anything after the base path are associated "extensions",
// Wrap the associated extension in another type node.
let new_id = self.next_id();
- ty = self.ty_path(new_id, p.span, qpath);
+ ty = P(self.ty_path(new_id, p.span, qpath));
}
// Should've returned in the for loop above.
&mut self,
def: Def,
p: &Path,
- name: Option<Name>,
+ ident: Option<Ident>,
param_mode: ParamMode,
) -> hir::Path {
hir::Path {
ImplTraitContext::Disallowed,
)
})
- .chain(name.map(|name| hir::PathSegment::from_name(name)))
+ .chain(ident.map(|ident| hir::PathSegment::from_ident(ident)))
.collect(),
span: p.span,
}
}
hir::PathSegment::new(
- self.lower_ident(segment.ident),
+ segment.ident,
generic_args,
infer_types,
)
|this| {
const DISALLOWED: ImplTraitContext = ImplTraitContext::Disallowed;
let &ParenthesisedArgs { ref inputs, ref output, span } = data;
- let inputs = inputs.iter().map(|ty| this.lower_ty(ty, DISALLOWED)).collect();
+ let inputs = inputs.iter().map(|ty| this.lower_ty_direct(ty, DISALLOWED)).collect();
let mk_tup = |this: &mut Self, tys, span| {
let LoweredNodeId { node_id, hir_id } = this.next_id();
- P(hir::Ty { node: hir::TyTup(tys), id: node_id, hir_id, span })
+ hir::Ty { node: hir::TyTup(tys), id: node_id, hir_id, span }
};
(
bindings: hir_vec![
hir::TypeBinding {
id: this.next_id().node_id,
- name: Symbol::intern(FN_OUTPUT_NAME),
+ ident: Ident::from_str(FN_OUTPUT_NAME),
ty: output
.as_ref()
.map(|ty| this.lower_ty(&ty, DISALLOWED))
- .unwrap_or_else(|| mk_tup(this, hir::HirVec::new(), span)),
+ .unwrap_or_else(|| P(mk_tup(this, hir::HirVec::new(), span))),
span: output.as_ref().map_or(span, |ty| ty.span),
}
],
}
}
- fn lower_fn_args_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Spanned<Name>> {
+ fn lower_fn_args_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Ident> {
decl.inputs
.iter()
.map(|arg| match arg.pat.node {
- PatKind::Ident(_, ident, None) => respan(ident.span, ident.name),
- _ => respan(arg.pat.span, keywords::Invalid.name()),
+ PatKind::Ident(_, ident, _) => ident,
+ _ => Ident::new(keywords::Invalid.name(), arg.pat.span),
})
.collect()
}
.iter()
.map(|arg| {
if let Some(def_id) = fn_def_id {
- self.lower_ty(&arg.ty, ImplTraitContext::Universal(def_id))
+ self.lower_ty_direct(&arg.ty, ImplTraitContext::Universal(def_id))
} else {
- self.lower_ty(&arg.ty, ImplTraitContext::Disallowed)
+ self.lower_ty_direct(&arg.ty, ImplTraitContext::Disallowed)
}
})
.collect::<HirVec<_>>();
// fn_def_id: DefId of the parent function. Used to create child impl trait definition.
fn lower_async_fn_ret_ty(
&mut self,
- inputs: &[P<hir::Ty>],
+ inputs: &[hir::Ty],
output: &FunctionRetTy,
fn_def_id: DefId,
) -> hir::FunctionRetTy {
let future_params = P(hir::GenericArgs {
args: hir_vec![],
bindings: hir_vec![hir::TypeBinding {
- name: Symbol::intern(FN_OUTPUT_NAME),
+ ident: Ident::from_str(FN_OUTPUT_NAME),
ty: output_ty,
id: this.next_id().node_id,
span,
fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
let span = l.ident.span;
- match self.lower_ident(l.ident) {
- x if x == "'static" => self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
- x if x == "'_" => match self.anonymous_lifetime_mode {
- AnonymousLifetimeMode::CreateParameter => {
- let fresh_name = self.collect_fresh_in_band_lifetime(span);
- self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(fresh_name))
- }
+ match l.ident {
+ ident if ident.name == keywords::StaticLifetime.name() =>
+ self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
+ ident if ident.name == keywords::UnderscoreLifetime.name() =>
+ match self.anonymous_lifetime_mode {
+ AnonymousLifetimeMode::CreateParameter => {
+ let fresh_name = self.collect_fresh_in_band_lifetime(span);
+ self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(fresh_name))
+ }
- AnonymousLifetimeMode::PassThrough => {
- self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore)
- }
- },
- name => {
- self.maybe_collect_in_band_lifetime(span, name);
- let param_name = ParamName::Plain(name);
+ AnonymousLifetimeMode::PassThrough => {
+ self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore)
+ }
+ },
+ ident => {
+ self.maybe_collect_in_band_lifetime(ident);
+ let param_name = ParamName::Plain(ident);
self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(param_name))
}
}
let lt = self.lower_lifetime(&Lifetime { id: param.id, ident: param.ident });
let param_name = match lt.name {
hir::LifetimeName::Param(param_name) => param_name,
- _ => hir::ParamName::Plain(lt.name.name()),
+ _ => hir::ParamName::Plain(lt.name.ident()),
};
let param = hir::GenericParam {
id: lt.id,
param
}
GenericParamKind::Type { ref default, .. } => {
- let mut name = self.lower_ident(param.ident);
-
// Don't expose `Self` (recovered "keyword used as ident" parse error).
// `rustc::ty` expects `Self` to be only used for a trait's `Self`.
// Instead, use gensym("Self") to create a distinct name that looks the same.
- if name == keywords::SelfType.name() {
- name = Symbol::gensym("Self");
- }
+ let ident = if param.ident.name == keywords::SelfType.name() {
+ param.ident.gensym()
+ } else {
+ param.ident
+ };
let add_bounds = add_bounds.get(¶m.id).map_or(&[][..], |x| &x);
if !add_bounds.is_empty() {
hir::GenericParam {
id: self.lower_node_id(param.id).node_id,
- name: hir::ParamName::Plain(name),
- span: param.ident.span,
+ name: hir::ParamName::Plain(ident),
pure_wrt_drop: attr::contains_name(¶m.attrs, "may_dangle"),
attrs: self.lower_attrs(¶m.attrs),
bounds,
+ span: ident.span,
kind: hir::GenericParamKind::Type {
default: default.as_ref().map(|x| {
self.lower_ty(x, ImplTraitContext::Disallowed)
hir::TraitItem {
id: node_id,
hir_id,
- name: self.lower_ident(i.ident),
+ ident: i.ident,
attrs: self.lower_attrs(&i.attrs),
generics,
node,
};
hir::TraitItemRef {
id: hir::TraitItemId { node_id: i.id },
- name: self.lower_ident(i.ident),
+ ident: i.ident,
span: i.span,
defaultness: self.lower_defaultness(Defaultness::Default, has_default),
kind,
hir::ImplItem {
id: node_id,
hir_id,
- name: self.lower_ident(i.ident),
+ ident: i.ident,
attrs: self.lower_attrs(&i.attrs),
generics,
vis: self.lower_visibility(&i.vis, None),
fn lower_impl_item_ref(&mut self, i: &ImplItem) -> hir::ImplItemRef {
hir::ImplItemRef {
id: hir::ImplItemId { node_id: i.id },
- name: self.lower_ident(i.ident),
+ ident: i.ident,
span: i.span,
vis: self.lower_visibility(&i.vis, Some(i.id)),
defaultness: self.lower_defaultness(i.defaultness, true /* [1] */),
hir::PatKind::Binding(
self.lower_binding_mode(binding_mode),
canonical_id,
- respan(ident.span, ident.name),
+ ident,
sub.as_ref().map(|x| self.lower_pat(x)),
)
}
P(hir::Path {
span: ident.span,
def,
- segments: hir_vec![hir::PathSegment::from_name(ident.name)],
+ segments: hir_vec![hir::PathSegment::from_ident(ident)],
}),
)),
}
let e1 = self.lower_expr(e1);
let e2 = self.lower_expr(e2);
let ty_path = P(self.std_path(span, &["ops", "RangeInclusive"], None, false));
- let ty = self.ty_path(id, span, hir::QPath::Resolved(None, ty_path));
- let new_seg = P(hir::PathSegment::from_name(Symbol::intern("new")));
+ let ty = P(self.ty_path(id, span, hir::QPath::Resolved(None, ty_path)));
+ let new_seg = P(hir::PathSegment::from_ident(Ident::from_str("new")));
let new_path = hir::QPath::TypeRelative(ty, new_seg);
let new = P(self.expr(span, hir::ExprPath(new_path), ThinVec::new()));
hir::ExprCall(new, hir_vec![e1, e2])
self.expr(span, hir::ExprCall(e, args), ThinVec::new())
}
- fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> hir::Expr {
- self.expr_ident_with_attrs(span, id, binding, ThinVec::new())
+ fn expr_ident(&mut self, span: Span, ident: Ident, binding: NodeId) -> hir::Expr {
+ self.expr_ident_with_attrs(span, ident, binding, ThinVec::new())
}
fn expr_ident_with_attrs(
&mut self,
span: Span,
- id: Name,
+ ident: Ident,
binding: NodeId,
attrs: ThinVec<Attribute>,
) -> hir::Expr {
P(hir::Path {
span,
def: Def::Local(binding),
- segments: hir_vec![hir::PathSegment::from_name(id)],
+ segments: hir_vec![hir::PathSegment::from_ident(ident)],
}),
));
&mut self,
sp: Span,
mutbl: bool,
- ident: Name,
+ ident: Ident,
ex: P<hir::Expr>,
) -> (hir::Stmt, NodeId) {
let pat = if mutbl {
self.pat(span, pt)
}
- fn pat_ident(&mut self, span: Span, name: Name) -> P<hir::Pat> {
- self.pat_ident_binding_mode(span, name, hir::BindingAnnotation::Unannotated)
+ fn pat_ident(&mut self, span: Span, ident: Ident) -> P<hir::Pat> {
+ self.pat_ident_binding_mode(span, ident, hir::BindingAnnotation::Unannotated)
}
fn pat_ident_binding_mode(
&mut self,
span: Span,
- name: Name,
+ ident: Ident,
bm: hir::BindingAnnotation,
) -> P<hir::Pat> {
let LoweredNodeId { node_id, hir_id } = self.next_id();
P(hir::Pat {
id: node_id,
hir_id,
- node: hir::PatKind::Binding(bm, node_id, Spanned { span, node: name }, None),
+ node: hir::PatKind::Binding(bm, node_id, ident.with_span_pos(span), None),
span,
})
}
.resolve_str_path(span, self.crate_root, components, params, is_value)
}
- fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> P<hir::Ty> {
+ fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> hir::Ty {
let mut id = id;
let node = match qpath {
hir::QPath::Resolved(None, path) => {
}
_ => hir::TyPath(qpath),
};
- P(hir::Ty {
+ hir::Ty {
id: id.node_id,
hir_id: id.hir_id,
node,
span,
- })
+ }
}
/// Invoked to create the lifetime argument for a type `&T`
use hir::map::{self, Node};
use hir::{Expr, FnDecl};
use hir::intravisit::FnKind;
-use syntax::ast::{Attribute, Name, NodeId};
+use syntax::ast::{Attribute, Ident, Name, NodeId};
use syntax_pos::Span;
/// An FnLikeNode is a Node that is like a fn, in that it has a decl
let closure = |c: ClosureParts<'a>| {
FnKind::Closure(c.attrs)
};
- let method = |_, name: Name, sig: &'a ast::MethodSig, vis, _, _, attrs| {
- FnKind::Method(name, sig, vis, attrs)
+ let method = |_, ident: Ident, sig: &'a ast::MethodSig, vis, _, _, attrs| {
+ FnKind::Method(ident, sig, vis, attrs)
};
self.handle(item, method, closure)
}
fn handle<A, I, M, C>(self, item_fn: I, method: M, closure: C) -> A where
I: FnOnce(ItemFnParts<'a>) -> A,
M: FnOnce(NodeId,
- Name,
+ Ident,
&'a ast::MethodSig,
Option<&'a ast::Visibility>,
ast::BodyId,
},
map::NodeTraitItem(ti) => match ti.node {
ast::TraitItemKind::Method(ref sig, ast::TraitMethod::Provided(body)) => {
- method(ti.id, ti.name, sig, None, body, ti.span, &ti.attrs)
+ method(ti.id, ti.ident, sig, None, body, ti.span, &ti.attrs)
}
_ => bug!("trait method FnLikeNode that is not fn-like"),
},
map::NodeImplItem(ii) => {
match ii.node {
ast::ImplItemKind::Method(ref sig, body) => {
- method(ii.id, ii.name, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
+ method(ii.id, ii.ident, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
}
_ => {
bug!("impl method FnLikeNode that is not fn-like")
// map the actual nodes, not the duplicate ones in the *Ref.
let TraitItemRef {
id,
- name: _,
+ ident: _,
kind: _,
span: _,
defaultness: _,
// map the actual nodes, not the duplicate ones in the *Ref.
let ImplItemRef {
id,
- name: _,
+ ident: _,
kind: _,
span: _,
vis: _,
// information we encapsulate into, the better
let def_data = match i.node {
ItemKind::Impl(..) => DefPathData::Impl,
- ItemKind::Trait(..) => DefPathData::Trait(i.ident.name.as_interned_str()),
+ ItemKind::Trait(..) => DefPathData::Trait(i.ident.as_interned_str()),
ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..) |
ItemKind::TraitAlias(..) |
ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) =>
- DefPathData::TypeNs(i.ident.name.as_interned_str()),
+ DefPathData::TypeNs(i.ident.as_interned_str()),
ItemKind::Mod(..) if i.ident == keywords::Invalid.ident() => {
return visit::walk_item(self, i);
}
|this| visit::walk_item(this, i)
)
}
- ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_interned_str()),
+ ItemKind::Mod(..) => DefPathData::Module(i.ident.as_interned_str()),
ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
- DefPathData::ValueNs(i.ident.name.as_interned_str()),
- ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.name.as_interned_str()),
+ DefPathData::ValueNs(i.ident.as_interned_str()),
+ ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.as_interned_str()),
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id),
ItemKind::GlobalAsm(..) => DefPathData::Misc,
ItemKind::Use(..) => {
}
let def = self.create_def(foreign_item.id,
- DefPathData::ValueNs(foreign_item.ident.name.as_interned_str()),
+ DefPathData::ValueNs(foreign_item.ident.as_interned_str()),
REGULAR_SPACE,
foreign_item.span);
fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
let def = self.create_def(v.node.data.id(),
- DefPathData::EnumVariant(v.node.ident
- .name.as_interned_str()),
+ DefPathData::EnumVariant(v.node.ident.as_interned_str()),
REGULAR_SPACE,
v.span);
self.with_parent(def, |this| visit::walk_variant(this, v, g, item_id));
}
fn visit_generic_param(&mut self, param: &'a GenericParam) {
- let name = param.ident.name.as_interned_str();
+ let name = param.ident.as_interned_str();
let def_path_data = match param.kind {
GenericParamKind::Lifetime { .. } => DefPathData::LifetimeParam(name),
GenericParamKind::Type { .. } => DefPathData::TypeParam(name),
fn visit_trait_item(&mut self, ti: &'a TraitItem) {
let def_data = match ti.node {
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
- DefPathData::ValueNs(ti.ident.name.as_interned_str()),
+ DefPathData::ValueNs(ti.ident.as_interned_str()),
TraitItemKind::Type(..) => {
- DefPathData::AssocTypeInTrait(ti.ident.name.as_interned_str())
+ DefPathData::AssocTypeInTrait(ti.ident.as_interned_str())
},
TraitItemKind::Macro(..) => return self.visit_macro_invoc(ti.id),
};
)
}
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
- DefPathData::ValueNs(ii.ident.name.as_interned_str()),
- ImplItemKind::Type(..) => DefPathData::AssocTypeInImpl(ii.ident.name.as_interned_str()),
+ DefPathData::ValueNs(ii.ident.as_interned_str()),
+ ImplItemKind::Type(..) => DefPathData::AssocTypeInImpl(ii.ident.as_interned_str()),
ImplItemKind::Macro(..) => return self.visit_macro_invoc(ii.id),
};
NodeItem(&Item { node: ItemTrait(..), .. }) => {
keywords::SelfType.name()
}
- NodeGenericParam(param) => param.name.name(),
+ NodeGenericParam(param) => param.name.ident().name,
_ => bug!("ty_param_name: {} not a type parameter", self.node_to_string(id)),
}
}
match self.get(id) {
NodeItem(i) => i.name,
NodeForeignItem(i) => i.name,
- NodeImplItem(ii) => ii.name,
- NodeTraitItem(ti) => ti.name,
+ NodeImplItem(ii) => ii.ident.name,
+ NodeTraitItem(ti) => ti.ident.name,
NodeVariant(v) => v.node.name,
NodeField(f) => f.ident.name,
- NodeLifetime(lt) => lt.name.name(),
- NodeGenericParam(param) => param.name.name(),
- NodeBinding(&Pat { node: PatKind::Binding(_,_,l,_), .. }) => l.node,
+ NodeLifetime(lt) => lt.name.ident().name,
+ NodeGenericParam(param) => param.name.ident().name,
+ NodeBinding(&Pat { node: PatKind::Binding(_,_,l,_), .. }) => l.name,
NodeStructCtor(_) => self.name(self.get_parent(id)),
_ => bug!("no name for {}", self.node_to_string(id))
}
impl Named for ForeignItem { fn name(&self) -> Name { self.name } }
impl Named for Variant_ { fn name(&self) -> Name { self.name } }
impl Named for StructField { fn name(&self) -> Name { self.ident.name } }
-impl Named for TraitItem { fn name(&self) -> Name { self.name } }
-impl Named for ImplItem { fn name(&self) -> Name { self.name } }
+impl Named for TraitItem { fn name(&self) -> Name { self.ident.name } }
+impl Named for ImplItem { fn name(&self) -> Name { self.ident.name } }
pub fn map_crate<'hir>(sess: &::session::Session,
Some(NodeImplItem(ii)) => {
match ii.node {
ImplItemKind::Const(..) => {
- format!("assoc const {} in {}{}", ii.name, path_str(), id_str)
+ format!("assoc const {} in {}{}", ii.ident, path_str(), id_str)
}
ImplItemKind::Method(..) => {
- format!("method {} in {}{}", ii.name, path_str(), id_str)
+ format!("method {} in {}{}", ii.ident, path_str(), id_str)
}
ImplItemKind::Type(_) => {
- format!("assoc type {} in {}{}", ii.name, path_str(), id_str)
+ format!("assoc type {} in {}{}", ii.ident, path_str(), id_str)
}
}
}
TraitItemKind::Type(..) => "assoc type",
};
- format!("{} {} in {}{}", kind, ti.name, path_str(), id_str)
+ format!("{} {} in {}{}", kind, ti.ident, path_str(), id_str)
}
Some(NodeVariant(ref variant)) => {
format!("variant {} in {}{}",
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Label {
- pub name: Name,
- pub span: Span,
+ pub ident: Ident,
}
impl fmt::Debug for Label {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "label({:?})", self.name)
+ write!(f, "label({:?})", self.ident)
}
}
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub enum ParamName {
/// Some user-given name like `T` or `'x`.
- Plain(Name),
+ Plain(Ident),
/// Synthetic name generated when user elided a lifetime in an impl header,
/// e.g. the lifetimes in cases like these:
}
impl ParamName {
- pub fn name(&self) -> Name {
+ pub fn ident(&self) -> Ident {
+ match *self {
+ ParamName::Plain(ident) => ident,
+ ParamName::Fresh(_) => keywords::UnderscoreLifetime.ident(),
+ }
+ }
+
+ pub fn modern(&self) -> ParamName {
match *self {
- ParamName::Plain(name) => name,
- ParamName::Fresh(_) => keywords::UnderscoreLifetime.name(),
+ ParamName::Plain(ident) => ParamName::Plain(ident.modern()),
+ param_name => param_name,
}
}
}
}
impl LifetimeName {
- pub fn name(&self) -> Name {
- use self::LifetimeName::*;
+ pub fn ident(&self) -> Ident {
match *self {
- Implicit => keywords::Invalid.name(),
- Underscore => keywords::UnderscoreLifetime.name(),
- Static => keywords::StaticLifetime.name(),
- Param(param_name) => param_name.name(),
+ LifetimeName::Implicit => keywords::Invalid.ident(),
+ LifetimeName::Underscore => keywords::UnderscoreLifetime.ident(),
+ LifetimeName::Static => keywords::StaticLifetime.ident(),
+ LifetimeName::Param(param_name) => param_name.ident(),
}
}
pub fn is_elided(&self) -> bool {
- use self::LifetimeName::*;
match self {
- Implicit | Underscore => true,
+ LifetimeName::Implicit | LifetimeName::Underscore => true,
// It might seem surprising that `Fresh(_)` counts as
// *not* elided -- but this is because, as far as the code
// in the compiler is concerned -- `Fresh(_)` variants act
// equivalently to "some fresh name". They correspond to
// early-bound regions on an impl, in other words.
- Param(_) | Static => false,
+ LifetimeName::Param(_) | LifetimeName::Static => false,
}
}
fn is_static(&self) -> bool {
self == &LifetimeName::Static
}
+
+ pub fn modern(&self) -> LifetimeName {
+ match *self {
+ LifetimeName::Param(param_name) => LifetimeName::Param(param_name.modern()),
+ lifetime_name => lifetime_name,
+ }
+ }
+}
+
+impl fmt::Display for Lifetime {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.name.ident().fmt(f)
+ }
}
impl fmt::Debug for Lifetime {
impl Path {
pub fn is_global(&self) -> bool {
- !self.segments.is_empty() && self.segments[0].name == keywords::CrateRoot.name()
+ !self.segments.is_empty() && self.segments[0].ident.name == keywords::CrateRoot.name()
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PathSegment {
/// The identifier portion of this path segment.
- pub name: Name,
+ pub ident: Ident,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
impl PathSegment {
/// Convert an identifier to the corresponding segment.
- pub fn from_name(name: Name) -> PathSegment {
+ pub fn from_ident(ident: Ident) -> PathSegment {
PathSegment {
- name,
+ ident,
infer_types: true,
args: None,
}
}
- pub fn new(name: Name, args: GenericArgs, infer_types: bool) -> Self {
+ pub fn new(ident: Ident, args: GenericArgs, infer_types: bool) -> Self {
PathSegment {
- name,
+ ident,
infer_types,
args: if args.is_empty() {
None
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum GenericArg {
Lifetime(Lifetime),
- Type(P<Ty>),
+ Type(Ty),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
self.args.is_empty() && self.bindings.is_empty() && !self.parenthesized
}
- pub fn inputs(&self) -> &[P<Ty>] {
+ pub fn inputs(&self) -> &[Ty] {
if self.parenthesized {
for arg in &self.args {
match arg {
/// The `NodeId` is the canonical ID for the variable being bound,
/// e.g. in `Ok(x) | Err(x)`, both `x` use the same canonical ID,
/// which is the pattern ID of the first `x`.
- Binding(BindingAnnotation, NodeId, Spanned<Name>, Option<P<Pat>>),
+ Binding(BindingAnnotation, NodeId, Ident, Option<P<Pat>>),
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
/// The `bool` is `true` in the presence of a `..`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItem {
pub id: NodeId,
- pub name: Name,
+ pub ident: Ident,
pub hir_id: HirId,
pub attrs: HirVec<Attribute>,
pub generics: Generics,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitMethod {
/// No default body in the trait, just a signature.
- Required(HirVec<Spanned<Name>>),
+ Required(HirVec<Ident>),
/// Both signature and body are provided in the trait.
Provided(BodyId),
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItem {
pub id: NodeId,
- pub name: Name,
+ pub ident: Ident,
pub hir_id: HirId,
pub vis: Visibility,
pub defaultness: Defaultness,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TypeBinding {
pub id: NodeId,
- pub name: Name,
+ pub ident: Ident,
pub ty: P<Ty>,
pub span: Span,
}
pub abi: Abi,
pub generic_params: HirVec<GenericParam>,
pub decl: P<FnDecl>,
- pub arg_names: HirVec<Spanned<Name>>,
+ pub arg_names: HirVec<Ident>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
/// The never type (`!`)
TyNever,
/// A tuple (`(A, B, C, D,...)`)
- TyTup(HirVec<P<Ty>>),
+ TyTup(HirVec<Ty>),
/// A path to a type definition (`module::module::...::Type`), or an
/// associated type, e.g. `<Vec<T> as Trait>::Type` or `<T>::Target`.
///
/// Represents the header (not the body) of a function declaration
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct FnDecl {
- pub inputs: HirVec<P<Ty>>,
+ pub inputs: HirVec<Ty>,
pub output: FunctionRetTy,
pub variadic: bool,
/// True if this function has an `self`, `&self` or `&mut self` receiver
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItemRef {
pub id: TraitItemId,
- pub name: Name,
+ pub ident: Ident,
pub kind: AssociatedItemKind,
pub span: Span,
pub defaultness: Defaultness,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItemRef {
pub id: ImplItemId,
- pub name: Name,
+ pub ident: Ident,
pub kind: AssociatedItemKind,
pub span: Span,
pub vis: Visibility,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ForeignItem_ {
/// A foreign function
- ForeignItemFn(P<FnDecl>, HirVec<Spanned<Name>>, Generics),
+ ForeignItemFn(P<FnDecl>, HirVec<Ident>, Generics),
/// A foreign static item (`static ext: u8`), with optional mutability
/// (the boolean is true when mutable)
ForeignItemStatic(P<Ty>, bool),
use hir::def_id::DefId;
use hir::{self, HirId, PatKind};
use syntax::ast;
-use syntax::codemap::Spanned;
use syntax_pos::Span;
use std::iter::{Enumerate, ExactSizeIterator};
/// Call `f` on every "binding" in a pattern, e.g., on `a` in
/// `match foo() { Some(a) => (), None => () }`
pub fn each_binding<F>(&self, mut f: F)
- where F: FnMut(hir::BindingAnnotation, HirId, Span, &Spanned<ast::Name>),
+ where F: FnMut(hir::BindingAnnotation, HirId, Span, ast::Ident),
{
self.walk(|p| {
- if let PatKind::Binding(binding_mode, _, ref pth, _) = p.node {
- f(binding_mode, p.hir_id, p.span, pth);
+ if let PatKind::Binding(binding_mode, _, ident, _) = p.node {
+ f(binding_mode, p.hir_id, p.span, ident);
}
true
});
contains_bindings
}
- pub fn simple_name(&self) -> Option<ast::Name> {
+ pub fn simple_ident(&self) -> Option<ast::Ident> {
match self.node {
- PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ref path1, None) |
- PatKind::Binding(hir::BindingAnnotation::Mutable, _, ref path1, None) =>
- Some(path1.node),
- _ => None,
- }
- }
-
- pub fn simple_span(&self) -> Option<Span> {
- match self.node {
- PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ref path1, None) |
- PatKind::Binding(hir::BindingAnnotation::Mutable, _, ref path1, None) =>
- Some(path1.span),
+ PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, None) |
+ PatKind::Binding(hir::BindingAnnotation::Mutable, _, ident, None) => Some(ident),
_ => None,
}
}
use rustc_target::spec::abi::Abi;
use syntax::ast;
-use syntax::codemap::{CodeMap, Spanned};
+use syntax::codemap::CodeMap;
use syntax::parse::ParseSess;
use syntax::parse::lexer::comments;
use syntax::print::pp::{self, Breaks};
}
fn print_associated_const(&mut self,
- name: ast::Name,
+ ident: ast::Ident,
ty: &hir::Ty,
default: Option<hir::BodyId>,
vis: &hir::Visibility)
-> io::Result<()> {
self.s.word(&visibility_qualified(vis, ""))?;
self.word_space("const")?;
- self.print_name(name)?;
+ self.print_ident(ident)?;
self.word_space(":")?;
self.print_type(ty)?;
if let Some(expr) = default {
}
fn print_associated_type(&mut self,
- name: ast::Name,
+ ident: ast::Ident,
bounds: Option<&hir::GenericBounds>,
ty: Option<&hir::Ty>)
-> io::Result<()> {
self.word_space("type")?;
- self.print_name(name)?;
+ self.print_ident(ident)?;
if let Some(bounds) = bounds {
self.print_bounds(":", bounds)?;
}
match kind {
hir::UseKind::Single => {
- if path.segments.last().unwrap().name != item.name {
+ if path.segments.last().unwrap().ident.name != item.name {
self.s.space()?;
self.word_space("as")?;
self.print_name(item.name)?;
hir::Visibility::Crate(ast::CrateSugar::PubCrate) => self.word_nbsp("pub(crate)")?,
hir::Visibility::Restricted { ref path, .. } => {
self.s.word("pub(")?;
- if path.segments.len() == 1 && path.segments[0].name == keywords::Super.name() {
+ if path.segments.len() == 1 &&
+ path.segments[0].ident.name == keywords::Super.name() {
// Special case: `super` can print like `pub(super)`.
self.s.word("super")?;
} else {
Ok(())
}
pub fn print_method_sig(&mut self,
- name: ast::Name,
+ ident: ast::Ident,
m: &hir::MethodSig,
generics: &hir::Generics,
vis: &hir::Visibility,
- arg_names: &[Spanned<ast::Name>],
+ arg_names: &[ast::Ident],
body_id: Option<hir::BodyId>)
-> io::Result<()> {
self.print_fn(&m.decl,
m.header,
- Some(name),
+ Some(ident.name),
generics,
vis,
arg_names,
self.print_outer_attributes(&ti.attrs)?;
match ti.node {
hir::TraitItemKind::Const(ref ty, default) => {
- self.print_associated_const(ti.name, &ty, default, &hir::Inherited)?;
+ self.print_associated_const(ti.ident, &ty, default, &hir::Inherited)?;
}
hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref arg_names)) => {
- self.print_method_sig(ti.name, sig, &ti.generics, &hir::Inherited, arg_names,
+ self.print_method_sig(ti.ident, sig, &ti.generics, &hir::Inherited, arg_names,
None)?;
self.s.word(";")?;
}
hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => {
self.head("")?;
- self.print_method_sig(ti.name, sig, &ti.generics, &hir::Inherited, &[],
+ self.print_method_sig(ti.ident, sig, &ti.generics, &hir::Inherited, &[],
Some(body))?;
self.nbsp()?;
self.end()?; // need to close a box
self.ann.nested(self, Nested::Body(body))?;
}
hir::TraitItemKind::Type(ref bounds, ref default) => {
- self.print_associated_type(ti.name,
+ self.print_associated_type(ti.ident,
Some(bounds),
default.as_ref().map(|ty| &**ty))?;
}
match ii.node {
hir::ImplItemKind::Const(ref ty, expr) => {
- self.print_associated_const(ii.name, &ty, Some(expr), &ii.vis)?;
+ self.print_associated_const(ii.ident, &ty, Some(expr), &ii.vis)?;
}
hir::ImplItemKind::Method(ref sig, body) => {
self.head("")?;
- self.print_method_sig(ii.name, sig, &ii.generics, &ii.vis, &[], Some(body))?;
+ self.print_method_sig(ii.ident, sig, &ii.generics, &ii.vis, &[], Some(body))?;
self.nbsp()?;
self.end()?; // need to close a box
self.end()?; // need to close a box
self.ann.nested(self, Nested::Body(body))?;
}
hir::ImplItemKind::Type(ref ty) => {
- self.print_associated_type(ii.name, None, Some(ty))?;
+ self.print_associated_type(ii.ident, None, Some(ty))?;
}
}
self.ann.post(self, NodeSubItem(ii.id))
let base_args = &args[1..];
self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX)?;
self.s.word(".")?;
- self.print_name(segment.name)?;
+ self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
if !generic_args.args.is_empty() || !generic_args.bindings.is_empty() {
}
hir::ExprWhile(ref test, ref blk, opt_label) => {
if let Some(label) = opt_label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("while")?;
}
hir::ExprLoop(ref blk, opt_label, _) => {
if let Some(label) = opt_label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("loop")?;
}
hir::ExprBlock(ref blk, opt_label) => {
if let Some(label) = opt_label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
// containing cbox, will be closed by print-block at }
self.s.word("break")?;
self.s.space()?;
if let Some(label) = destination.label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.s.space()?;
}
if let Some(ref expr) = *opt_expr {
self.s.word("continue")?;
self.s.space()?;
if let Some(label) = destination.label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.s.space()?
}
}
}
pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> {
- self.print_ident(name.to_ident())
+ self.print_ident(ast::Ident::with_empty_ctxt(name))
}
pub fn print_for_decl(&mut self, loc: &hir::Local, coll: &hir::Expr) -> io::Result<()> {
if i > 0 {
self.s.word("::")?
}
- if segment.name != keywords::CrateRoot.name() &&
- segment.name != keywords::DollarCrate.name() {
- self.print_name(segment.name)?;
+ if segment.ident.name != keywords::CrateRoot.name() &&
+ segment.ident.name != keywords::DollarCrate.name() {
+ self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args, segment.infer_types,
colons_before_params)
if i > 0 {
self.s.word("::")?
}
- if segment.name != keywords::CrateRoot.name() &&
- segment.name != keywords::DollarCrate.name() {
- self.print_name(segment.name)?;
+ if segment.ident.name != keywords::CrateRoot.name() &&
+ segment.ident.name != keywords::DollarCrate.name() {
+ self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args,
segment.infer_types,
self.s.word(">")?;
self.s.word("::")?;
let item_segment = path.segments.last().unwrap();
- self.print_name(item_segment.name)?;
+ self.print_ident(item_segment.ident)?;
item_segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args,
item_segment.infer_types,
self.print_type(qself)?;
self.s.word(">")?;
self.s.word("::")?;
- self.print_name(item_segment.name)?;
+ self.print_ident(item_segment.ident)?;
item_segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args,
item_segment.infer_types,
for binding in generic_args.bindings.iter() {
start_or_comma(self)?;
- self.print_name(binding.name)?;
+ self.print_ident(binding.ident)?;
self.s.space()?;
self.word_space("=")?;
self.print_type(&binding.ty)?;
// is that it doesn't matter
match pat.node {
PatKind::Wild => self.s.word("_")?,
- PatKind::Binding(binding_mode, _, ref path1, ref sub) => {
+ PatKind::Binding(binding_mode, _, ident, ref sub) => {
match binding_mode {
hir::BindingAnnotation::Ref => {
self.word_nbsp("ref")?;
self.word_nbsp("mut")?;
}
}
- self.print_name(path1.node)?;
+ self.print_ident(ident)?;
if let Some(ref p) = *sub {
self.s.word("@")?;
self.print_pat(&p)?;
match arm.body.node {
hir::ExprBlock(ref blk, opt_label) => {
if let Some(label) = opt_label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
// the block will close the pattern's ibox
name: Option<ast::Name>,
generics: &hir::Generics,
vis: &hir::Visibility,
- arg_names: &[Spanned<ast::Name>],
+ arg_names: &[ast::Ident],
body_id: Option<hir::BodyId>)
-> io::Result<()> {
self.print_fn_header_info(header, vis)?;
assert!(arg_names.is_empty() || body_id.is_none());
self.commasep(Inconsistent, &decl.inputs, |s, ty| {
s.ibox(indent_unit)?;
- if let Some(name) = arg_names.get(i) {
- s.s.word(&name.node.as_str())?;
+ if let Some(arg_name) = arg_names.get(i) {
+ s.s.word(&arg_name.as_str())?;
s.s.word(":")?;
s.s.space()?;
} else if let Some(body_id) = body_id {
}
pub fn print_generic_param(&mut self, param: &GenericParam) -> io::Result<()> {
- self.print_name(param.name.name())?;
+ self.print_ident(param.name.ident())?;
match param.kind {
GenericParamKind::Lifetime { .. } => {
let mut sep = ":";
}
pub fn print_lifetime(&mut self, lifetime: &hir::Lifetime) -> io::Result<()> {
- self.print_name(lifetime.name.name())
+ self.print_ident(lifetime.name.ident())
}
pub fn print_where_clause(&mut self, where_clause: &hir::WhereClause) -> io::Result<()> {
decl: &hir::FnDecl,
name: Option<ast::Name>,
generic_params: &[hir::GenericParam],
- arg_names: &[Spanned<ast::Name>])
+ arg_names: &[ast::Ident])
-> io::Result<()> {
self.ibox(indent_unit)?;
if !generic_params.is_empty() {
pub fn encode_opaque(&self, encoder: &mut Encoder) -> EncodeResult {
let bytes: [u8; 16] = unsafe { mem::transmute([self.0.to_le(), self.1.to_le()]) };
- encoder.emit_raw_bytes(&bytes)
+ encoder.emit_raw_bytes(&bytes);
+ Ok(())
}
pub fn decode_opaque<'a>(decoder: &mut Decoder<'a>) -> Result<Fingerprint, String> {
impl serialize::UseSpecializedDecodable for Fingerprint { }
-impl<'a> serialize::SpecializedEncoder<Fingerprint> for serialize::opaque::Encoder<'a> {
+impl serialize::SpecializedEncoder<Fingerprint> for serialize::opaque::Encoder {
fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
f.encode_opaque(self)
}
});
impl_stable_hash_for!(struct hir::Label {
- span,
- name
+ ident
});
impl_stable_hash_for!(struct hir::Lifetime {
});
impl_stable_hash_for!(struct hir::PathSegment {
- name,
+ ident -> (ident.name),
infer_types,
args
});
impl_stable_hash_for!(struct hir::GenericParam {
id,
name,
- span,
pure_wrt_drop,
attrs,
bounds,
+ span,
kind
});
impl_stable_hash_for!(struct hir::TypeBinding {
id,
- name,
+ ident -> (ident.name),
ty,
span
});
Return(t)
});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitRef {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::TraitRef {
- ref path,
- // Don't hash the ref_id. It is tracked via the thing it is used to access
- ref_id: _,
- } = *self;
-
- path.hash_stable(hcx, hasher);
- }
-}
-
+impl_stable_hash_for!(struct hir::TraitRef {
+ // Don't hash the ref_id. It is tracked via the thing it is used to access
+ ref_id -> _,
+ path,
+});
impl_stable_hash_for!(struct hir::PolyTraitRef {
bound_generic_params,
body
});
+impl_stable_hash_for!(struct hir::Block {
+ stmts,
+ expr,
+ id -> _,
+ hir_id -> _,
+ rules,
+ span,
+ targeted_by_break,
+ recovered,
+});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Block {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::Block {
- ref stmts,
- ref expr,
- id: _,
- hir_id: _,
- rules,
- span,
- targeted_by_break,
- recovered,
- } = *self;
-
- stmts.hash_stable(hcx, hasher);
- expr.hash_stable(hcx, hasher);
- rules.hash_stable(hcx, hasher);
- span.hash_stable(hcx, hasher);
- recovered.hash_stable(hcx, hasher);
- targeted_by_break.hash_stable(hcx, hasher);
- }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Pat {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::Pat {
- id: _,
- hir_id: _,
- ref node,
- ref span
- } = *self;
-
-
- node.hash_stable(hcx, hasher);
- span.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::Pat {
+ id -> _,
+ hir_id -> _,
+ node,
+ span,
+});
impl_stable_hash_for_spanned!(hir::FieldPat);
-impl<'a> HashStable<StableHashingContext<'a>> for hir::FieldPat {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::FieldPat {
- id: _,
- ident,
- ref pat,
- is_shorthand,
- } = *self;
-
- ident.hash_stable(hcx, hasher);
- pat.hash_stable(hcx, hasher);
- is_shorthand.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::FieldPat {
+ id -> _,
+ ident -> (ident.name),
+ pat,
+ is_shorthand,
+});
impl_stable_hash_for!(enum hir::BindingAnnotation {
Unannotated,
body
});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Field {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::Field {
- id: _,
- ident,
- ref expr,
- span,
- is_shorthand,
- } = *self;
-
- ident.hash_stable(hcx, hasher);
- expr.hash_stable(hcx, hasher);
- span.hash_stable(hcx, hasher);
- is_shorthand.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::Field {
+ id -> _,
+ ident,
+ expr,
+ span,
+ is_shorthand,
+});
impl_stable_hash_for_spanned!(ast::Name);
UnresolvedLabel
});
-impl<'a> HashStable<StableHashingContext<'a>> for ast::Ident {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ast::Ident {
- name,
- span,
- } = *self;
-
- name.hash_stable(hcx, hasher);
- span.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ast::Ident {
+ name,
+ span,
+});
impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitItem {
fn hash_stable<W: StableHasherResult>(&self,
let hir::TraitItem {
id: _,
hir_id: _,
- name,
+ ident,
ref attrs,
ref generics,
ref node,
} = *self;
hcx.hash_hir_item_like(|hcx| {
- name.hash_stable(hcx, hasher);
+ ident.name.hash_stable(hcx, hasher);
attrs.hash_stable(hcx, hasher);
generics.hash_stable(hcx, hasher);
node.hash_stable(hcx, hasher);
let hir::ImplItem {
id: _,
hir_id: _,
- name,
+ ident,
ref vis,
defaultness,
ref attrs,
} = *self;
hcx.hash_hir_item_like(|hcx| {
- name.hash_stable(hcx, hasher);
+ ident.name.hash_stable(hcx, hasher);
vis.hash_stable(hcx, hasher);
defaultness.hash_stable(hcx, hasher);
attrs.hash_stable(hcx, hasher);
Negative
});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Mod {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::Mod {
- inner,
- // We are not hashing the IDs of the items contained in the module.
- // This is harmless and matches the current behavior but it's not
- // actually correct. See issue #40876.
- item_ids: _,
- } = *self;
-
- inner.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::Mod {
+ inner,
+ // We are not hashing the IDs of the items contained in the module.
+ // This is harmless and matches the current behavior but it's not
+ // actually correct. See issue #40876.
+ item_ids -> _,
+});
impl_stable_hash_for!(struct hir::ForeignMod {
abi,
impl_stable_hash_for!(struct hir::StructField {
span,
- ident,
+ ident -> (ident.name),
vis,
id,
ty,
impl_stable_hash_for!(struct hir::TraitItemRef {
id,
- name,
+ ident -> (ident.name),
kind,
span,
defaultness
impl_stable_hash_for!(struct hir::ImplItemRef {
id,
- name,
+ ident -> (ident.name),
kind,
span,
vis,
defaultness
});
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::AssociatedItemKind {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::AssociatedItemKind {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
is_indirect
});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::GlobalAsm {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::GlobalAsm {
- asm,
- ctxt: _
- } = *self;
-
- asm.hash_stable(hcx, hasher);
- }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>> for hir::InlineAsm {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::InlineAsm {
- asm,
- asm_str_style,
- ref outputs,
- ref inputs,
- ref clobbers,
- volatile,
- alignstack,
- dialect,
- ctxt: _, // This is used for error reporting
- } = *self;
+impl_stable_hash_for!(struct hir::GlobalAsm {
+ asm,
+ ctxt -> _, // This is used for error reporting
+});
- asm.hash_stable(hcx, hasher);
- asm_str_style.hash_stable(hcx, hasher);
- outputs.hash_stable(hcx, hasher);
- inputs.hash_stable(hcx, hasher);
- clobbers.hash_stable(hcx, hasher);
- volatile.hash_stable(hcx, hasher);
- alignstack.hash_stable(hcx, hasher);
- dialect.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::InlineAsm {
+ asm,
+ asm_str_style,
+ outputs,
+ inputs,
+ clobbers,
+ volatile,
+ alignstack,
+ dialect,
+ ctxt -> _, // This is used for error reporting
+});
impl_stable_hash_for!(enum hir::def::CtorKind {
Fn,
NotConst
});
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::def_id::DefIndex {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::def_id::DefIndex {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
span
});
-impl<'a> HashStable<StableHashingContext<'a>>
-for ::middle::lang_items::LangItem {
+impl<'a> HashStable<StableHashingContext<'a>> for ::middle::lang_items::LangItem {
fn hash_stable<W: StableHasherResult>(&self,
_: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
missing
});
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::TraitCandidate {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitCandidate {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
}
}
-impl<'hir> HashStable<StableHashingContext<'hir>> for hir::CodegenFnAttrs
-{
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'hir>,
- hasher: &mut StableHasher<W>) {
- let hir::CodegenFnAttrs {
- flags,
- inline,
- export_name,
- ref target_features,
- linkage,
- } = *self;
-
- flags.hash_stable(hcx, hasher);
- inline.hash_stable(hcx, hasher);
- export_name.hash_stable(hcx, hasher);
- target_features.hash_stable(hcx, hasher);
- linkage.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::CodegenFnAttrs {
+ flags,
+ inline,
+ export_name,
+ target_features,
+ linkage,
+});
impl<'hir> HashStable<StableHashingContext<'hir>> for hir::CodegenFnAttrFlags
{
format,
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
edition
});
src_hash.hash_stable(hcx, hasher);
// We only hash the relative position within this filemap
- lines.with_lock(|lines| {
- lines.len().hash_stable(hcx, hasher);
- for &line in lines.iter() {
- stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
- }
- });
+ lines.len().hash_stable(hcx, hasher);
+ for &line in lines.iter() {
+ stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
+ }
// We only hash the relative position within this filemap
- multibyte_chars.with_lock(|multibyte_chars| {
- multibyte_chars.len().hash_stable(hcx, hasher);
- for &char_pos in multibyte_chars.iter() {
- stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
- }
- });
+ multibyte_chars.len().hash_stable(hcx, hasher);
+ for &char_pos in multibyte_chars.iter() {
+ stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
+ }
- non_narrow_chars.with_lock(|non_narrow_chars| {
- non_narrow_chars.len().hash_stable(hcx, hasher);
- for &char_pos in non_narrow_chars.iter() {
- stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher);
- }
- });
+ non_narrow_chars.len().hash_stable(hcx, hasher);
+ for &char_pos in non_narrow_chars.iter() {
+ stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher);
+ }
}
}
Relative(distance)
});
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::FieldDef {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::FieldDef { did, ident, vis } = *self;
-
- did.hash_stable(hcx, hasher);
- ident.name.hash_stable(hcx, hasher);
- vis.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ty::FieldDef {
+ did,
+ ident -> (ident.name),
+ vis,
+});
impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::middle::const_val::ConstVal<'gcx> {
+for ::mir::interpret::ConstValue<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
- use middle::const_val::ConstVal::*;
+ use mir::interpret::ConstValue::*;
mem::discriminant(self).hash_stable(hcx, hasher);
def_id.hash_stable(hcx, hasher);
substs.hash_stable(hcx, hasher);
}
- Value(ref value) => {
- value.hash_stable(hcx, hasher);
- }
- }
- }
-}
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::mir::interpret::ConstValue<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- use mir::interpret::ConstValue::*;
-
- mem::discriminant(self).hash_stable(hcx, hasher);
-
- match *self {
Scalar(val) => {
val.hash_stable(hcx, hasher);
}
val
});
-impl_stable_hash_for!(struct ::middle::const_val::ConstEvalErr<'tcx> {
+impl_stable_hash_for!(struct ::mir::interpret::ConstEvalErr<'tcx> {
span,
- kind
+ stacktrace,
+ error
});
-impl_stable_hash_for!(struct ::middle::const_val::FrameInfo {
+impl_stable_hash_for!(struct ::mir::interpret::FrameInfo {
span,
lint_root,
location
});
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::middle::const_val::ErrKind<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- use middle::const_val::ErrKind::*;
-
- mem::discriminant(self).hash_stable(hcx, hasher);
-
- match *self {
- TypeckError |
- CouldNotResolve |
- CheckMatchError => {
- // nothing to do
- }
- Miri(ref err, ref trace) => {
- err.hash_stable(hcx, hasher);
- trace.hash_stable(hcx, hasher);
- },
- }
- }
-}
-
impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs });
impl_stable_hash_for!(struct ty::GeneratorSubsts<'tcx> { substs });
predicates
});
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::mir::interpret::EvalError<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- self.kind.hash_stable(hcx, hasher)
- }
-}
+impl_stable_hash_for!(struct ::mir::interpret::EvalError<'tcx> { kind });
impl<'a, 'gcx, O: HashStable<StableHashingContext<'a>>> HashStable<StableHashingContext<'a>>
for ::mir::interpret::EvalErrorKind<'gcx, O> {
ReadFromReturnPointer |
UnimplementedTraitSelection |
TypeckError |
+ TooGeneric |
+ CheckMatchError |
DerefFunctionPointer |
ExecuteMemory |
OverflowNeg |
Struct(index)
});
-impl<'a> HashStable<StableHashingContext<'a>> for ty::Generics {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::Generics {
- parent,
- ref parent_count,
- ref params,
-
- // Reverse map to each param's `index` field, from its `def_id`.
- param_def_id_to_index: _, // Don't hash this
- has_self,
- has_late_bound_regions,
- } = *self;
-
- parent.hash_stable(hcx, hasher);
- parent_count.hash_stable(hcx, hasher);
- params.hash_stable(hcx, hasher);
- has_self.hash_stable(hcx, hasher);
- has_late_bound_regions.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ty::Generics {
+ parent,
+ parent_count,
+ params,
+ // Reverse map to each param's `index` field, from its `def_id`.
+ param_def_id_to_index -> _, // Don't hash this
+ has_self,
+ has_late_bound_regions,
+});
impl_stable_hash_for!(struct ty::GenericParamDef {
name,
}
}
-impl<'a> HashStable<StableHashingContext<'a>> for ty::TraitDef {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::TraitDef {
- // We already have the def_path_hash below, no need to hash it twice
- def_id: _,
- unsafety,
- paren_sugar,
- has_auto_impl,
- def_path_hash,
- } = *self;
-
- unsafety.hash_stable(hcx, hasher);
- paren_sugar.hash_stable(hcx, hasher);
- has_auto_impl.hash_stable(hcx, hasher);
- def_path_hash.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ty::TraitDef {
+ // We already have the def_path_hash below, no need to hash it twice
+ def_id -> _,
+ unsafety,
+ paren_sugar,
+ has_auto_impl,
+ def_path_hash,
+});
impl_stable_hash_for!(struct ty::Destructor {
did
});
-impl<'a> HashStable<StableHashingContext<'a>> for ty::CrateVariancesMap {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::CrateVariancesMap {
- ref variances,
- // This is just an irrelevant helper value.
- empty_variance: _,
- } = *self;
-
- variances.hash_stable(hcx, hasher);
- }
-}
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::CratePredicatesMap<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::CratePredicatesMap {
- ref predicates,
- // This is just an irrelevant helper value.
- empty_predicate: _,
- } = *self;
+impl_stable_hash_for!(struct ty::CrateVariancesMap {
+ variances,
+ // This is just an irrelevant helper value.
+ empty_variance -> _,
+});
- predicates.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ty::CratePredicatesMap<'tcx> {
+ predicates,
+ // This is just an irrelevant helper value.
+ empty_predicate -> _,
+});
impl_stable_hash_for!(struct ty::AssociatedItem {
def_id,
- name,
+ ident -> (ident.name),
kind,
vis,
defaultness,
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! **Canonicalization** is the key to constructing a query in the
-//! middle of type inference. Ordinarily, it is not possible to store
-//! types from type inference in query keys, because they contain
-//! references to inference variables whose lifetimes are too short
-//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
-//! produces two things:
-//!
-//! - a value T2 where each unbound inference variable has been
-//! replaced with a **canonical variable**;
-//! - a map M (of type `CanonicalVarValues`) from those canonical
-//! variables back to the original.
-//!
-//! We can then do queries using T2. These will give back constriants
-//! on the canonical variables which can be translated, using the map
-//! M, into constraints in our source context. This process of
-//! translating the results back is done by the
-//! `instantiate_query_result` method.
-//!
-//! For a more detailed look at what is happening here, check
-//! out the [chapter in the rustc guide][c].
-//!
-//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
-
-use infer::{InferCtxt, InferOk, InferResult, RegionVariableOrigin, TypeVariableOrigin};
-use rustc_data_structures::indexed_vec::Idx;
-use serialize::UseSpecializedDecodable;
-use std::fmt::Debug;
-use std::ops::Index;
-use std::sync::atomic::Ordering;
-use syntax::codemap::Span;
-use traits::{Obligation, ObligationCause, PredicateObligation};
-use ty::{self, CanonicalVar, Lift, Region, Slice, Ty, TyCtxt, TypeFlags};
-use ty::subst::{Kind, UnpackedKind};
-use ty::fold::{TypeFoldable, TypeFolder};
-
-use rustc_data_structures::indexed_vec::IndexVec;
-use rustc_data_structures::fx::FxHashMap;
-
-/// A "canonicalized" type `V` is one where all free inference
-/// variables have been rewriten to "canonical vars". These are
-/// numbered starting from 0 in order of first appearance.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct Canonical<'gcx, V> {
- pub variables: CanonicalVarInfos<'gcx>,
- pub value: V,
-}
-
-pub type CanonicalVarInfos<'gcx> = &'gcx Slice<CanonicalVarInfo>;
-
-impl<'gcx> UseSpecializedDecodable for CanonicalVarInfos<'gcx> { }
-
-/// A set of values corresponding to the canonical variables from some
-/// `Canonical`. You can give these values to
-/// `canonical_value.substitute` to substitute them into the canonical
-/// value at the right places.
-///
-/// When you canonicalize a value `V`, you get back one of these
-/// vectors with the original values that were replaced by canonical
-/// variables.
-///
-/// You can also use `infcx.fresh_inference_vars_for_canonical_vars`
-/// to get back a `CanonicalVarValues` containing fresh inference
-/// variables.
-#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct CanonicalVarValues<'tcx> {
- pub var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
-}
-
-/// Information about a canonical variable that is included with the
-/// canonical value. This is sufficient information for code to create
-/// a copy of the canonical value in some other inference context,
-/// with fresh inference variables replacing the canonical values.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct CanonicalVarInfo {
- pub kind: CanonicalVarKind,
-}
-
-/// Describes the "kind" of the canonical variable. This is a "kind"
-/// in the type-theory sense of the term -- i.e., a "meta" type system
-/// that analyzes type-like values.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub enum CanonicalVarKind {
- /// Some kind of type inference variable.
- Ty(CanonicalTyVarKind),
-
- /// Region variable `'?R`.
- Region,
-}
-
-/// Rust actually has more than one category of type variables;
-/// notably, the type variables we create for literals (e.g., 22 or
-/// 22.) can only be instantiated with integral/float types (e.g.,
-/// usize or f32). In order to faithfully reproduce a type, we need to
-/// know what set of types a given type variable can be unified with.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub enum CanonicalTyVarKind {
- /// General type variable `?T` that can be unified with arbitrary types.
- General,
-
- /// Integral type variable `?I` (that can only be unified with integral types).
- Int,
-
- /// Floating-point type variable `?F` (that can only be unified with float types).
- Float,
-}
-
-/// After we execute a query with a canonicalized key, we get back a
-/// `Canonical<QueryResult<..>>`. You can use
-/// `instantiate_query_result` to access the data in this result.
-#[derive(Clone, Debug)]
-pub struct QueryResult<'tcx, R> {
- pub var_values: CanonicalVarValues<'tcx>,
- pub region_constraints: Vec<QueryRegionConstraint<'tcx>>,
- pub certainty: Certainty,
- pub value: R,
-}
-
-/// Indicates whether or not we were able to prove the query to be
-/// true.
-#[derive(Copy, Clone, Debug)]
-pub enum Certainty {
- /// The query is known to be true, presuming that you apply the
- /// given `var_values` and the region-constraints are satisfied.
- Proven,
-
- /// The query is not known to be true, but also not known to be
- /// false. The `var_values` represent *either* values that must
- /// hold in order for the query to be true, or helpful tips that
- /// *might* make it true. Currently rustc's trait solver cannot
- /// distinguish the two (e.g., due to our preference for where
- /// clauses over impls).
- ///
- /// After some unifiations and things have been done, it makes
- /// sense to try and prove again -- of course, at that point, the
- /// canonical form will be different, making this a distinct
- /// query.
- Ambiguous,
-}
-
-impl Certainty {
- pub fn is_proven(&self) -> bool {
- match self {
- Certainty::Proven => true,
- Certainty::Ambiguous => false,
- }
- }
-
- pub fn is_ambiguous(&self) -> bool {
- !self.is_proven()
- }
-}
-
-impl<'tcx, R> QueryResult<'tcx, R> {
- pub fn is_proven(&self) -> bool {
- self.certainty.is_proven()
- }
-
- pub fn is_ambiguous(&self) -> bool {
- !self.is_proven()
- }
-}
-
-impl<'tcx, R> Canonical<'tcx, QueryResult<'tcx, R>> {
- pub fn is_proven(&self) -> bool {
- self.value.is_proven()
- }
-
- pub fn is_ambiguous(&self) -> bool {
- !self.is_proven()
- }
-}
-
-pub type QueryRegionConstraint<'tcx> = ty::Binder<ty::OutlivesPredicate<Kind<'tcx>, Region<'tcx>>>;
-
-/// Trait implemented by values that can be canonicalized. It mainly
-/// serves to identify the interning table we will use.
-pub trait Canonicalize<'gcx: 'tcx, 'tcx>: TypeFoldable<'tcx> + Lift<'gcx> {
- type Canonicalized: 'gcx + Debug;
-
- /// After a value has been fully canonicalized and lifted, this
- /// method will allocate it in a global arena.
- fn intern(
- gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized;
-}
-
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
- /// Creates a substitution S for the canonical value with fresh
- /// inference variables and applies it to the canonical value.
- /// Returns both the instantiated result *and* the substitution S.
- ///
- /// This is useful at the start of a query: it basically brings
- /// the canonical value "into scope" within your new infcx. At the
- /// end of processing, the substitution S (once canonicalized)
- /// then represents the values that you computed for each of the
- /// canonical inputs to your query.
- pub fn instantiate_canonical_with_fresh_inference_vars<T>(
- &self,
- span: Span,
- canonical: &Canonical<'tcx, T>,
- ) -> (T, CanonicalVarValues<'tcx>)
- where
- T: TypeFoldable<'tcx>,
- {
- let canonical_inference_vars =
- self.fresh_inference_vars_for_canonical_vars(span, canonical.variables);
- let result = canonical.substitute(self.tcx, &canonical_inference_vars);
- (result, canonical_inference_vars)
- }
-
- /// Given the "infos" about the canonical variables from some
- /// canonical, creates fresh inference variables with the same
- /// characteristics. You can then use `substitute` to instantiate
- /// the canonical variable with these inference variables.
- pub fn fresh_inference_vars_for_canonical_vars(
- &self,
- span: Span,
- variables: &Slice<CanonicalVarInfo>,
- ) -> CanonicalVarValues<'tcx> {
- let var_values: IndexVec<CanonicalVar, Kind<'tcx>> = variables
- .iter()
- .map(|info| self.fresh_inference_var_for_canonical_var(span, *info))
- .collect();
-
- CanonicalVarValues { var_values }
- }
-
- /// Given the "info" about a canonical variable, creates a fresh
- /// inference variable with the same characteristics.
- pub fn fresh_inference_var_for_canonical_var(
- &self,
- span: Span,
- cv_info: CanonicalVarInfo,
- ) -> Kind<'tcx> {
- match cv_info.kind {
- CanonicalVarKind::Ty(ty_kind) => {
- let ty = match ty_kind {
- CanonicalTyVarKind::General => {
- self.next_ty_var(
- TypeVariableOrigin::MiscVariable(span),
- )
- }
-
- CanonicalTyVarKind::Int => self.tcx.mk_int_var(self.next_int_var_id()),
-
- CanonicalTyVarKind::Float => self.tcx.mk_float_var(self.next_float_var_id()),
- };
- ty.into()
- }
-
- CanonicalVarKind::Region => {
- self.next_region_var(RegionVariableOrigin::MiscVariable(span)).into()
- }
- }
- }
-
- /// Given the (canonicalized) result to a canonical query,
- /// instantiates the result so it can be used, plugging in the
- /// values from the canonical query. (Note that the result may
- /// have been ambiguous; you should check the certainty level of
- /// the query before applying this function.)
- ///
- /// To get a good understanding of what is happening here, check
- /// out the [chapter in the rustc guide][c].
- ///
- /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#processing-the-canonicalized-query-result
- pub fn instantiate_query_result<R>(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- original_values: &CanonicalVarValues<'tcx>,
- query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
- ) -> InferResult<'tcx, R>
- where
- R: Debug + TypeFoldable<'tcx>,
- {
- debug!(
- "instantiate_query_result(original_values={:#?}, query_result={:#?})",
- original_values, query_result,
- );
-
- // Every canonical query result includes values for each of
- // the inputs to the query. Therefore, we begin by unifying
- // these values with the original inputs that were
- // canonicalized.
- let result_values = &query_result.value.var_values;
- assert_eq!(original_values.len(), result_values.len());
-
- // Quickly try to find initial values for the canonical
- // variables in the result in terms of the query. We do this
- // by iterating down the values that the query gave to each of
- // the canonical inputs. If we find that one of those values
- // is directly equal to one of the canonical variables in the
- // result, then we can type the corresponding value from the
- // input. See the example above.
- let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =
- IndexVec::from_elem_n(None, query_result.variables.len());
-
- // In terms of our example above, we are iterating over pairs like:
- // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
- for (original_value, result_value) in original_values.iter().zip(result_values) {
- match result_value.unpack() {
- UnpackedKind::Type(result_value) => {
- // e.g., here `result_value` might be `?0` in the example above...
- if let ty::TyInfer(ty::InferTy::CanonicalTy(index)) = result_value.sty {
- // in which case we would set `canonical_vars[0]` to `Some(?U)`.
- opt_values[index] = Some(original_value);
- }
- }
- UnpackedKind::Lifetime(result_value) => {
- // e.g., here `result_value` might be `'?1` in the example above...
- if let &ty::RegionKind::ReCanonical(index) = result_value {
- // in which case we would set `canonical_vars[0]` to `Some('static)`.
- opt_values[index] = Some(original_value);
- }
- }
- }
- }
-
- // Create a result substitution: if we found a value for a
- // given variable in the loop above, use that. Otherwise, use
- // a fresh inference variable.
- let result_subst = &CanonicalVarValues {
- var_values: query_result
- .variables
- .iter()
- .enumerate()
- .map(|(index, info)| match opt_values[CanonicalVar::new(index)] {
- Some(k) => k,
- None => self.fresh_inference_var_for_canonical_var(cause.span, *info),
- })
- .collect(),
- };
-
- // Unify the original values for the canonical variables in
- // the input with the value found in the query
- // post-substitution. Often, but not always, this is a no-op,
- // because we already found the mapping in the first step.
- let substituted_values = |index: CanonicalVar| -> Kind<'tcx> {
- query_result.substitute_projected(self.tcx, result_subst, |v| &v.var_values[index])
- };
- let mut obligations =
- self.unify_canonical_vars(cause, param_env, original_values, substituted_values)?
- .into_obligations();
-
- obligations.extend(self.query_region_constraints_into_obligations(
- cause,
- param_env,
- &query_result.value.region_constraints,
- result_subst,
- ));
-
- let user_result: R =
- query_result.substitute_projected(self.tcx, result_subst, |q_r| &q_r.value);
-
- Ok(InferOk {
- value: user_result,
- obligations,
- })
- }
-
- /// Converts the region constraints resulting from a query into an
- /// iterator of obligations.
- fn query_region_constraints_into_obligations<'a>(
- &'a self,
- cause: &'a ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
- result_subst: &'a CanonicalVarValues<'tcx>,
- ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {
- Box::new(unsubstituted_region_constraints.iter().map(move |constraint| {
- let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
- let k1 = substitute_value(self.tcx, result_subst, k1);
- let r2 = substitute_value(self.tcx, result_subst, r2);
- match k1.unpack() {
- UnpackedKind::Lifetime(r1) =>
- Obligation::new(
- cause.clone(),
- param_env,
- ty::Predicate::RegionOutlives(
- ty::Binder::dummy(ty::OutlivesPredicate(r1, r2))),
- ),
-
- UnpackedKind::Type(t1) =>
- Obligation::new(
- cause.clone(),
- param_env,
- ty::Predicate::TypeOutlives(
- ty::Binder::dummy(ty::OutlivesPredicate(t1, r2))),
- ),
- }
- })) as Box<dyn Iterator<Item = _>>
- }
-
- /// Given two sets of values for the same set of canonical variables, unify them.
- /// The second set is produced lazilly by supplying indices from the first set.
- fn unify_canonical_vars(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- variables1: &CanonicalVarValues<'tcx>,
- variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,
- ) -> InferResult<'tcx, ()> {
- self.commit_if_ok(|_| {
- let mut obligations = vec![];
- for (index, value1) in variables1.var_values.iter_enumerated() {
- let value2 = variables2(index);
-
- match (value1.unpack(), value2.unpack()) {
- (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
- obligations
- .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
- }
- (
- UnpackedKind::Lifetime(ty::ReErased),
- UnpackedKind::Lifetime(ty::ReErased),
- ) => {
- // no action needed
- }
- (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {
- obligations
- .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
- }
- _ => {
- bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
- }
- }
- }
- Ok(InferOk {
- value: (),
- obligations,
- })
- })
- }
-
- /// Canonicalizes a query value `V`. When we canonicalize a query,
- /// we not only canonicalize unbound inference variables, but we
- /// *also* replace all free regions whatsoever. So for example a
- /// query like `T: Trait<'static>` would be canonicalized to
- ///
- /// ```text
- /// T: Trait<'?0>
- /// ```
- ///
- /// with a mapping M that maps `'?0` to `'static`.
- ///
- /// To get a good understanding of what is happening here, check
- /// out the [chapter in the rustc guide][c].
- ///
- /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query
- pub fn canonicalize_query<V>(&self, value: &V) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
- where
- V: Canonicalize<'gcx, 'tcx>,
- {
- self.tcx.sess.perf_stats.queries_canonicalized.fetch_add(1, Ordering::Relaxed);
-
- Canonicalizer::canonicalize(
- value,
- Some(self),
- self.tcx,
- CanonicalizeAllFreeRegions(true),
- )
- }
-
- /// Canonicalizes a query *response* `V`. When we canonicalize a
- /// query response, we only canonicalize unbound inference
- /// variables, and we leave other free regions alone. So,
- /// continuing with the example from `canonicalize_query`, if
- /// there was an input query `T: Trait<'static>`, it would have
- /// been canonicalized to
- ///
- /// ```text
- /// T: Trait<'?0>
- /// ```
- ///
- /// with a mapping M that maps `'?0` to `'static`. But if we found that there
- /// exists only one possible impl of `Trait`, and it looks like
- ///
- /// impl<T> Trait<'static> for T { .. }
- ///
- /// then we would prepare a query result R that (among other
- /// things) includes a mapping to `'?0 := 'static`. When
- /// canonicalizing this query result R, we would leave this
- /// reference to `'static` alone.
- ///
- /// To get a good understanding of what is happening here, check
- /// out the [chapter in the rustc guide][c].
- ///
- /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query-result
- pub fn canonicalize_response<V>(
- &self,
- value: &V,
- ) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
- where
- V: Canonicalize<'gcx, 'tcx>,
- {
- Canonicalizer::canonicalize(
- value,
- Some(self),
- self.tcx,
- CanonicalizeAllFreeRegions(false),
- )
- }
-}
-
-/// If this flag is true, then all free regions will be replaced with
-/// a canonical var. This is used to make queries as generic as
-/// possible. For example, the query `F: Foo<'static>` would be
-/// canonicalized to `F: Foo<'0>`.
-struct CanonicalizeAllFreeRegions(bool);
-
-struct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- variables: IndexVec<CanonicalVar, CanonicalVarInfo>,
- indices: FxHashMap<Kind<'tcx>, CanonicalVar>,
- var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
- canonicalize_all_free_regions: CanonicalizeAllFreeRegions,
- needs_canonical_flags: TypeFlags,
-}
-
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
- self.tcx
- }
-
- fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
- match *r {
- ty::ReLateBound(..) => {
- // leave bound regions alone
- r
- }
-
- ty::ReVar(vid) => {
- let r = self.infcx
- .unwrap()
- .borrow_region_constraints()
- .opportunistic_resolve_var(self.tcx, vid);
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Region,
- };
- debug!(
- "canonical: region var found with vid {:?}, \
- opportunistically resolved to {:?}",
- vid, r
- );
- let cvar = self.canonical_var(info, r.into());
- self.tcx().mk_region(ty::ReCanonical(cvar))
- }
-
- ty::ReStatic
- | ty::ReEarlyBound(..)
- | ty::ReFree(_)
- | ty::ReScope(_)
- | ty::ReSkolemized(..)
- | ty::ReEmpty
- | ty::ReErased => {
- if self.canonicalize_all_free_regions.0 {
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Region,
- };
- let cvar = self.canonical_var(info, r.into());
- self.tcx().mk_region(ty::ReCanonical(cvar))
- } else {
- r
- }
- }
-
- ty::ReClosureBound(..) | ty::ReCanonical(_) => {
- bug!("canonical region encountered during canonicalization")
- }
- }
- }
-
- fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
- match t.sty {
- ty::TyInfer(ty::TyVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::General, t),
-
- ty::TyInfer(ty::IntVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Int, t),
-
- ty::TyInfer(ty::FloatVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Float, t),
-
- ty::TyInfer(ty::FreshTy(_))
- | ty::TyInfer(ty::FreshIntTy(_))
- | ty::TyInfer(ty::FreshFloatTy(_)) => {
- bug!("encountered a fresh type during canonicalization")
- }
-
- ty::TyInfer(ty::CanonicalTy(_)) => {
- bug!("encountered a canonical type during canonicalization")
- }
-
- ty::TyClosure(..)
- | ty::TyGenerator(..)
- | ty::TyGeneratorWitness(..)
- | ty::TyBool
- | ty::TyChar
- | ty::TyInt(..)
- | ty::TyUint(..)
- | ty::TyFloat(..)
- | ty::TyAdt(..)
- | ty::TyStr
- | ty::TyError
- | ty::TyArray(..)
- | ty::TySlice(..)
- | ty::TyRawPtr(..)
- | ty::TyRef(..)
- | ty::TyFnDef(..)
- | ty::TyFnPtr(_)
- | ty::TyDynamic(..)
- | ty::TyNever
- | ty::TyTuple(..)
- | ty::TyProjection(..)
- | ty::TyForeign(..)
- | ty::TyParam(..)
- | ty::TyAnon(..) => {
- if t.flags.intersects(self.needs_canonical_flags) {
- t.super_fold_with(self)
- } else {
- t
- }
- }
- }
- }
-}
-
-impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {
- /// The main `canonicalize` method, shared impl of
- /// `canonicalize_query` and `canonicalize_response`.
- fn canonicalize<V>(
- value: &V,
- infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- canonicalize_all_free_regions: CanonicalizeAllFreeRegions,
- ) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
- where
- V: Canonicalize<'gcx, 'tcx>,
- {
- debug_assert!(
- !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),
- "canonicalizing a canonical value: {:?}",
- value,
- );
-
- let needs_canonical_flags = if canonicalize_all_free_regions.0 {
- TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX
- } else {
- TypeFlags::KEEP_IN_LOCAL_TCX
- };
-
- let gcx = tcx.global_tcx();
-
- // Fast path: nothing that needs to be canonicalized.
- if !value.has_type_flags(needs_canonical_flags) {
- let out_value = gcx.lift(value).unwrap();
- let canon_value = V::intern(
- gcx,
- Canonical {
- variables: Slice::empty(),
- value: out_value,
- },
- );
- let values = CanonicalVarValues {
- var_values: IndexVec::default(),
- };
- return (canon_value, values);
- }
-
- let mut canonicalizer = Canonicalizer {
- infcx,
- tcx,
- canonicalize_all_free_regions,
- needs_canonical_flags,
- variables: IndexVec::default(),
- indices: FxHashMap::default(),
- var_values: IndexVec::default(),
- };
- let out_value = value.fold_with(&mut canonicalizer);
-
- // Once we have canonicalized `out_value`, it should not
- // contain anything that ties it to this inference context
- // anymore, so it should live in the global arena.
- let out_value = gcx.lift(&out_value).unwrap_or_else(|| {
- bug!(
- "failed to lift `{:?}`, canonicalized from `{:?}`",
- out_value,
- value
- )
- });
-
- let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables.raw);
-
- let canonical_value = V::intern(
- gcx,
- Canonical {
- variables: canonical_variables,
- value: out_value,
- },
- );
- let canonical_var_values = CanonicalVarValues {
- var_values: canonicalizer.var_values,
- };
- (canonical_value, canonical_var_values)
- }
-
- /// Creates a canonical variable replacing `kind` from the input,
- /// or returns an existing variable if `kind` has already been
- /// seen. `kind` is expected to be an unbound variable (or
- /// potentially a free region).
- fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {
- let Canonicalizer {
- indices,
- variables,
- var_values,
- ..
- } = self;
-
- indices
- .entry(kind)
- .or_insert_with(|| {
- let cvar1 = variables.push(info);
- let cvar2 = var_values.push(kind);
- assert_eq!(cvar1, cvar2);
- cvar1
- })
- .clone()
- }
-
- /// Given a type variable `ty_var` of the given kind, first check
- /// if `ty_var` is bound to anything; if so, canonicalize
- /// *that*. Otherwise, create a new canonical variable for
- /// `ty_var`.
- fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>) -> Ty<'tcx> {
- let infcx = self.infcx.expect("encountered ty-var without infcx");
- let bound_to = infcx.shallow_resolve(ty_var);
- if bound_to != ty_var {
- self.fold_ty(bound_to)
- } else {
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Ty(ty_kind),
- };
- let cvar = self.canonical_var(info, ty_var.into());
- self.tcx().mk_infer(ty::InferTy::CanonicalTy(cvar))
- }
- }
-}
-
-impl<'tcx, V> Canonical<'tcx, V> {
- /// Instantiate the wrapped value, replacing each canonical value
- /// with the value given in `var_values`.
- fn substitute(&self, tcx: TyCtxt<'_, '_, 'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
- where
- V: TypeFoldable<'tcx>,
- {
- self.substitute_projected(tcx, var_values, |value| value)
- }
-
- /// Invoke `projection_fn` with `self.value` to get a value V that
- /// is expressed in terms of the same canonical variables bound in
- /// `self`. Apply the substitution `var_values` to this value V,
- /// replacing each of the canonical variables.
- fn substitute_projected<T>(
- &self,
- tcx: TyCtxt<'_, '_, 'tcx>,
- var_values: &CanonicalVarValues<'tcx>,
- projection_fn: impl FnOnce(&V) -> &T,
- ) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- assert_eq!(self.variables.len(), var_values.var_values.len());
- let value = projection_fn(&self.value);
- substitute_value(tcx, var_values, value)
- }
-}
-
-/// Substitute the values from `var_values` into `value`. `var_values`
-/// must be values for the set of cnaonical variables that appear in
-/// `value`.
-fn substitute_value<'a, 'tcx, T>(
- tcx: TyCtxt<'_, '_, 'tcx>,
- var_values: &CanonicalVarValues<'tcx>,
- value: &'a T,
-) -> T
-where
- T: TypeFoldable<'tcx>,
-{
- if var_values.var_values.is_empty() {
- debug_assert!(!value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS));
- value.clone()
- } else if !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
- value.clone()
- } else {
- value.fold_with(&mut CanonicalVarValuesSubst { tcx, var_values })
- }
-}
-
-struct CanonicalVarValuesSubst<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- var_values: &'cx CanonicalVarValues<'tcx>,
-}
-
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for CanonicalVarValuesSubst<'cx, 'gcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
- self.tcx
- }
-
- fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
- match t.sty {
- ty::TyInfer(ty::InferTy::CanonicalTy(c)) => {
- match self.var_values.var_values[c].unpack() {
- UnpackedKind::Type(ty) => ty,
- r => bug!("{:?} is a type but value is {:?}", c, r),
- }
- }
- _ => {
- if !t.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
- t
- } else {
- t.super_fold_with(self)
- }
- }
- }
- }
-
- fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
- match r {
- ty::RegionKind::ReCanonical(c) => match self.var_values.var_values[*c].unpack() {
- UnpackedKind::Lifetime(l) => l,
- r => bug!("{:?} is a region but value is {:?}", c, r),
- },
- _ => r.super_fold_with(self),
- }
- }
-}
-
-CloneTypeFoldableAndLiftImpls! {
- ::infer::canonical::Certainty,
- ::infer::canonical::CanonicalVarInfo,
- ::infer::canonical::CanonicalVarKind,
-}
-
-CloneTypeFoldableImpls! {
- for <'tcx> {
- ::infer::canonical::CanonicalVarInfos<'tcx>,
- }
-}
-
-BraceStructTypeFoldableImpl! {
- impl<'tcx, C> TypeFoldable<'tcx> for Canonical<'tcx, C> {
- variables,
- value,
- } where C: TypeFoldable<'tcx>
-}
-
-BraceStructLiftImpl! {
- impl<'a, 'tcx, T> Lift<'tcx> for Canonical<'a, T> {
- type Lifted = Canonical<'tcx, T::Lifted>;
- variables, value
- } where T: Lift<'tcx>
-}
-
-impl<'tcx> CanonicalVarValues<'tcx> {
- fn iter<'a>(&'a self) -> impl Iterator<Item = Kind<'tcx>> + 'a {
- self.var_values.iter().cloned()
- }
-
- fn len(&self) -> usize {
- self.var_values.len()
- }
-}
-
-impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> {
- type Item = Kind<'tcx>;
- type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, Kind<'tcx>>>;
-
- fn into_iter(self) -> Self::IntoIter {
- self.var_values.iter().cloned()
- }
-}
-
-BraceStructLiftImpl! {
- impl<'a, 'tcx> Lift<'tcx> for CanonicalVarValues<'a> {
- type Lifted = CanonicalVarValues<'tcx>;
- var_values,
- }
-}
-
-BraceStructTypeFoldableImpl! {
- impl<'tcx> TypeFoldable<'tcx> for CanonicalVarValues<'tcx> {
- var_values,
- }
-}
-
-BraceStructTypeFoldableImpl! {
- impl<'tcx, R> TypeFoldable<'tcx> for QueryResult<'tcx, R> {
- var_values, region_constraints, certainty, value
- } where R: TypeFoldable<'tcx>,
-}
-
-BraceStructLiftImpl! {
- impl<'a, 'tcx, R> Lift<'tcx> for QueryResult<'a, R> {
- type Lifted = QueryResult<'tcx, R::Lifted>;
- var_values, region_constraints, certainty, value
- } where R: Lift<'tcx>
-}
-
-impl<'tcx> Index<CanonicalVar> for CanonicalVarValues<'tcx> {
- type Output = Kind<'tcx>;
-
- fn index(&self, value: CanonicalVar) -> &Kind<'tcx> {
- &self.var_values[value]
- }
-}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains the "canonicalizer" itself.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::{
+ Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, CanonicalVarValues,
+ Canonicalized,
+};
+use infer::InferCtxt;
+use std::sync::atomic::Ordering;
+use ty::fold::{TypeFoldable, TypeFolder};
+use ty::subst::Kind;
+use ty::{self, CanonicalVar, Lift, Slice, Ty, TyCtxt, TypeFlags};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::indexed_vec::IndexVec;
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+ /// Canonicalizes a query value `V`. When we canonicalize a query,
+ /// we not only canonicalize unbound inference variables, but we
+ /// *also* replace all free regions whatsoever. So for example a
+ /// query like `T: Trait<'static>` would be canonicalized to
+ ///
+ /// ```text
+ /// T: Trait<'?0>
+ /// ```
+ ///
+ /// with a mapping M that maps `'?0` to `'static`.
+ ///
+ /// To get a good understanding of what is happening here, check
+ /// out the [chapter in the rustc guide][c].
+ ///
+ /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query
+ pub fn canonicalize_query<V>(
+ &self,
+ value: &V,
+ ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+ where
+ V: TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ self.tcx
+ .sess
+ .perf_stats
+ .queries_canonicalized
+ .fetch_add(1, Ordering::Relaxed);
+
+ Canonicalizer::canonicalize(
+ value,
+ Some(self),
+ self.tcx,
+ CanonicalizeRegionMode {
+ static_region: true,
+ other_free_regions: true,
+ },
+ )
+ }
+
+ /// Canonicalizes a query *response* `V`. When we canonicalize a
+ /// query response, we only canonicalize unbound inference
+ /// variables, and we leave other free regions alone. So,
+ /// continuing with the example from `canonicalize_query`, if
+ /// there was an input query `T: Trait<'static>`, it would have
+ /// been canonicalized to
+ ///
+ /// ```text
+ /// T: Trait<'?0>
+ /// ```
+ ///
+ /// with a mapping M that maps `'?0` to `'static`. But if we found that there
+ /// exists only one possible impl of `Trait`, and it looks like
+ ///
+ /// impl<T> Trait<'static> for T { .. }
+ ///
+ /// then we would prepare a query result R that (among other
+ /// things) includes a mapping to `'?0 := 'static`. When
+ /// canonicalizing this query result R, we would leave this
+ /// reference to `'static` alone.
+ ///
+ /// To get a good understanding of what is happening here, check
+ /// out the [chapter in the rustc guide][c].
+ ///
+ /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query-result
+ pub fn canonicalize_response<V>(
+ &self,
+ value: &V,
+ ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+ where
+ V: TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ Canonicalizer::canonicalize(
+ value,
+ Some(self),
+ self.tcx,
+ CanonicalizeRegionMode {
+ static_region: false,
+ other_free_regions: false,
+ },
+ )
+ }
+
+ /// A hacky variant of `canonicalize_query` that does not
+ /// canonicalize `'static`. Unfortunately, the existing leak
+    /// check treats `'static` differently in some cases (see also
+ /// #33684), so if we are performing an operation that may need to
+ /// prove "leak-check" related things, we leave `'static`
+ /// alone.
+ ///
+ /// FIXME(#48536) -- once we have universes, we can remove this and just use
+ /// `canonicalize_query`.
+ pub fn canonicalize_hr_query_hack<V>(
+ &self,
+ value: &V,
+ ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+ where
+ V: TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ self.tcx
+ .sess
+ .perf_stats
+ .queries_canonicalized
+ .fetch_add(1, Ordering::Relaxed);
+
+ Canonicalizer::canonicalize(
+ value,
+ Some(self),
+ self.tcx,
+ CanonicalizeRegionMode {
+ static_region: false,
+ other_free_regions: true,
+ },
+ )
+ }
+}
+
+/// If this flag is true, then all free regions will be replaced with
+/// a canonical var. This is used to make queries as generic as
+/// possible. For example, the query `F: Foo<'static>` would be
+/// canonicalized to `F: Foo<'0>`.
+struct CanonicalizeRegionMode {
+ static_region: bool,
+ other_free_regions: bool,
+}
+
+impl CanonicalizeRegionMode {
+ fn any(&self) -> bool {
+ self.static_region || self.other_free_regions
+ }
+}
+
+struct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+ infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ variables: IndexVec<CanonicalVar, CanonicalVarInfo>,
+ indices: FxHashMap<Kind<'tcx>, CanonicalVar>,
+ var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
+ canonicalize_region_mode: CanonicalizeRegionMode,
+ needs_canonical_flags: TypeFlags,
+}
+
+impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+ self.tcx
+ }
+
+ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+ match *r {
+ ty::ReLateBound(..) => {
+ // leave bound regions alone
+ r
+ }
+
+ ty::ReVar(vid) => {
+ let r = self
+ .infcx
+ .unwrap()
+ .borrow_region_constraints()
+ .opportunistic_resolve_var(self.tcx, vid);
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Region,
+ };
+ debug!(
+ "canonical: region var found with vid {:?}, \
+ opportunistically resolved to {:?}",
+ vid, r
+ );
+ let cvar = self.canonical_var(info, r.into());
+ self.tcx().mk_region(ty::ReCanonical(cvar))
+ }
+
+ ty::ReStatic => {
+ if self.canonicalize_region_mode.static_region {
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Region,
+ };
+ let cvar = self.canonical_var(info, r.into());
+ self.tcx().mk_region(ty::ReCanonical(cvar))
+ } else {
+ r
+ }
+ }
+
+ ty::ReEarlyBound(..)
+ | ty::ReFree(_)
+ | ty::ReScope(_)
+ | ty::ReSkolemized(..)
+ | ty::ReEmpty
+ | ty::ReErased => {
+ if self.canonicalize_region_mode.other_free_regions {
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Region,
+ };
+ let cvar = self.canonical_var(info, r.into());
+ self.tcx().mk_region(ty::ReCanonical(cvar))
+ } else {
+ r
+ }
+ }
+
+ ty::ReClosureBound(..) | ty::ReCanonical(_) => {
+ bug!("canonical region encountered during canonicalization")
+ }
+ }
+ }
+
+ fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+ match t.sty {
+ ty::TyInfer(ty::TyVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::General, t),
+
+ ty::TyInfer(ty::IntVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Int, t),
+
+ ty::TyInfer(ty::FloatVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Float, t),
+
+ ty::TyInfer(ty::FreshTy(_))
+ | ty::TyInfer(ty::FreshIntTy(_))
+ | ty::TyInfer(ty::FreshFloatTy(_)) => {
+ bug!("encountered a fresh type during canonicalization")
+ }
+
+ ty::TyInfer(ty::CanonicalTy(_)) => {
+ bug!("encountered a canonical type during canonicalization")
+ }
+
+ ty::TyClosure(..)
+ | ty::TyGenerator(..)
+ | ty::TyGeneratorWitness(..)
+ | ty::TyBool
+ | ty::TyChar
+ | ty::TyInt(..)
+ | ty::TyUint(..)
+ | ty::TyFloat(..)
+ | ty::TyAdt(..)
+ | ty::TyStr
+ | ty::TyError
+ | ty::TyArray(..)
+ | ty::TySlice(..)
+ | ty::TyRawPtr(..)
+ | ty::TyRef(..)
+ | ty::TyFnDef(..)
+ | ty::TyFnPtr(_)
+ | ty::TyDynamic(..)
+ | ty::TyNever
+ | ty::TyTuple(..)
+ | ty::TyProjection(..)
+ | ty::TyForeign(..)
+ | ty::TyParam(..)
+ | ty::TyAnon(..) => {
+ if t.flags.intersects(self.needs_canonical_flags) {
+ t.super_fold_with(self)
+ } else {
+ t
+ }
+ }
+ }
+ }
+}
+
+impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {
+ /// The main `canonicalize` method, shared impl of
+ /// `canonicalize_query` and `canonicalize_response`.
+ fn canonicalize<V>(
+ value: &V,
+ infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ canonicalize_region_mode: CanonicalizeRegionMode,
+ ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+ where
+ V: TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ debug_assert!(
+ !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),
+ "canonicalizing a canonical value: {:?}",
+ value,
+ );
+
+ let needs_canonical_flags = if canonicalize_region_mode.any() {
+ TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX
+ } else {
+ TypeFlags::KEEP_IN_LOCAL_TCX
+ };
+
+ let gcx = tcx.global_tcx();
+
+ // Fast path: nothing that needs to be canonicalized.
+ if !value.has_type_flags(needs_canonical_flags) {
+ let out_value = gcx.lift(value).unwrap();
+ let canon_value = Canonical {
+ variables: Slice::empty(),
+ value: out_value,
+ };
+ let values = CanonicalVarValues {
+ var_values: IndexVec::default(),
+ };
+ return (canon_value, values);
+ }
+
+ let mut canonicalizer = Canonicalizer {
+ infcx,
+ tcx,
+ canonicalize_region_mode,
+ needs_canonical_flags,
+ variables: IndexVec::default(),
+ indices: FxHashMap::default(),
+ var_values: IndexVec::default(),
+ };
+ let out_value = value.fold_with(&mut canonicalizer);
+
+ // Once we have canonicalized `out_value`, it should not
+ // contain anything that ties it to this inference context
+ // anymore, so it should live in the global arena.
+ let out_value = gcx.lift(&out_value).unwrap_or_else(|| {
+ bug!(
+ "failed to lift `{:?}`, canonicalized from `{:?}`",
+ out_value,
+ value
+ )
+ });
+
+ let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables.raw);
+
+ let canonical_value = Canonical {
+ variables: canonical_variables,
+ value: out_value,
+ };
+ let canonical_var_values = CanonicalVarValues {
+ var_values: canonicalizer.var_values,
+ };
+ (canonical_value, canonical_var_values)
+ }
+
+ /// Creates a canonical variable replacing `kind` from the input,
+ /// or returns an existing variable if `kind` has already been
+ /// seen. `kind` is expected to be an unbound variable (or
+ /// potentially a free region).
+ fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {
+ let Canonicalizer {
+ indices,
+ variables,
+ var_values,
+ ..
+ } = self;
+
+ indices
+ .entry(kind)
+ .or_insert_with(|| {
+ let cvar1 = variables.push(info);
+ let cvar2 = var_values.push(kind);
+ assert_eq!(cvar1, cvar2);
+ cvar1
+ })
+ .clone()
+ }
+
+ /// Given a type variable `ty_var` of the given kind, first check
+ /// if `ty_var` is bound to anything; if so, canonicalize
+ /// *that*. Otherwise, create a new canonical variable for
+ /// `ty_var`.
+ fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>) -> Ty<'tcx> {
+ let infcx = self.infcx.expect("encountered ty-var without infcx");
+ let bound_to = infcx.shallow_resolve(ty_var);
+ if bound_to != ty_var {
+ self.fold_ty(bound_to)
+ } else {
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Ty(ty_kind),
+ };
+ let cvar = self.canonical_var(info, ty_var.into());
+ self.tcx().mk_infer(ty::InferTy::CanonicalTy(cvar))
+ }
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! **Canonicalization** is the key to constructing a query in the
+//! middle of type inference. Ordinarily, it is not possible to store
+//! types from type inference in query keys, because they contain
+//! references to inference variables whose lifetimes are too short
+//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
+//! produces two things:
+//!
+//! - a value T2 where each unbound inference variable has been
+//! replaced with a **canonical variable**;
+//! - a map M (of type `CanonicalVarValues`) from those canonical
+//! variables back to the original.
+//!
+//! We can then do queries using T2. These will give back constraints
+//! on the canonical variables which can be translated, using the map
+//! M, into constraints in our source context. This process of
+//! translating the results back is done by the
+//! `instantiate_query_result` method.
+//!
+//! For a more detailed look at what is happening here, check
+//! out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::{InferCtxt, RegionVariableOrigin, TypeVariableOrigin};
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::Lrc;
+use serialize::UseSpecializedDecodable;
+use std::ops::Index;
+use syntax::codemap::Span;
+use ty::fold::TypeFoldable;
+use ty::subst::Kind;
+use ty::{self, CanonicalVar, Lift, Region, Slice, TyCtxt};
+
+mod canonicalizer;
+
+pub mod query_result;
+
+mod substitute;
+
+/// A "canonicalized" type `V` is one where all free inference
+/// variables have been rewritten to "canonical vars". These are
+/// numbered starting from 0 in order of first appearance.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct Canonical<'gcx, V> {
+ pub variables: CanonicalVarInfos<'gcx>,
+ pub value: V,
+}
+
+pub type CanonicalVarInfos<'gcx> = &'gcx Slice<CanonicalVarInfo>;
+
+impl<'gcx> UseSpecializedDecodable for CanonicalVarInfos<'gcx> {}
+
+/// A set of values corresponding to the canonical variables from some
+/// `Canonical`. You can give these values to
+/// `canonical_value.substitute` to substitute them into the canonical
+/// value at the right places.
+///
+/// When you canonicalize a value `V`, you get back one of these
+/// vectors with the original values that were replaced by canonical
+/// variables. You will need to supply it later to instantiate the
+/// canonicalized query response.
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct CanonicalVarValues<'tcx> {
+ pub var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
+}
+
+/// Information about a canonical variable that is included with the
+/// canonical value. This is sufficient information for code to create
+/// a copy of the canonical value in some other inference context,
+/// with fresh inference variables replacing the canonical values.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct CanonicalVarInfo {
+ pub kind: CanonicalVarKind,
+}
+
+/// Describes the "kind" of the canonical variable. This is a "kind"
+/// in the type-theory sense of the term -- i.e., a "meta" type system
+/// that analyzes type-like values.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub enum CanonicalVarKind {
+ /// Some kind of type inference variable.
+ Ty(CanonicalTyVarKind),
+
+ /// Region variable `'?R`.
+ Region,
+}
+
+/// Rust actually has more than one category of type variables;
+/// notably, the type variables we create for literals (e.g., 22 or
+/// 22.) can only be instantiated with integral/float types (e.g.,
+/// usize or f32). In order to faithfully reproduce a type, we need to
+/// know what set of types a given type variable can be unified with.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub enum CanonicalTyVarKind {
+ /// General type variable `?T` that can be unified with arbitrary types.
+ General,
+
+ /// Integral type variable `?I` (that can only be unified with integral types).
+ Int,
+
+ /// Floating-point type variable `?F` (that can only be unified with float types).
+ Float,
+}
+
+/// After we execute a query with a canonicalized key, we get back a
+/// `Canonical<QueryResult<..>>`. You can use
+/// `instantiate_query_result` to access the data in this result.
+#[derive(Clone, Debug)]
+pub struct QueryResult<'tcx, R> {
+ pub var_values: CanonicalVarValues<'tcx>,
+ pub region_constraints: Vec<QueryRegionConstraint<'tcx>>,
+ pub certainty: Certainty,
+ pub value: R,
+}
+
+pub type Canonicalized<'gcx, V> = Canonical<'gcx, <V as Lift<'gcx>>::Lifted>;
+
+pub type CanonicalizedQueryResult<'gcx, T> =
+ Lrc<Canonical<'gcx, QueryResult<'gcx, <T as Lift<'gcx>>::Lifted>>>;
+
+/// Indicates whether or not we were able to prove the query to be
+/// true.
+#[derive(Copy, Clone, Debug)]
+pub enum Certainty {
+ /// The query is known to be true, presuming that you apply the
+ /// given `var_values` and the region-constraints are satisfied.
+ Proven,
+
+ /// The query is not known to be true, but also not known to be
+ /// false. The `var_values` represent *either* values that must
+ /// hold in order for the query to be true, or helpful tips that
+ /// *might* make it true. Currently rustc's trait solver cannot
+ /// distinguish the two (e.g., due to our preference for where
+ /// clauses over impls).
+ ///
+/// After some unifications and things have been done, it makes
+ /// sense to try and prove again -- of course, at that point, the
+ /// canonical form will be different, making this a distinct
+ /// query.
+ Ambiguous,
+}
+
+impl Certainty {
+ pub fn is_proven(&self) -> bool {
+ match self {
+ Certainty::Proven => true,
+ Certainty::Ambiguous => false,
+ }
+ }
+
+ pub fn is_ambiguous(&self) -> bool {
+ !self.is_proven()
+ }
+}
+
+impl<'tcx, R> QueryResult<'tcx, R> {
+ pub fn is_proven(&self) -> bool {
+ self.certainty.is_proven()
+ }
+
+ pub fn is_ambiguous(&self) -> bool {
+ !self.is_proven()
+ }
+}
+
+impl<'tcx, R> Canonical<'tcx, QueryResult<'tcx, R>> {
+ pub fn is_proven(&self) -> bool {
+ self.value.is_proven()
+ }
+
+ pub fn is_ambiguous(&self) -> bool {
+ !self.is_proven()
+ }
+}
+
+pub type QueryRegionConstraint<'tcx> = ty::Binder<ty::OutlivesPredicate<Kind<'tcx>, Region<'tcx>>>;
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+ /// Creates a substitution S for the canonical value with fresh
+ /// inference variables and applies it to the canonical value.
+ /// Returns both the instantiated result *and* the substitution S.
+ ///
+ /// This is useful at the start of a query: it basically brings
+ /// the canonical value "into scope" within your new infcx. At the
+ /// end of processing, the substitution S (once canonicalized)
+ /// then represents the values that you computed for each of the
+ /// canonical inputs to your query.
+ pub fn instantiate_canonical_with_fresh_inference_vars<T>(
+ &self,
+ span: Span,
+ canonical: &Canonical<'tcx, T>,
+ ) -> (T, CanonicalVarValues<'tcx>)
+ where
+ T: TypeFoldable<'tcx>,
+ {
+ let canonical_inference_vars =
+ self.fresh_inference_vars_for_canonical_vars(span, canonical.variables);
+ let result = canonical.substitute(self.tcx, &canonical_inference_vars);
+ (result, canonical_inference_vars)
+ }
+
+ /// Given the "infos" about the canonical variables from some
+ /// canonical, creates fresh inference variables with the same
+ /// characteristics. You can then use `substitute` to instantiate
+ /// the canonical variable with these inference variables.
+ fn fresh_inference_vars_for_canonical_vars(
+ &self,
+ span: Span,
+ variables: &Slice<CanonicalVarInfo>,
+ ) -> CanonicalVarValues<'tcx> {
+ let var_values: IndexVec<CanonicalVar, Kind<'tcx>> = variables
+ .iter()
+ .map(|info| self.fresh_inference_var_for_canonical_var(span, *info))
+ .collect();
+
+ CanonicalVarValues { var_values }
+ }
+
+ /// Given the "info" about a canonical variable, creates a fresh
+ /// inference variable with the same characteristics.
+ fn fresh_inference_var_for_canonical_var(
+ &self,
+ span: Span,
+ cv_info: CanonicalVarInfo,
+ ) -> Kind<'tcx> {
+ match cv_info.kind {
+ CanonicalVarKind::Ty(ty_kind) => {
+ let ty = match ty_kind {
+ CanonicalTyVarKind::General => {
+ self.next_ty_var(TypeVariableOrigin::MiscVariable(span))
+ }
+
+ CanonicalTyVarKind::Int => self.tcx.mk_int_var(self.next_int_var_id()),
+
+ CanonicalTyVarKind::Float => self.tcx.mk_float_var(self.next_float_var_id()),
+ };
+ ty.into()
+ }
+
+ CanonicalVarKind::Region => self
+ .next_region_var(RegionVariableOrigin::MiscVariable(span))
+ .into(),
+ }
+ }
+}
+
+CloneTypeFoldableAndLiftImpls! {
+ ::infer::canonical::Certainty,
+ ::infer::canonical::CanonicalVarInfo,
+ ::infer::canonical::CanonicalVarKind,
+}
+
+CloneTypeFoldableImpls! {
+ for <'tcx> {
+ ::infer::canonical::CanonicalVarInfos<'tcx>,
+ }
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx, C> TypeFoldable<'tcx> for Canonical<'tcx, C> {
+ variables,
+ value,
+ } where C: TypeFoldable<'tcx>
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx, T> Lift<'tcx> for Canonical<'a, T> {
+ type Lifted = Canonical<'tcx, T::Lifted>;
+ variables, value
+ } where T: Lift<'tcx>
+}
+
+impl<'tcx> CanonicalVarValues<'tcx> {
+ fn iter<'a>(&'a self) -> impl Iterator<Item = Kind<'tcx>> + 'a {
+ self.var_values.iter().cloned()
+ }
+
+ fn len(&self) -> usize {
+ self.var_values.len()
+ }
+}
+
+impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> {
+ type Item = Kind<'tcx>;
+ type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, Kind<'tcx>>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.var_values.iter().cloned()
+ }
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx> Lift<'tcx> for CanonicalVarValues<'a> {
+ type Lifted = CanonicalVarValues<'tcx>;
+ var_values,
+ }
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for CanonicalVarValues<'tcx> {
+ var_values,
+ }
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx, R> TypeFoldable<'tcx> for QueryResult<'tcx, R> {
+ var_values, region_constraints, certainty, value
+ } where R: TypeFoldable<'tcx>,
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx, R> Lift<'tcx> for QueryResult<'a, R> {
+ type Lifted = QueryResult<'tcx, R::Lifted>;
+ var_values, region_constraints, certainty, value
+ } where R: Lift<'tcx>
+}
+
+impl<'tcx> Index<CanonicalVar> for CanonicalVarValues<'tcx> {
+ type Output = Kind<'tcx>;
+
+ fn index(&self, value: CanonicalVar) -> &Kind<'tcx> {
+ &self.var_values[value]
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains the code to instantiate a "query result", and
+//! in particular to extract out the resulting region obligations and
+//! encode them therein.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::substitute::substitute_value;
+use infer::canonical::{Canonical, CanonicalVarKind, CanonicalVarValues, CanonicalizedQueryResult,
+ Certainty, QueryRegionConstraint, QueryResult};
+use infer::region_constraints::{Constraint, RegionConstraintData};
+use infer::InferCtxtBuilder;
+use infer::{InferCtxt, InferOk, InferResult, RegionObligation};
+use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::Lrc;
+use std::fmt::Debug;
+use syntax::ast;
+use syntax_pos::DUMMY_SP;
+use traits::query::{Fallible, NoSolution};
+use traits::{FulfillmentContext, TraitEngine};
+use traits::{Obligation, ObligationCause, PredicateObligation};
+use ty::fold::TypeFoldable;
+use ty::subst::{Kind, UnpackedKind};
+use ty::{self, CanonicalVar, Lift, TyCtxt};
+
+impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> {
+ /// The "main method" for a canonicalized trait query. Given the
+ /// canonical key `canonical_key`, this method will create a new
+ /// inference context, instantiate the key, and run your operation
+ /// `op`. The operation should yield up a result (of type `R`) as
+ /// well as a set of trait obligations that must be fully
+ /// satisfied. These obligations will be processed and the
+ /// canonical result created.
+ ///
+ /// Returns `NoSolution` in the event of any error.
+ ///
+ /// (It might be mildly nicer to implement this on `TyCtxt`, and
+ /// not `InferCtxtBuilder`, but that is a bit tricky right now.
+ /// In part because we would need a `for<'gcx: 'tcx>` sort of
+ /// bound for the closure and in part because it is convenient to
+ /// have `'tcx` be free on this function so that we can talk about
+ /// `K: TypeFoldable<'tcx>`.)
+ pub fn enter_canonical_trait_query<K, R>(
+ &'tcx mut self,
+ canonical_key: &Canonical<'tcx, K>,
+ operation: impl FnOnce(&InferCtxt<'_, 'gcx, 'tcx>, &mut FulfillmentContext<'tcx>, K)
+ -> Fallible<R>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, R>>
+ where
+ K: TypeFoldable<'tcx>,
+ R: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+ {
+ self.enter(|ref infcx| {
+ // Replace each canonical variable in the key with a fresh
+ // inference variable, remembering the values chosen.
+ let (key, canonical_inference_vars) =
+ infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_key);
+ let fulfill_cx = &mut FulfillmentContext::new();
+ let value = operation(infcx, fulfill_cx, key)?;
+ // Process pending obligations and package the answer back up
+ // into a canonicalized query result.
+ infcx.make_canonicalized_query_result(canonical_inference_vars, value, fulfill_cx)
+ })
+ }
+}
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+ /// This method is meant to be invoked as the final step of a canonical query
+ /// implementation. It is given:
+ ///
+ /// - the instantiated variables `inference_vars` created from the query key
+ /// - the result `answer` of the query
+ /// - a fulfillment context `fulfill_cx` that may contain various obligations which
+ /// have yet to be proven.
+ ///
+ /// Given this, the function will process the obligations pending
+ /// in `fulfill_cx`:
+ ///
+ /// - If all the obligations can be proven successfully, it will
+ /// package up any resulting region obligations (extracted from
+ /// `infcx`) along with the fully resolved value `answer` into a
+ /// query result (which is then itself canonicalized).
+ /// - If some obligations can be neither proven nor disproven, then
+ /// the same thing happens, but the resulting query is marked as ambiguous.
+ /// - Finally, if any of the obligations result in a hard error,
+ /// then `Err(NoSolution)` is returned.
+ pub fn make_canonicalized_query_result<T>(
+ &self,
+ inference_vars: CanonicalVarValues<'tcx>,
+ answer: T,
+ fulfill_cx: &mut FulfillmentContext<'tcx>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, T>>
+ where
+ T: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+ {
+ let query_result = self.make_query_result(inference_vars, answer, fulfill_cx)?;
+ // Canonicalize the assembled result; the second element of the
+ // returned pair (the accompanying variable values) is not needed here.
+ let (canonical_result, _) = self.canonicalize_response(&query_result);
+
+ debug!(
+ "make_canonicalized_query_result: canonical_result = {:#?}",
+ canonical_result
+ );
+
+ // Wrap in `Lrc` so the result can be shared/cloned cheaply by callers.
+ Ok(Lrc::new(canonical_result))
+ }
+
+ /// Helper for `make_canonicalized_query_result` that does
+ /// everything up until the final canonicalization.
+ fn make_query_result<T>(
+ &self,
+ inference_vars: CanonicalVarValues<'tcx>,
+ answer: T,
+ fulfill_cx: &mut FulfillmentContext<'tcx>,
+ ) -> Result<QueryResult<'tcx, T>, NoSolution>
+ where
+ T: Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ let tcx = self.tcx;
+
+ debug!(
+ "make_query_result(\
+ inference_vars={:?}, \
+ answer={:?})",
+ inference_vars, answer,
+ );
+
+ // Select everything, returning errors.
+ let true_errors = match fulfill_cx.select_where_possible(self) {
+ Ok(()) => vec![],
+ Err(errors) => errors,
+ };
+ debug!("true_errors = {:#?}", true_errors);
+
+ if !true_errors.is_empty() {
+ // FIXME -- we don't indicate *why* we failed to solve
+ // NOTE(review): `true_errors` was already logged just above;
+ // this second `debug!` is redundant but harmless.
+ debug!("make_query_result: true_errors={:#?}", true_errors);
+ return Err(NoSolution);
+ }
+
+ // Anything left unselected *now* must be an ambiguity.
+ let ambig_errors = match fulfill_cx.select_all_or_error(self) {
+ Ok(()) => vec![],
+ Err(errors) => errors,
+ };
+ debug!("ambig_errors = {:#?}", ambig_errors);
+
+ // Drain the region obligations accumulated on `self` and convert
+ // them, together with the region constraints, into query form.
+ let region_obligations = self.take_registered_region_obligations();
+ let region_constraints = self.with_region_constraints(|region_constraints| {
+ make_query_outlives(tcx, region_obligations, region_constraints)
+ });
+
+ // Ambiguities downgrade the result from `Proven` to `Ambiguous`.
+ let certainty = if ambig_errors.is_empty() {
+ Certainty::Proven
+ } else {
+ Certainty::Ambiguous
+ };
+
+ Ok(QueryResult {
+ var_values: inference_vars,
+ region_constraints,
+ certainty,
+ value: answer,
+ })
+ }
+
+ /// Given the (canonicalized) result to a canonical query,
+ /// instantiates the result so it can be used, plugging in the
+ /// values from the canonical query. (Note that the result may
+ /// have been ambiguous; you should check the certainty level of
+ /// the query before applying this function.)
+ ///
+ /// To get a good understanding of what is happening here, check
+ /// out the [chapter in the rustc guide][c].
+ ///
+ /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#processing-the-canonicalized-query-result
+ pub fn instantiate_query_result_and_region_obligations<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ original_values: &CanonicalVarValues<'tcx>,
+ query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+ ) -> InferResult<'tcx, R>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ // Compute the substitution mapping canonical variables back to the
+ // caller's namespace; this may itself yield obligations.
+ let InferOk {
+ value: result_subst,
+ mut obligations,
+ } = self.query_result_substitution(cause, param_env, original_values, query_result)?;
+
+ // Turn the query's region constraints into obligations registered
+ // against this inference context.
+ obligations.extend(self.query_region_constraints_into_obligations(
+ cause,
+ param_env,
+ &query_result.value.region_constraints,
+ &result_subst,
+ ));
+
+ // Apply the substitution to the answer so it is expressed in terms
+ // of the caller's variables.
+ let user_result: R =
+ query_result.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
+
+ Ok(InferOk {
+ value: user_result,
+ obligations,
+ })
+ }
+
+ /// An alternative to
+ /// `instantiate_query_result_and_region_obligations` that is more
+ /// efficient for NLL. NLL is a bit more advanced in the
+ /// "transition to chalk" than the rest of the compiler. During
+ /// the NLL type check, all of the "processing" of types and
+ /// things happens in queries -- the NLL checker itself is only
+ /// interested in the region obligations (`'a: 'b` or `T: 'b`)
+ /// that come out of these queries, which it wants to convert into
+ /// MIR-based constraints and solve. Therefore, it is most
+ /// convenient for the NLL Type Checker to **directly consume**
+ /// the `QueryRegionConstraint` values that arise from doing a
+ /// query. This is contrast to other parts of the compiler, which
+ /// would prefer for those `QueryRegionConstraint` to be converted
+ /// into the older infcx-style constraints (e.g., calls to
+ /// `sub_regions` or `register_region_obligation`).
+ ///
+ /// Therefore, `instantiate_nll_query_result_and_region_obligations` performs the same
+ /// basic operations as `instantiate_query_result_and_region_obligations` but
+ /// it returns its result differently:
+ ///
+ /// - It creates a substitution `S` that maps from the original
+ /// query variables to the values computed in the query
+ /// result. If any errors arise, they are propagated back as an
+ /// `Err` result.
+ /// - In the case of a successful substitution, we will append
+ /// `QueryRegionConstraint` values onto the
+ /// `output_query_region_constraints` vector for the solver to
+ /// use (if an error arises, some values may also be pushed, but
+ /// they should be ignored).
+ /// - It **can happen** (though it rarely does currently) that
+ /// equating types and things will give rise to subobligations
+ /// that must be processed. In this case, those subobligations
+ /// are propagated back in the return value.
+ /// - Finally, the query result (of type `R`) is propagated back,
+ /// after applying the substitution `S`.
+ pub fn instantiate_nll_query_result_and_region_obligations<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ original_values: &CanonicalVarValues<'tcx>,
+ query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+ output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
+ ) -> InferResult<'tcx, R>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ // In an NLL query, there should be no type variables in the
+ // query, only region variables.
+ debug_assert!(query_result.variables.iter().all(|v| match v.kind {
+ CanonicalVarKind::Ty(_) => false,
+ CanonicalVarKind::Region => true,
+ }));
+
+ let result_subst =
+ self.query_result_substitution_guess(cause, original_values, query_result);
+
+ // Compute `QueryRegionConstraint` values that unify each of
+ // the original values `v_o` that was canonicalized into a
+ // variable...
+ let mut obligations = vec![];
+
+ for (index, original_value) in original_values.var_values.iter_enumerated() {
+ // ...with the value `v_r` of that variable from the query.
+ let result_value = query_result
+ .substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index]);
+ match (original_value.unpack(), result_value.unpack()) {
+ (UnpackedKind::Lifetime(ty::ReErased), UnpackedKind::Lifetime(ty::ReErased)) => {
+ // no action needed
+ }
+
+ (UnpackedKind::Lifetime(v_o), UnpackedKind::Lifetime(v_r)) => {
+ // To make `v_o = v_r`, we emit `v_o: v_r` and `v_r: v_o`.
+ if v_o != v_r {
+ output_query_region_constraints
+ .push(ty::Binder::dummy(ty::OutlivesPredicate(v_o.into(), v_r)));
+ output_query_region_constraints
+ .push(ty::Binder::dummy(ty::OutlivesPredicate(v_r.into(), v_o)));
+ }
+ }
+
+ (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
+ // Equating types may produce subobligations; collect them
+ // for the caller to process.
+ let ok = self.at(cause, param_env).eq(v1, v2)?;
+ obligations.extend(ok.into_obligations());
+ }
+
+ _ => {
+ bug!(
+ "kind mismatch, cannot unify {:?} and {:?}",
+ original_value,
+ result_value
+ );
+ }
+ }
+ }
+
+ // ...also include the other query region constraints from the query,
+ // with the result substitution applied to both sides.
+ output_query_region_constraints.reserve(query_result.value.region_constraints.len());
+ for r_c in query_result.value.region_constraints.iter() {
+ output_query_region_constraints.push(r_c.map_bound(|ty::OutlivesPredicate(k1, r2)| {
+ let k1 = substitute_value(self.tcx, &result_subst, &k1);
+ let r2 = substitute_value(self.tcx, &result_subst, &r2);
+ ty::OutlivesPredicate(k1, r2)
+ }));
+ }
+
+ let user_result: R =
+ query_result.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
+
+ Ok(InferOk {
+ value: user_result,
+ obligations,
+ })
+ }
+
+ /// Given the original values and the (canonicalized) result from
+ /// computing a query, returns a substitution that can be applied
+ /// to the query result to convert the result back into the
+ /// original namespace.
+ ///
+ /// The substitution also comes accompanied with subobligations
+ /// that arose from unification; these might occur if (for
+ /// example) we are doing lazy normalization and the value
+ /// assigned to a type variable is unified with an unnormalized
+ /// projection.
+ fn query_result_substitution<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ original_values: &CanonicalVarValues<'tcx>,
+ query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+ ) -> InferResult<'tcx, CanonicalVarValues<'tcx>>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ debug!(
+ "query_result_substitution(original_values={:#?}, query_result={:#?})",
+ original_values, query_result,
+ );
+
+ // First guess at a substitution...
+ let result_subst =
+ self.query_result_substitution_guess(cause, original_values, query_result);
+
+ // ...then unify that guess against the actual query result,
+ // collecting any subobligations that unification produces.
+ let obligations = self.unify_query_result_substitution_guess(
+ cause,
+ param_env,
+ original_values,
+ &result_subst,
+ query_result,
+ )?
+ .into_obligations();
+
+ Ok(InferOk {
+ value: result_subst,
+ obligations,
+ })
+ }
+
+ /// Given the original values and the (canonicalized) result from
+ /// computing a query, returns a **guess** at a substitution that
+ /// can be applied to the query result to convert the result back
+ /// into the original namespace. This is called a **guess**
+ /// because it uses a quick heuristic to find the values for each
+ /// canonical variable; if that quick heuristic fails, then we
+ /// will instantiate fresh inference variables for each canonical
+ /// variable instead. Therefore, the result of this method must be
+ /// properly unified with the actual query result afterwards
+ /// (see `unify_query_result_substitution_guess`).
+ fn query_result_substitution_guess<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ original_values: &CanonicalVarValues<'tcx>,
+ query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+ ) -> CanonicalVarValues<'tcx>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ debug!(
+ "query_result_substitution_guess(original_values={:#?}, query_result={:#?})",
+ original_values, query_result,
+ );
+
+ // Every canonical query result includes values for each of
+ // the inputs to the query. Therefore, we begin by unifying
+ // these values with the original inputs that were
+ // canonicalized.
+ let result_values = &query_result.value.var_values;
+ assert_eq!(original_values.len(), result_values.len());
+
+ // Quickly try to find initial values for the canonical
+ // variables in the result in terms of the query. We do this
+ // by iterating down the values that the query gave to each of
+ // the canonical inputs. If we find that one of those values
+ // is directly equal to one of the canonical variables in the
+ // result, then we can type the corresponding value from the
+ // input. See the example above.
+ let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =
+ IndexVec::from_elem_n(None, query_result.variables.len());
+
+ // In terms of our example above, we are iterating over pairs like:
+ // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
+ for (original_value, result_value) in original_values.iter().zip(result_values) {
+ match result_value.unpack() {
+ UnpackedKind::Type(result_value) => {
+ // e.g., here `result_value` might be `?0` in the example above...
+ if let ty::TyInfer(ty::InferTy::CanonicalTy(index)) = result_value.sty {
+ // in which case we would set `canonical_vars[0]` to `Some(?U)`.
+ opt_values[index] = Some(original_value);
+ }
+ }
+ UnpackedKind::Lifetime(result_value) => {
+ // e.g., here `result_value` might be `'?1` in the example above...
+ if let &ty::RegionKind::ReCanonical(index) = result_value {
+ // in which case we would set `canonical_vars[0]` to `Some('static)`.
+ opt_values[index] = Some(original_value);
+ }
+ }
+ }
+ }
+
+ // Create a result substitution: if we found a value for a
+ // given variable in the loop above, use that. Otherwise, use
+ // a fresh inference variable.
+ let result_subst = CanonicalVarValues {
+ var_values: query_result
+ .variables
+ .iter()
+ .enumerate()
+ .map(|(index, info)| match opt_values[CanonicalVar::new(index)] {
+ Some(k) => k,
+ None => self.fresh_inference_var_for_canonical_var(cause.span, *info),
+ })
+ .collect(),
+ };
+
+ result_subst
+ }
+
+ /// Given a "guess" at the values for the canonical variables in
+ /// the input, try to unify with the *actual* values found in the
+ /// query result. Often, but not always, this is a no-op, because
+ /// we already found the mapping in the "guessing" step.
+ ///
+ /// See also: `query_result_substitution_guess`
+ fn unify_query_result_substitution_guess<R>(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ original_values: &CanonicalVarValues<'tcx>,
+ result_subst: &CanonicalVarValues<'tcx>,
+ query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+ ) -> InferResult<'tcx, ()>
+ where
+ R: Debug + TypeFoldable<'tcx>,
+ {
+ // A closure that yields the result value for the given
+ // canonical variable; this is taken from
+ // `query_result.var_values` after applying the substitution
+ // `result_subst`.
+ let substituted_query_result = |index: CanonicalVar| -> Kind<'tcx> {
+ query_result.substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index])
+ };
+
+ // Unify the original value for each variable with the value
+ // taken from `query_result` (after applying `result_subst`).
+ Ok(self.unify_canonical_vars(cause, param_env, original_values, substituted_query_result)?)
+ }
+
+ /// Converts the region constraints resulting from a query into an
+ /// iterator of obligations.
+ ///
+ /// Each `QueryRegionConstraint` (an `OutlivesPredicate` under a
+ /// binder) is first rewritten via `result_subst` into the caller's
+ /// namespace, then turned into a `RegionOutlives` or `TypeOutlives`
+ /// predicate obligation depending on the kind of its left-hand side.
+ fn query_region_constraints_into_obligations<'a>(
+ &'a self,
+ cause: &'a ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
+ result_subst: &'a CanonicalVarValues<'tcx>,
+ ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {
+ // Note: the return type is `impl Iterator`, which already erases
+ // the concrete iterator type, so the `Box<dyn Iterator>` that was
+ // previously allocated here was pure overhead (heap allocation +
+ // dynamic dispatch); the `map` adapter is returned directly.
+ unsubstituted_region_constraints
+ .iter()
+ .map(move |constraint| {
+ let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
+ let k1 = substitute_value(self.tcx, result_subst, k1);
+ let r2 = substitute_value(self.tcx, result_subst, r2);
+ match k1.unpack() {
+ UnpackedKind::Lifetime(r1) => Obligation::new(
+ cause.clone(),
+ param_env,
+ ty::Predicate::RegionOutlives(ty::Binder::dummy(
+ ty::OutlivesPredicate(r1, r2),
+ )),
+ ),
+
+ UnpackedKind::Type(t1) => Obligation::new(
+ cause.clone(),
+ param_env,
+ ty::Predicate::TypeOutlives(ty::Binder::dummy(ty::OutlivesPredicate(
+ t1, r2,
+ ))),
+ ),
+ }
+ })
+ }
+
+ /// Given two sets of values for the same set of canonical variables, unify them.
+ /// The second set is produced lazily by supplying indices from the first set.
+ fn unify_canonical_vars(
+ &self,
+ cause: &ObligationCause<'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
+ variables1: &CanonicalVarValues<'tcx>,
+ variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,
+ ) -> InferResult<'tcx, ()> {
+ // Wrap in `commit_if_ok` so any inference progress is rolled back
+ // if unification of some pair fails.
+ self.commit_if_ok(|_| {
+ let mut obligations = vec![];
+ for (index, value1) in variables1.var_values.iter_enumerated() {
+ let value2 = variables2(index);
+
+ match (value1.unpack(), value2.unpack()) {
+ (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
+ obligations
+ .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+ }
+ (
+ UnpackedKind::Lifetime(ty::ReErased),
+ UnpackedKind::Lifetime(ty::ReErased),
+ ) => {
+ // no action needed
+ }
+ (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {
+ obligations
+ .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+ }
+ _ => {
+ bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
+ }
+ }
+ }
+ Ok(InferOk {
+ value: (),
+ obligations,
+ })
+ })
+ }
+}
+
+/// Given the region obligations and constraints scraped from the infcx,
+/// creates query region constraints.
+///
+/// Only plain `constraints` are supported: `verifys` and `givens` are
+/// asserted empty. Sub-region constraints (`<=`) are flipped into
+/// outlives form (`>=`), and each registered region obligation
+/// `T: 'r` is appended as a `TypeOutlives`-style predicate.
+pub fn make_query_outlives<'tcx>(
+ tcx: TyCtxt<'_, '_, 'tcx>,
+ region_obligations: Vec<(ast::NodeId, RegionObligation<'tcx>)>,
+ region_constraints: &RegionConstraintData<'tcx>,
+) -> Vec<QueryRegionConstraint<'tcx>> {
+ let RegionConstraintData {
+ constraints,
+ verifys,
+ givens,
+ } = region_constraints;
+
+ assert!(verifys.is_empty());
+ assert!(givens.is_empty());
+
+ let mut outlives: Vec<_> = constraints
+ .into_iter()
+ .map(|(k, _)| match *k {
+ // Swap regions because we are going from sub (<=) to outlives
+ // (>=).
+ Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
+ tcx.mk_region(ty::ReVar(v2)).into(),
+ tcx.mk_region(ty::ReVar(v1)),
+ ),
+ Constraint::VarSubReg(v1, r2) => {
+ ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
+ }
+ Constraint::RegSubVar(r1, v2) => {
+ ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
+ }
+ Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
+ })
+ .map(ty::Binder::dummy) // no bound regions in the code above
+ .collect();
+
+ outlives.extend(
+ region_obligations
+ .into_iter()
+ .map(|(_, r_o)| ty::OutlivesPredicate(r_o.sup_type.into(), r_o.sub_region))
+ .map(ty::Binder::dummy), // no bound regions in the code above
+ );
+
+ outlives
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains code to substitute new values into a
+//! `Canonical<'tcx, T>`.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::{Canonical, CanonicalVarValues};
+use ty::fold::{TypeFoldable, TypeFolder};
+use ty::subst::UnpackedKind;
+use ty::{self, Ty, TyCtxt, TypeFlags};
+
+impl<'tcx, V> Canonical<'tcx, V> {
+ /// Instantiate the wrapped value, replacing each canonical value
+ /// with the value given in `var_values`.
+ pub fn substitute(&self, tcx: TyCtxt<'_, '_, 'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
+ where
+ V: TypeFoldable<'tcx>,
+ {
+ // Identity projection: substitute into the whole wrapped value.
+ self.substitute_projected(tcx, var_values, |value| value)
+ }
+
+ /// Allows one to apply a substitute to some subset of
+ /// `self.value`. Invoke `projection_fn` with `self.value` to get
+ /// a value V that is expressed in terms of the same canonical
+ /// variables bound in `self` (usually this extracts from subset
+ /// of `self`). Apply the substitution `var_values` to this value
+ /// V, replacing each of the canonical variables.
+ pub fn substitute_projected<T>(
+ &self,
+ tcx: TyCtxt<'_, '_, 'tcx>,
+ var_values: &CanonicalVarValues<'tcx>,
+ projection_fn: impl FnOnce(&V) -> &T,
+ ) -> T
+ where
+ T: TypeFoldable<'tcx>,
+ {
+ // The substitution must supply exactly one value per canonical variable.
+ assert_eq!(self.variables.len(), var_values.var_values.len());
+ let value = projection_fn(&self.value);
+ substitute_value(tcx, var_values, value)
+ }
+}
+
+/// Substitute the values from `var_values` into `value`. `var_values`
+/// must be values for the set of canonical variables that appear in
+/// `value`.
+pub(super) fn substitute_value<'a, 'tcx, T>(
+ tcx: TyCtxt<'_, '_, 'tcx>,
+ var_values: &CanonicalVarValues<'tcx>,
+ value: &'a T,
+) -> T
+where
+ T: TypeFoldable<'tcx>,
+{
+ if var_values.var_values.is_empty() {
+ // No values to substitute, so the value must not mention any
+ // canonical variables either.
+ debug_assert!(!value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS));
+ value.clone()
+ } else if !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
+ // Fast path: nothing to replace, skip the fold entirely.
+ value.clone()
+ } else {
+ value.fold_with(&mut CanonicalVarValuesSubst { tcx, var_values })
+ }
+}
+
+// Folder that replaces each canonical type/region variable with the
+// corresponding entry from `var_values`.
+struct CanonicalVarValuesSubst<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ var_values: &'cx CanonicalVarValues<'tcx>,
+}
+
+impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for CanonicalVarValuesSubst<'cx, 'gcx, 'tcx> {
+ fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
+ self.tcx
+ }
+
+ fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+ match t.sty {
+ ty::TyInfer(ty::InferTy::CanonicalTy(c)) => {
+ // A canonical type variable: look up its replacement, which
+ // must itself be a type.
+ match self.var_values.var_values[c].unpack() {
+ UnpackedKind::Type(ty) => ty,
+ r => bug!("{:?} is a type but value is {:?}", c, r),
+ }
+ }
+ _ => {
+ if !t.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
+ // Fast path: no canonical variables below, stop recursing.
+ t
+ } else {
+ t.super_fold_with(self)
+ }
+ }
+ }
+ }
+
+ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+ match r {
+ // A canonical region variable: its replacement must be a lifetime.
+ ty::RegionKind::ReCanonical(c) => match self.var_values.var_values[*c].unpack() {
+ UnpackedKind::Lifetime(l) => l,
+ r => bug!("{:?} is a region but value is {:?}", c, r),
+ },
+ _ => r.super_fold_with(self),
+ }
+ }
+}
infer::LateBoundRegion(_, br, infer::AssocTypeProjection(def_id)) => format!(
" for lifetime parameter {}in trait containing associated type `{}`",
br_string(br),
- self.tcx.associated_item(def_id).name
+ self.tcx.associated_item(def_id).ident
),
infer::EarlyBoundRegion(_, name) => format!(" for lifetime parameter `{}`", name),
infer::BoundRegionInCoherence(name) => {
labels.clear();
labels.push((pattern.span, format!("consider giving this closure parameter a type")));
} else if let Some(pattern) = local_visitor.found_local_pattern {
- if let Some(simple_name) = pattern.simple_name() {
- labels.push((pattern.span, format!("consider giving `{}` a type", simple_name)));
+ if let Some(simple_ident) = pattern.simple_ident() {
+ labels.push((pattern.span, format!("consider giving `{}` a type", simple_ident)));
} else {
labels.push((pattern.span, format!("consider giving the pattern a type")));
}
let sub_is_ret_type =
self.is_return_type_anon(scope_def_id_sub, bregion_sub, ty_fndecl_sub);
- let span_label_var1 = if let Some(simple_name) = anon_arg_sup.pat.simple_name() {
- format!(" from `{}`", simple_name)
+ let span_label_var1 = if let Some(simple_ident) = anon_arg_sup.pat.simple_ident() {
+ format!(" from `{}`", simple_ident)
} else {
format!("")
};
- let span_label_var2 = if let Some(simple_name) = anon_arg_sub.pat.simple_name() {
- format!(" into `{}`", simple_name)
+ let span_label_var2 = if let Some(simple_ident) = anon_arg_sub.pat.simple_ident() {
+ format!(" into `{}`", simple_ident)
} else {
format!("")
};
mod find_anon_type;
mod named_anon_conflict;
mod outlives_closure;
+mod static_impl_trait;
mod util;
impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
self.try_report_named_anon_conflict()
.or_else(|| self.try_report_anon_anon_conflict())
.or_else(|| self.try_report_outlives_closure())
+ .or_else(|| self.try_report_static_impl_trait())
}
pub fn get_regions(&self) -> (Span, ty::Region<'tcx>, ty::Region<'tcx>) {
}
}
- let (error_var, span_label_var) = if let Some(simple_name) = arg.pat.simple_name() {
+ let (error_var, span_label_var) = if let Some(simple_ident) = arg.pat.simple_ident() {
(
- format!("the type of `{}`", simple_name),
- format!("the type of `{}`", simple_name),
+ format!("the type of `{}`", simple_ident),
+ format!("the type of `{}`", simple_ident),
)
} else {
("parameter type".to_owned(), "type".to_owned())
--- /dev/null
+// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Error Reporting for static impl Traits.
+
+use infer::error_reporting::nice_region_error::NiceRegionError;
+use infer::lexical_region_resolve::RegionResolutionError;
+use ty::{BoundRegion, FreeRegion, RegionKind};
+use util::common::ErrorReported;
+
+impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+ /// Print the error message for lifetime errors when the return type is a static impl Trait.
+ ///
+ /// Fires only for a `SubSupConflict` where the sub-region is `'static`
+ /// and the function whose region is involved returns `impl Trait`;
+ /// returns `Some(ErrorReported)` after emitting, `None` otherwise.
+ pub(super) fn try_report_static_impl_trait(&self) -> Option<ErrorReported> {
+ if let Some(ref error) = self.error {
+ match error.clone() {
+ RegionResolutionError::SubSupConflict(
+ var_origin,
+ sub_origin,
+ sub_r,
+ sup_origin,
+ sup_r,
+ ) => {
+ let anon_reg_sup = self.is_suitable_region(sup_r)?;
+ if sub_r == &RegionKind::ReStatic &&
+ self.is_return_type_impl_trait(anon_reg_sup.def_id)
+ {
+ let sp = var_origin.span();
+ let return_sp = sub_origin.span();
+ let mut err = self.tcx.sess.struct_span_err(
+ sp,
+ "cannot infer an appropriate lifetime",
+ );
+ err.span_label(
+ return_sp,
+ "this return type evaluates to the `'static` lifetime...",
+ );
+ err.span_label(
+ sup_origin.span(),
+ "...but this borrow...",
+ );
+
+ let (lifetime, lt_sp_opt) = self.tcx.msg_span_from_free_region(sup_r);
+ if let Some(lifetime_sp) = lt_sp_opt {
+ err.span_note(
+ lifetime_sp,
+ &format!("...can't outlive {}", lifetime),
+ );
+ }
+
+ // Name the lifetime for the suggestion: use the named
+ // bound region if there is one, else the anonymous `'_`.
+ let lifetime_name = match sup_r {
+ RegionKind::ReFree(FreeRegion {
+ bound_region: BoundRegion::BrNamed(_, ref name), ..
+ }) => format!("{}", name),
+ _ => "'_".to_owned(),
+ };
+ // Only suggest `T + 'a` when we can snippet the return type.
+ if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(return_sp) {
+ err.span_suggestion(
+ return_sp,
+ &format!(
+ "you can add a constraint to the return type to make it last \
+ less than `'static` and match {}",
+ lifetime,
+ ),
+ format!("{} + {}", snippet, lifetime_name),
+ );
+ }
+ err.emit();
+ return Some(ErrorReported);
+ }
+ }
+ _ => {}
+ }
+ }
+ None
+ }
+}
}
None
}
+
+ /// Returns true if the item at `scope_def_id` is a function whose
+ /// return type is `impl Trait` (checked after erasing late-bound
+ /// regions from the signature's output).
+ pub(super) fn is_return_type_impl_trait(
+ &self,
+ scope_def_id: DefId,
+ ) -> bool {
+ let ret_ty = self.tcx.type_of(scope_def_id);
+ match ret_ty.sty {
+ ty::TyFnDef(_, _) => {
+ let sig = ret_ty.fn_sig(self.tcx);
+ let output = self.tcx.erase_late_bound_regions(&sig.output());
+ return output.is_impl_trait();
+ }
+ // Not a function definition, so no return type to inspect.
+ _ => {}
+ }
+ false
+ }
+
// Here we check for the case where anonymous region
// corresponds to self and if yes, we display E0312.
// FIXME(#42700) - Need to format self properly to
use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric};
use ty::fold::TypeFoldable;
use ty::relate::RelateResult;
-use traits::{self, ObligationCause, PredicateObligations};
+use traits::{self, ObligationCause, PredicateObligations, TraitEngine};
use rustc_data_structures::unify as ut;
use std::cell::{Cell, RefCell, Ref, RefMut};
use std::collections::BTreeMap;
pub fn unit(self) -> InferOk<'tcx, ()> {
InferOk { value: (), obligations: self.obligations }
}
+
+ /// Extract `value`, registering any obligations into `fulfill_cx`
+ pub fn into_value_registering_obligations(
+ self,
+ infcx: &InferCtxt<'_, '_, 'tcx>,
+ fulfill_cx: &mut impl TraitEngine<'tcx>,
+ ) -> T {
+ let InferOk { value, obligations } = self;
+ // Hand each pending obligation to the trait engine; the caller is
+ // then responsible for driving `fulfill_cx` to completion.
+ for obligation in obligations {
+ fulfill_cx.register_predicate_obligation(infcx, obligation);
+ }
+ value
+ }
}
impl<'tcx> InferOk<'tcx, ()> {
use infer::InferCtxt;
use syntax::ast;
use syntax::codemap::Span;
-use traits::{FulfillmentContext, TraitEngine};
+use traits::{FulfillmentContext, TraitEngine, TraitEngineExt};
use ty::{self, Ty, TypeFoldable};
use ty::outlives::Component;
use ty::wf;
pub mod env;
pub mod free_region_map;
pub mod bounds;
-mod obligations;
+pub mod obligations;
use hir::def_id::DefId;
use infer::{self, GenericKind, InferCtxt, RegionObligation, SubregionOrigin, VerifyBound};
+use syntax::ast;
use traits;
-use ty::{self, Ty, TyCtxt, TypeFoldable};
-use ty::subst::{Subst, Substs};
use ty::outlives::Component;
-use syntax::ast;
+use ty::subst::{Subst, Substs};
+use ty::{self, Ty, TyCtxt, TypeFoldable};
impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
/// Registers that the given region obligation must be resolved
) {
debug!(
"register_region_obligation(body_id={:?}, obligation={:?})",
- body_id,
- obligation
+ body_id, obligation
);
self.region_obligations
}
/// Trait queries just want to pass back type obligations "as is"
- pub fn take_registered_region_obligations(
- &self,
- ) -> Vec<(ast::NodeId, RegionObligation<'tcx>)> {
- ::std::mem::replace(
- &mut *self.region_obligations.borrow_mut(),
- vec![],
- )
+ pub fn take_registered_region_obligations(&self) -> Vec<(ast::NodeId, RegionObligation<'tcx>)> {
+ ::std::mem::replace(&mut *self.region_obligations.borrow_mut(), vec![])
}
/// Process the region obligations that must be proven (during
}
}
- let outlives =
- TypeOutlives::new(self, region_bound_pairs, implicit_region_bound, param_env);
+ let outlives = &mut TypeOutlives::new(
+ self,
+ self.tcx,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ );
for RegionObligation {
sup_type,
{
debug!(
"process_registered_region_obligations: sup_type={:?} sub_region={:?} cause={:?}",
- sup_type,
- sub_region,
- cause
+ sup_type, sub_region, cause
);
- let origin = SubregionOrigin::from_obligation_cause(
- &cause,
- || infer::RelateParamBound(cause.span, sup_type),
- );
+ let origin = SubregionOrigin::from_obligation_cause(&cause, || {
+ infer::RelateParamBound(cause.span, sup_type)
+ });
+ let sup_type = self.resolve_type_vars_if_possible(&sup_type);
outlives.type_must_outlive(origin, sup_type, sub_region);
}
}
ty: Ty<'tcx>,
region: ty::Region<'tcx>,
) {
- let outlives =
- TypeOutlives::new(self, region_bound_pairs, implicit_region_bound, param_env);
+ let outlives = &mut TypeOutlives::new(
+ self,
+ self.tcx,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ );
+ let ty = self.resolve_type_vars_if_possible(&ty);
outlives.type_must_outlive(origin, ty, region);
}
}
-#[must_use] // you ought to invoke `into_accrued_obligations` when you are done =)
-struct TypeOutlives<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+/// The `TypeOutlives` struct has the job of "lowering" a `T: 'a`
+/// obligation into a series of `'a: 'b` constraints and "verifys", as
+/// described on the module comment. The final constraints are emitted
+/// via a "delegate" of type `D` -- this is usually the `infcx`, which
+/// accrues them into the `region_obligations` code, but for NLL we
+/// use something else.
+pub struct TypeOutlives<'cx, 'gcx: 'tcx, 'tcx: 'cx, D>
+where
+ D: TypeOutlivesDelegate<'tcx>,
+{
// See the comments on `process_registered_region_obligations` for the meaning
// of these fields.
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+ // Receives the `'a: 'b` constraints and verify bounds we produce.
+ delegate: D,
+ // Stored directly (rather than reached through an `InferCtxt`) because
+ // the delegate is now generic and need not be an inference context.
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
region_bound_pairs: &'cx [(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
}
-impl<'cx, 'gcx, 'tcx> TypeOutlives<'cx, 'gcx, 'tcx> {
- fn new(
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+/// Sink for the constraints computed by `TypeOutlives`. An implementation
+/// decides where results are recorded: the impl for `&InferCtxt` below
+/// registers them with the inference context, while NLL supplies its own
+/// delegate.
+pub trait TypeOutlivesDelegate<'tcx> {
+ /// Record a sub-region constraint between `a` and `b` arising at
+ /// `origin` (forwarded to `InferCtxt::sub_regions` in the impl below).
+ fn push_sub_region_constraint(
+ &mut self,
+ origin: SubregionOrigin<'tcx>,
+ a: ty::Region<'tcx>,
+ b: ty::Region<'tcx>,
+ );
+
+ /// Record a deferred "verify" obligation: generic `kind` and region `a`
+ /// are to be checked against `bound`, arising at `origin`.
+ fn push_verify(
+ &mut self,
+ origin: SubregionOrigin<'tcx>,
+ kind: GenericKind<'tcx>,
+ a: ty::Region<'tcx>,
+ bound: VerifyBound<'tcx>,
+ );
+}
+
+impl<'cx, 'gcx, 'tcx, D> TypeOutlives<'cx, 'gcx, 'tcx, D>
+where
+ D: TypeOutlivesDelegate<'tcx>,
+{
+ pub fn new(
+ delegate: D,
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
region_bound_pairs: &'cx [(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
) -> Self {
Self {
- infcx,
+ delegate,
+ tcx,
region_bound_pairs,
implicit_region_bound,
param_env,
/// - `origin`, the reason we need this constraint
/// - `ty`, the type `T`
/// - `region`, the region `'a`
- fn type_must_outlive(
- &self,
+ pub fn type_must_outlive(
+ &mut self,
origin: infer::SubregionOrigin<'tcx>,
ty: Ty<'tcx>,
region: ty::Region<'tcx>,
) {
- let ty = self.infcx.resolve_type_vars_if_possible(&ty);
-
debug!(
"type_must_outlive(ty={:?}, region={:?}, origin={:?})",
- ty,
- region,
- origin
+ ty, region, origin
);
assert!(!ty.has_escaping_regions());
- let components = self.tcx().outlives_components(ty);
+ let components = self.tcx.outlives_components(ty);
self.components_must_outlive(origin, components, region);
}
- fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
- self.infcx.tcx
- }
-
fn components_must_outlive(
- &self,
+ &mut self,
origin: infer::SubregionOrigin<'tcx>,
components: Vec<Component<'tcx>>,
region: ty::Region<'tcx>,
let origin = origin.clone();
match component {
Component::Region(region1) => {
- self.infcx.sub_regions(origin, region, region1);
+ self.delegate.push_sub_region_constraint(origin, region, region1);
}
Component::Param(param_ty) => {
self.param_ty_must_outlive(origin, region, param_ty);
// ignore this, we presume it will yield an error
// later, since if a type variable is not resolved by
// this point it never will be
- self.infcx.tcx.sess.delay_span_bug(
+ self.tcx.sess.delay_span_bug(
origin.span(),
&format!("unresolved inference variable in outlives: {:?}", v),
);
}
fn param_ty_must_outlive(
- &self,
+ &mut self,
origin: infer::SubregionOrigin<'tcx>,
region: ty::Region<'tcx>,
param_ty: ty::ParamTy,
) {
debug!(
"param_ty_must_outlive(region={:?}, param_ty={:?}, origin={:?})",
- region,
- param_ty,
- origin
+ region, param_ty, origin
);
let verify_bound = self.param_bound(param_ty);
let generic = GenericKind::Param(param_ty);
- self.infcx
- .verify_generic_bound(origin, generic, region, verify_bound);
+ self.delegate
+ .push_verify(origin, generic, region, verify_bound);
}
fn projection_must_outlive(
- &self,
+ &mut self,
origin: infer::SubregionOrigin<'tcx>,
region: ty::Region<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
) {
debug!(
"projection_must_outlive(region={:?}, projection_ty={:?}, origin={:?})",
- region,
- projection_ty,
- origin
+ region, projection_ty, origin
);
// This case is thorny for inference. The fundamental problem is
}
for r in projection_ty.substs.regions() {
- self.infcx.sub_regions(origin.clone(), region, r);
+ self.delegate.push_sub_region_constraint(origin.clone(), region, r);
}
return;
.any(|r| env_bounds.contains(&r))
{
debug!("projection_must_outlive: unique declared bound appears in trait ref");
- self.infcx.sub_regions(origin.clone(), region, unique_bound);
+ self.delegate
+ .push_sub_region_constraint(origin.clone(), region, unique_bound);
return;
}
}
// even though a satisfactory solution exists.
let verify_bound = self.projection_bound(env_bounds, projection_ty);
let generic = GenericKind::Projection(projection_ty);
- self.infcx
- .verify_generic_bound(origin, generic.clone(), region, verify_bound);
+ self.delegate
+ .push_verify(origin, generic.clone(), region, verify_bound);
}
fn type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> {
) -> VerifyBound<'tcx> {
debug!(
"projection_bound(declared_bounds={:?}, projection_ty={:?})",
- declared_bounds,
- projection_ty
+ declared_bounds, projection_ty
);
// see the extensive comment in projection_must_outlive
- let ty = self.infcx
+ let ty = self
.tcx
.mk_projection(projection_ty.item_def_id, projection_ty.substs);
let recursive_bound = self.recursive_type_bound(ty);
&self,
generic: GenericKind<'tcx>,
) -> Vec<ty::Region<'tcx>> {
- let tcx = self.tcx();
+ let tcx = self.tcx;
// To start, collect bounds from user environment. Note that
// parameter environments are already elaborated, so we don't
debug!("projection_bounds(projection_ty={:?})", projection_ty);
let mut bounds = self.region_bounds_declared_on_associated_item(projection_ty.item_def_id);
for r in &mut bounds {
- *r = r.subst(self.tcx(), projection_ty.substs);
+ *r = r.subst(self.tcx, projection_ty.substs);
}
bounds
}
&self,
assoc_item_def_id: DefId,
) -> Vec<ty::Region<'tcx>> {
- let tcx = self.tcx();
+ let tcx = self.tcx;
let assoc_item = tcx.associated_item(assoc_item_def_id);
let trait_def_id = assoc_item.container.assert_trait();
let trait_predicates = tcx.predicates_of(trait_def_id);
.collect()
}
}
+
+// The "classic" delegate: record everything directly in the inference
+// context, exactly as the pre-refactoring code did.
+impl<'cx, 'gcx, 'tcx> TypeOutlivesDelegate<'tcx> for &'cx InferCtxt<'cx, 'gcx, 'tcx> {
+ fn push_sub_region_constraint(
+ &mut self,
+ origin: SubregionOrigin<'tcx>,
+ a: ty::Region<'tcx>,
+ b: ty::Region<'tcx>,
+ ) {
+ self.sub_regions(origin, a, b)
+ }
+
+ fn push_verify(
+ &mut self,
+ origin: SubregionOrigin<'tcx>,
+ kind: GenericKind<'tcx>,
+ a: ty::Region<'tcx>,
+ bound: VerifyBound<'tcx>,
+ ) {
+ self.verify_generic_bound(origin, kind, a, bound)
+ }
+}
+
pub mod allocator;
pub mod borrowck;
pub mod expr_use_visitor;
- pub mod const_val;
pub mod cstore;
pub mod dataflow;
pub mod dead;
#[macro_export]
macro_rules! __impl_stable_hash_field {
- (DECL IGNORED) => (_);
- (DECL $name:ident) => (ref $name);
- (USE IGNORED $ctx:expr, $hasher:expr) => ({});
- (USE $name:ident, $ctx:expr, $hasher:expr) => ($name.hash_stable($ctx, $hasher));
+ // No delegate given: stable-hash the field itself.
+ ($field:ident, $ctx:expr, $hasher:expr) => ($field.hash_stable($ctx, $hasher));
+ // Delegate `_`: skip the field ("use" it so it is not flagged as unused).
+ ($field:ident, $ctx:expr, $hasher:expr, _) => ({ let _ = $field; });
+ // Delegate expression: hash the delegate's result in place of the field.
+ ($field:ident, $ctx:expr, $hasher:expr, $delegate:expr) => ($delegate.hash_stable($ctx, $hasher));
}
#[macro_export]
macro_rules! impl_stable_hash_for {
- (enum $enum_name:path { $( $variant:ident $( ( $($arg:ident),* ) )* ),* $(,)* }) => {
+ (enum $enum_name:path { $( $variant:ident $( ( $($field:ident $(-> $delegate:tt)?),* ) )* ),* $(,)? }) => {
impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $enum_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
match *self {
$(
- $variant $( ( $( __impl_stable_hash_field!(DECL $arg) ),* ) )* => {
- $($( __impl_stable_hash_field!(USE $arg, __ctx, __hasher) );*)*
+ $variant $( ( $(ref $field),* ) )* => {
+ $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*)*
}
)*
}
}
}
};
- (struct $struct_name:path { $($field:ident),* }) => {
+ (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
$(ref $field),*
} = *self;
- $( $field.hash_stable(__ctx, __hasher));*
+ $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
}
}
};
- (tuple_struct $struct_name:path { $($field:ident),* }) => {
+ (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
$(ref $field),*
) = *self;
- $( $field.hash_stable(__ctx, __hasher));*
+ $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
}
}
};
+++ /dev/null
-// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use hir::def_id::DefId;
-use ty;
-use ty::subst::Substs;
-use ty::query::TyCtxtAt;
-use mir::interpret::ConstValue;
-use errors::DiagnosticBuilder;
-
-use graphviz::IntoCow;
-use syntax_pos::Span;
-use syntax::ast;
-
-use std::borrow::Cow;
-use rustc_data_structures::sync::Lrc;
-
-pub type EvalResult<'tcx> = Result<&'tcx ty::Const<'tcx>, ConstEvalErr<'tcx>>;
-
-#[derive(Copy, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Eq, PartialEq, Ord, PartialOrd)]
-pub enum ConstVal<'tcx> {
- Unevaluated(DefId, &'tcx Substs<'tcx>),
- Value(ConstValue<'tcx>),
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct ConstEvalErr<'tcx> {
- pub span: Span,
- pub kind: Lrc<ErrKind<'tcx>>,
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub enum ErrKind<'tcx> {
-
- CouldNotResolve,
- TypeckError,
- CheckMatchError,
- Miri(::mir::interpret::EvalError<'tcx>, Vec<FrameInfo>),
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct FrameInfo {
- pub span: Span,
- pub location: String,
- pub lint_root: Option<ast::NodeId>,
-}
-
-#[derive(Clone, Debug)]
-pub enum ConstEvalErrDescription<'a, 'tcx: 'a> {
- Simple(Cow<'a, str>),
- Backtrace(&'a ::mir::interpret::EvalError<'tcx>, &'a [FrameInfo]),
-}
-
-impl<'a, 'tcx> ConstEvalErrDescription<'a, 'tcx> {
- /// Return a one-line description of the error, for lints and such
- pub fn into_oneline(self) -> Cow<'a, str> {
- match self {
- ConstEvalErrDescription::Simple(simple) => simple,
- ConstEvalErrDescription::Backtrace(miri, _) => format!("{}", miri).into_cow(),
- }
- }
-}
-
-impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
- pub fn description(&'a self) -> ConstEvalErrDescription<'a, 'tcx> {
- use self::ErrKind::*;
- use self::ConstEvalErrDescription::*;
-
- macro_rules! simple {
- ($msg:expr) => ({ Simple($msg.into_cow()) });
- ($fmt:expr, $($arg:tt)+) => ({
- Simple(format!($fmt, $($arg)+).into_cow())
- })
- }
-
- match *self.kind {
- CouldNotResolve => simple!("could not resolve"),
- TypeckError => simple!("type-checking failed"),
- CheckMatchError => simple!("match-checking failed"),
- Miri(ref err, ref trace) => Backtrace(err, trace),
- }
- }
-
- pub fn struct_error(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str)
- -> Option<DiagnosticBuilder<'tcx>>
- {
- self.struct_generic(tcx, message, None, true)
- }
-
- pub fn report_as_error(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str
- ) {
- let err = self.struct_generic(tcx, message, None, true);
- if let Some(mut err) = err {
- err.emit();
- }
- }
-
- pub fn report_as_lint(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str,
- lint_root: ast::NodeId,
- ) {
- let lint = self.struct_generic(
- tcx,
- message,
- Some(lint_root),
- false,
- );
- if let Some(mut lint) = lint {
- lint.emit();
- }
- }
-
- fn struct_generic(
- &self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str,
- lint_root: Option<ast::NodeId>,
- as_err: bool,
- ) -> Option<DiagnosticBuilder<'tcx>> {
- let (msg, frames): (_, &[_]) = match *self.kind {
- ErrKind::TypeckError | ErrKind::CheckMatchError => return None,
- ErrKind::Miri(ref miri, ref frames) => {
- match miri.kind {
- ::mir::interpret::EvalErrorKind::TypeckError |
- ::mir::interpret::EvalErrorKind::Layout(_) => return None,
- ::mir::interpret::EvalErrorKind::ReferencedConstant(ref inner) => {
- inner.struct_generic(tcx, "referenced constant", lint_root, as_err)?.emit();
- (miri.to_string(), frames)
- },
- _ => (miri.to_string(), frames),
- }
- }
- _ => (self.description().into_oneline().to_string(), &[]),
- };
- trace!("reporting const eval failure at {:?}", self.span);
- let mut err = if as_err {
- struct_error(tcx, message)
- } else {
- let node_id = frames
- .iter()
- .rev()
- .filter_map(|frame| frame.lint_root)
- .next()
- .or(lint_root)
- .expect("some part of a failing const eval must be local");
- tcx.struct_span_lint_node(
- ::rustc::lint::builtin::CONST_ERR,
- node_id,
- tcx.span,
- message,
- )
- };
- err.span_label(self.span, msg);
- for FrameInfo { span, location, .. } in frames {
- err.span_label(*span, format!("inside call to `{}`", location));
- }
- Some(err)
- }
-}
-
-pub fn struct_error<'a, 'gcx, 'tcx>(
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- msg: &str,
-) -> DiagnosticBuilder<'tcx> {
- struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
-}
if !self.symbol_is_live(impl_item.id, None) {
self.warn_dead_code(impl_item.id,
impl_item.span,
- impl_item.name,
+ impl_item.ident.name,
"associated const",
"used");
}
hir::ImplItemKind::Method(_, body_id) => {
if !self.symbol_is_live(impl_item.id, None) {
let span = self.tcx.sess.codemap().def_span(impl_item.span);
- self.warn_dead_code(impl_item.id, span, impl_item.name, "method", "used");
+ self.warn_dead_code(impl_item.id, span, impl_item.ident.name, "method", "used");
}
self.visit_nested_body(body_id)
}
let body = ir.tcx.hir.body(body_id);
for arg in &body.arguments {
- arg.pat.each_binding(|_bm, hir_id, _x, path1| {
+ arg.pat.each_binding(|_bm, hir_id, _x, ident| {
debug!("adding argument {:?}", hir_id);
- let name = path1.node;
- fn_maps.add_variable(Arg(hir_id, name));
+ fn_maps.add_variable(Arg(hir_id, ident.name));
})
};
}
}
- pat.each_binding(|_bm, hir_id, _sp, path1| {
- let name = path1.node;
- ir.add_live_node_for_node(hir_id, VarDefNode(path1.span));
+ pat.each_binding(|_bm, hir_id, _sp, ident| {
+ ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
ir.add_variable(Local(LocalInfo {
id: hir_id,
- name,
+ name: ident.name,
is_shorthand: shorthand_field_ids.contains(&hir_id)
}));
});
},
None => {
this.pat_bindings(&local.pat, |this, ln, var, sp, id| {
- let span = local.pat.simple_span().unwrap_or(sp);
+ let span = local.pat.simple_ident().map_or(sp, |ident| ident.span);
this.warn_about_unused(span, id, ln, var);
})
}
fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) {
for arg in &body.arguments {
- arg.pat.each_binding(|_bm, hir_id, _, path1| {
- let sp = path1.span;
+ arg.pat.each_binding(|_bm, hir_id, _, ident| {
+ let sp = ident.span;
let var = self.variable(hir_id, sp);
// Ignore unused self.
- let name = path1.node;
- if name != keywords::SelfValue.name() {
+ if ident.name != keywords::SelfValue.name() {
if !self.warn_about_unused(sp, hir_id, entry_ln, var) {
if self.live_on_entry(entry_ln, var).is_none() {
self.report_dead_assign(hir_id, sp, var, true);
// and how it is located, as well as the mutability of the memory in
// which the value is stored.
//
-// *WARNING* The field `cmt.type` is NOT necessarily the same as the
+// *WARNING* The field `cmt.ty` is NOT necessarily the same as the
// result of `node_id_to_type(cmt.id)`. This is because the `id` is
// always the `id` of the node producing the type; in an expression
// like `*x`, the type of this deref node is the deref'd type (`T`),
let def_id = hir_map.local_def_id(param.id);
let origin = LifetimeDefOrigin::from_param(param);
debug!("Region::early: index={} def_id={:?}", i, def_id);
- (param.name, Region::EarlyBound(i, def_id, origin))
+ (param.name.modern(), Region::EarlyBound(i, def_id, origin))
}
fn late(hir_map: &Map, param: &GenericParam) -> (ParamName, Region) {
def_id,
origin,
);
- (param.name, Region::LateBound(depth, def_id, origin))
+ (param.name.modern(), Region::LateBound(depth, def_id, origin))
}
fn late_anon(index: &Cell<u32>) -> Region {
is_in_fn_syntax: bool,
/// List of labels in the function/method currently under analysis.
- labels_in_fn: Vec<(ast::Name, Span)>,
+ labels_in_fn: Vec<ast::Ident>,
/// Cache for cross-crate per-definition object lifetime defaults.
xcrate_object_lifetime_defaults: DefIdMap<Vec<ObjectLifetimeDefault>>,
GenericParamKind::Lifetime { .. } => {
let (name, reg) = Region::early(&self.tcx.hir, &mut index, ¶m);
if let hir::ParamName::Plain(param_name) = name {
- if param_name == keywords::UnderscoreLifetime.name() {
+ if param_name.name == keywords::UnderscoreLifetime.name() {
// Pick the elided lifetime "definition" if one exists
// and use it to make an elision scope.
elision = Some(reg);
struct GatherLabels<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
scope: ScopeRef<'a>,
- labels_in_fn: &'a mut Vec<(ast::Name, Span)>,
+ labels_in_fn: &'a mut Vec<ast::Ident>,
}
let mut gather = GatherLabels {
}
fn visit_expr(&mut self, ex: &hir::Expr) {
- if let Some((label, label_span)) = expression_label(ex) {
- for &(prior, prior_span) in &self.labels_in_fn[..] {
+ if let Some(label) = expression_label(ex) {
+ for prior_label in &self.labels_in_fn[..] {
// FIXME (#24278): non-hygienic comparison
- if label == prior {
+ if label.name == prior_label.name {
signal_shadowing_problem(
self.tcx,
- label,
- original_label(prior_span),
- shadower_label(label_span),
+ label.name,
+ original_label(prior_label.span),
+ shadower_label(label.span),
);
}
}
- check_if_label_shadows_lifetime(self.tcx, self.scope, label, label_span);
+ check_if_label_shadows_lifetime(self.tcx, self.scope, label);
- self.labels_in_fn.push((label, label_span));
+ self.labels_in_fn.push(label);
}
intravisit::walk_expr(self, ex)
}
}
- fn expression_label(ex: &hir::Expr) -> Option<(ast::Name, Span)> {
+ fn expression_label(ex: &hir::Expr) -> Option<ast::Ident> {
match ex.node {
- hir::ExprWhile(.., Some(label)) | hir::ExprLoop(_, Some(label), _) => {
- Some((label.name, label.span))
- }
+ hir::ExprWhile(.., Some(label)) |
+ hir::ExprLoop(_, Some(label), _) => Some(label.ident),
_ => None,
}
}
fn check_if_label_shadows_lifetime(
tcx: TyCtxt<'_, '_, '_>,
mut scope: ScopeRef<'_>,
- label: ast::Name,
- label_span: Span,
+ label: ast::Ident,
) {
loop {
match *scope {
ref lifetimes, s, ..
} => {
// FIXME (#24278): non-hygienic comparison
- let param_name = hir::ParamName::Plain(label);
- if let Some(def) = lifetimes.get(¶m_name) {
+ if let Some(def) = lifetimes.get(&hir::ParamName::Plain(label.modern())) {
let node_id = tcx.hir.as_local_node_id(def.id().unwrap()).unwrap();
signal_shadowing_problem(
tcx,
- label,
+ label.name,
original_lifetime(tcx.hir.span(node_id)),
- shadower_label(label_span),
+ shadower_label(label.span),
);
return;
}
generics.params.iter().find_map(|param| match param.kind {
GenericParamKind::Lifetime { .. } => {
if i == 0 {
- return Some(param.name.name().to_string());
+ return Some(param.name.ident().to_string());
}
i -= 1;
None
fn add_bounds(set: &mut Set1<hir::LifetimeName>, bounds: &[hir::GenericBound]) {
for bound in bounds {
if let hir::GenericBound::Outlives(ref lifetime) = *bound {
- set.insert(lifetime.name);
+ set.insert(lifetime.name.modern());
}
}
}
debug!("node id first={:?}", node_id);
if let Some((id, span, name)) = match self.tcx.hir.get(node_id) {
hir::map::NodeLifetime(hir_lifetime) => {
- Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.name()))
+ Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.ident()))
}
hir::map::NodeGenericParam(param) => {
- Some((param.id, param.span, param.name.name()))
+ Some((param.id, param.span, param.name.ident()))
}
_ => None,
} {
let node_id = self.tcx.hir.as_local_node_id(def_id).unwrap();
if let Some((id, span, name)) = match self.tcx.hir.get(node_id) {
hir::map::NodeLifetime(hir_lifetime) => {
- Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.name()))
+ Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.ident()))
}
hir::map::NodeGenericParam(param) => {
- Some((param.id, param.span, param.name.name()))
+ Some((param.id, param.span, param.name.ident()))
}
_ => None,
} {
LifetimeName::Param(param_name) => param_name,
_ => bug!("expected LifetimeName::Param"),
};
- if let Some(&def) = lifetimes.get(&name) {
+ if let Some(&def) = lifetimes.get(&name.modern()) {
break Some(def.shifted(late_depth));
} else {
late_depth += 1;
lifetime_ref.span,
E0261,
"use of undeclared lifetime name `{}`",
- lifetime_ref.name.name()
+ lifetime_ref
).span_label(lifetime_ref.span, "undeclared lifetime")
.emit();
}
fn visit_fn_like_elision(
&mut self,
- inputs: &'tcx [P<hir::Ty>],
+ inputs: &'tcx [hir::Ty],
output: Option<&'tcx P<hir::Ty>>,
) {
debug!("visit_fn_like_elision: enter");
}).collect();
for (i, (lifetime_i, lifetime_i_name)) in lifetimes.iter().enumerate() {
if let hir::ParamName::Plain(_) = lifetime_i_name {
- let name = lifetime_i_name.name();
+ let name = lifetime_i_name.ident().name;
if name == keywords::UnderscoreLifetime.name() ||
name == keywords::StaticLifetime.name() {
let mut err = struct_span_err!(
lifetime_i.span,
E0262,
"invalid lifetime parameter name: `{}`",
- name
+ lifetime_i.name.ident(),
);
err.span_label(
lifetime_i.span,
lifetime_j.span,
E0263,
"lifetime name `{}` declared twice in the same scope",
- lifetime_j.name.name()
+ lifetime_j.name.ident()
).span_label(lifetime_j.span, "declared twice")
.span_label(lifetime_i.span, "previous declaration here")
.emit();
lifetime_i.span.to(lt.span),
&format!(
"unnecessary lifetime parameter `{}`",
- lifetime_i.name.name(),
+ lifetime_i.name.ident(),
),
).help(&format!(
"you can use the `'static` lifetime directly, in place \
of `{}`",
- lifetime_i.name.name(),
+ lifetime_i.name.ident(),
)).emit();
}
hir::LifetimeName::Param(_)
mut old_scope: ScopeRef,
param: &'tcx hir::GenericParam,
) {
- for &(label, label_span) in &self.labels_in_fn {
+ for label in &self.labels_in_fn {
// FIXME (#24278): non-hygienic comparison
- if param.name.name() == label {
+ if param.name.ident().name == label.name {
signal_shadowing_problem(
self.tcx,
- label,
- original_label(label_span),
+ label.name,
+ original_label(label.span),
shadower_lifetime(¶m),
);
return;
Scope::Binder {
ref lifetimes, s, ..
} => {
- if let Some(&def) = lifetimes.get(¶m.name) {
+ if let Some(&def) = lifetimes.get(¶m.name.modern()) {
let node_id = self.tcx.hir.as_local_node_id(def.id().unwrap()).unwrap();
signal_shadowing_problem(
self.tcx,
- param.name.name(),
+ param.name.ident().name,
original_lifetime(self.tcx.hir.span(node_id)),
shadower_lifetime(¶m),
);
hir::GenericParamKind::Lifetime { .. } => {
if !param.bounds.is_empty() {
// `'a: 'b` means both `'a` and `'b` are referenced
- appears_in_where_clause.regions.insert(hir::LifetimeName::Param(param.name));
+ appears_in_where_clause
+ .regions.insert(hir::LifetimeName::Param(param.name.modern()));
}
}
hir::GenericParamKind::Type { .. } => {}
// - do not appear in the where-clauses
// - are not implicitly captured by `impl Trait`
for param in &generics.params {
- let lt_name = hir::LifetimeName::Param(param.name);
+ let lt_name = hir::LifetimeName::Param(param.name.modern());
// appears in the where clauses? early-bound.
if appears_in_where_clause.regions.contains(<_name) {
continue;
}
debug!("insert_late_bound_lifetimes: lifetime {:?} with id {:?} is late-bound",
- param.name.name(),
+ param.name.ident(),
param.id);
let inserted = map.late_bound.insert(param.id);
}
fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
- self.regions.insert(lifetime_ref.name);
+ self.regions.insert(lifetime_ref.name.modern());
}
}
}
fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
- self.regions.insert(lifetime_ref.name);
+ self.regions.insert(lifetime_ref.name.modern());
}
}
}
for impl_item_ref in impl_item_refs {
let impl_item = self.tcx.hir.impl_item(impl_item_ref.id);
let trait_item_def_id = self.tcx.associated_items(trait_did)
- .find(|item| item.name == impl_item.name).map(|item| item.def_id);
+ .find(|item| item.ident.name == impl_item.ident.name)
+ .map(|item| item.def_id);
if let Some(def_id) = trait_item_def_id {
// Pass `None` to skip deprecation warnings.
self.tcx.check_stability(def_id, None, impl_item.span);
use syntax::ast;
use syntax::symbol::Symbol;
use syntax_pos::Span;
+use hir::def_id::DefId;
use hir::intravisit::{Visitor, NestedVisitorMap};
use hir::intravisit;
use hir;
}
}
+impl<'a, 'tcx, 'gcx> TyCtxt<'a, 'tcx, 'gcx> {
+ /// Returns true if `item_def_id` is the `DefId` of any weak lang item.
+ /// (This impl lives inside the `weak_lang_items!` macro: the
+ /// `$(...)||+` repetition below expands to one comparison per item,
+ /// OR-ed together.)
+ pub fn is_weak_lang_item(&self, item_def_id: DefId) -> bool {
+ let lang_items = self.lang_items();
+ let did = Some(item_def_id);
+
+ $(lang_items.$name() == did)||+
+ }
+}
+
) }
weak_lang_items! {
use std::{fmt, env};
use mir;
-use middle::const_val::ConstEvalErr;
use ty::{FnSig, Ty, layout};
use ty::layout::{Size, Align};
+use rustc_data_structures::sync::Lrc;
use super::{
Pointer, Lock, AccessKind
use backtrace::Backtrace;
+use ty;
+use ty::query::TyCtxtAt;
+use errors::DiagnosticBuilder;
+
+use syntax_pos::Span;
+use syntax::ast;
+
+pub type ConstEvalResult<'tcx> = Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>>;
+
+/// A failed constant evaluation: the underlying interpreter error plus the
+/// location information needed to report it.
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
+pub struct ConstEvalErr<'tcx> {
+ /// Span of the constant whose evaluation failed.
+ pub span: Span,
+ /// The interpreter (miri) error that aborted evaluation.
+ pub error: ::mir::interpret::EvalError<'tcx>,
+ /// Interpreter call stack at the point of failure, innermost frame last
+ /// (see how `struct_generic` walks it with `.iter().rev()`).
+ pub stacktrace: Vec<FrameInfo>,
+}
+
+/// One interpreter stack frame, for "inside call to ..." diagnostics.
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
+pub struct FrameInfo {
+ pub span: Span,
+ /// Human-readable description of the callee, interpolated into the
+ /// "inside call to `{}`" label.
+ pub location: String,
+ /// Lint root of this frame, if any; used to attach `CONST_ERR` lints.
+ pub lint_root: Option<ast::NodeId>,
+}
+
+impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
+ /// Build (but do not emit) an error diagnostic for this failure.
+ /// Returns `None` for the error kinds `struct_generic` keeps silent.
+ pub fn struct_error(&self,
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ message: &str)
+ -> Option<DiagnosticBuilder<'tcx>>
+ {
+ self.struct_generic(tcx, message, None)
+ }
+
+ /// Build and emit a hard error, if this failure produces a diagnostic
+ /// at all (some kinds are intentionally suppressed).
+ pub fn report_as_error(&self,
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ message: &str
+ ) {
+ let err = self.struct_generic(tcx, message, None);
+ if let Some(mut err) = err {
+ err.emit();
+ }
+ }
+
+ /// Build and emit this failure as a `CONST_ERR` lint rooted at
+ /// `lint_root` (or at the innermost stack frame that has a lint root).
+ pub fn report_as_lint(&self,
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ message: &str,
+ lint_root: ast::NodeId,
+ ) {
+ let lint = self.struct_generic(
+ tcx,
+ message,
+ Some(lint_root),
+ );
+ if let Some(mut lint) = lint {
+ lint.emit();
+ }
+ }
+
+ /// Shared diagnostic builder behind the public reporting methods.
+ ///
+ /// Returns `None` (reports nothing) for `TypeckError`, `TooGeneric`,
+ /// `CheckMatchError` and `Layout` errors. For `ReferencedConstant`,
+ /// first recursively emits the referenced constant's own error.
+ /// With `lint_root: Some(_)` the result is a `CONST_ERR` lint;
+ /// otherwise a hard E0080 error (via `struct_error` below). In both
+ /// cases the miri error message labels `self.span` and each stack
+ /// frame gets an "inside call to ..." label.
+ fn struct_generic(
+ &self,
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ message: &str,
+ lint_root: Option<ast::NodeId>,
+ ) -> Option<DiagnosticBuilder<'tcx>> {
+ match self.error.kind {
+ ::mir::interpret::EvalErrorKind::TypeckError |
+ ::mir::interpret::EvalErrorKind::TooGeneric |
+ ::mir::interpret::EvalErrorKind::CheckMatchError |
+ ::mir::interpret::EvalErrorKind::Layout(_) => return None,
+ ::mir::interpret::EvalErrorKind::ReferencedConstant(ref inner) => {
+ inner.struct_generic(tcx, "referenced constant has errors", lint_root)?.emit();
+ },
+ _ => {},
+ }
+ trace!("reporting const eval failure at {:?}", self.span);
+ let mut err = if let Some(lint_root) = lint_root {
+ // Prefer the innermost frame that carries a lint root, falling
+ // back to the caller-supplied one.
+ let node_id = self.stacktrace
+ .iter()
+ .rev()
+ .filter_map(|frame| frame.lint_root)
+ .next()
+ .unwrap_or(lint_root);
+ tcx.struct_span_lint_node(
+ ::rustc::lint::builtin::CONST_ERR,
+ node_id,
+ tcx.span,
+ message,
+ )
+ } else {
+ struct_error(tcx, message)
+ };
+ err.span_label(self.span, self.error.to_string());
+ for FrameInfo { span, location, .. } in &self.stacktrace {
+ err.span_label(*span, format!("inside call to `{}`", location));
+ }
+ Some(err)
+ }
+}
+
+/// Build (without emitting) a generic E0080 "constant evaluation error"
+/// diagnostic at `tcx.span`, with `msg` as the primary message.
+pub fn struct_error<'a, 'gcx, 'tcx>(
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ msg: &str,
+) -> DiagnosticBuilder<'tcx> {
+ struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
+}
+
#[derive(Debug, Clone, RustcEncodable, RustcDecodable)]
pub struct EvalError<'tcx> {
pub kind: EvalErrorKind<'tcx, u64>,
UnimplementedTraitSelection,
/// Abort in case type errors are reached
TypeckError,
+ /// Resolution can fail if we are in a too generic context
+ TooGeneric,
+ CheckMatchError,
/// Cannot compute this constant because it depends on another one
/// which already produced an error
- ReferencedConstant(ConstEvalErr<'tcx>),
+ ReferencedConstant(Lrc<ConstEvalErr<'tcx>>),
GeneratorResumedAfterReturn,
GeneratorResumedAfterPanic,
}
"there were unresolved type arguments during trait selection",
TypeckError =>
"encountered constants with type errors, stopping evaluation",
+ TooGeneric =>
+ "encountered overly generic constant",
+ CheckMatchError =>
+ "match checking failed",
ReferencedConstant(_) =>
"referenced constant has errors",
Overflow(mir::BinOp::Add) => "attempt to add with overflow",
mod error;
mod value;
-pub use self::error::{EvalError, EvalResult, EvalErrorKind, AssertMessage};
+pub use self::error::{
+ EvalError, EvalResult, EvalErrorKind, AssertMessage, ConstEvalErr, struct_error,
+ FrameInfo, ConstEvalResult,
+};
pub use self::value::{Scalar, Value, ConstValue};
use ty::layout::{Align, HasDataLayout, Size};
use ty;
+use ty::subst::Substs;
+use hir::def_id::DefId;
use super::{EvalResult, Pointer, PointerArithmetic, Allocation};
/// Represents a constant value in Rust. ByVal and ScalarPair are optimizations which
/// matches Value's optimizations for easy conversions between these two types
-#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
pub enum ConstValue<'tcx> {
+ /// Never returned from the `const_eval` query, but the HIR contains these frequently in order
+ /// to allow HIR creation to happen for everything before needing to be able to run constant
+ /// evaluation
+ Unevaluated(DefId, &'tcx Substs<'tcx>),
/// Used only for types with layout::abi::Scalar ABI and ZSTs which use Scalar::undef()
Scalar(Scalar),
/// Used only for types with layout::abi::ScalarPair
#[inline]
pub fn to_byval_value(&self) -> Option<Value> {
match *self {
+ ConstValue::Unevaluated(..) |
ConstValue::ByRef(..) => None,
ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a, b)),
ConstValue::Scalar(val) => Some(Value::Scalar(val)),
#[inline]
pub fn to_scalar(&self) -> Option<Scalar> {
match *self {
- ConstValue::ByRef(..) => None,
+ ConstValue::Unevaluated(..) |
+ ConstValue::ByRef(..) |
ConstValue::ScalarPair(..) => None,
ConstValue::Scalar(val) => Some(val),
}
}
}
-/// Write a `ConstVal` in a way closer to the original source code than the `Debug` output.
+/// Write a `ConstValue` in a way closer to the original source code than the `Debug` output.
pub fn fmt_const_val<W: Write>(fmt: &mut W, const_val: &ty::Const) -> fmt::Result {
- use middle::const_val::ConstVal;
- match const_val.val {
- ConstVal::Unevaluated(..) => write!(fmt, "{:?}", const_val),
- ConstVal::Value(val) => {
- if let Some(value) = val.to_byval_value() {
- print_miri_value(value, const_val.ty, fmt)
- } else {
- write!(fmt, "{:?}:{}", val, const_val.ty)
- }
- }
+ if let Some(value) = const_val.to_byval_value() {
+ print_miri_value(value, const_val.ty, fmt)
+ } else {
+ write!(fmt, "{:?}:{}", const_val.val, const_val.ty)
}
}
use super::{ObligationCause, PredicateObligation};
pub trait TraitEngine<'tcx>: 'tcx {
- fn normalize_projection_type<'a, 'gcx>(
+ fn normalize_projection_type(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
) -> Ty<'tcx>;
- fn register_bound<'a, 'gcx>(
+ fn register_bound(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
def_id: DefId,
cause: ObligationCause<'tcx>,
);
- fn register_predicate_obligation<'a, 'gcx>(
+ fn register_predicate_obligation(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
obligation: PredicateObligation<'tcx>,
);
- fn select_all_or_error<'a, 'gcx>(
+ fn select_all_or_error(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
) -> Result<(), Vec<FulfillmentError<'tcx>>>;
- fn select_where_possible<'a, 'gcx>(
+ fn select_where_possible(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
) -> Result<(), Vec<FulfillmentError<'tcx>>>;
fn pending_obligations(&self) -> Vec<PredicateObligation<'tcx>>;
}
-impl<'a, 'gcx, 'tcx> dyn TraitEngine<'tcx> {
- pub fn new(_tcx: TyCtxt<'_, '_, 'tcx>) -> Box<Self> {
- Box::new(FulfillmentContext::new())
- }
+pub trait TraitEngineExt<'tcx> {
+ fn register_predicate_obligations(
+ &mut self,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
+ );
+}
- pub fn register_predicate_obligations<I>(
+impl<T: ?Sized + TraitEngine<'tcx>> TraitEngineExt<'tcx> for T {
+ fn register_predicate_obligations(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- obligations: I,
- ) where
- I: IntoIterator<Item = PredicateObligation<'tcx>>,
- {
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
+ ) {
for obligation in obligations {
self.register_predicate_obligation(infcx, obligation);
}
}
}
+
+impl dyn TraitEngine<'tcx> {
+ pub fn new(_tcx: TyCtxt<'_, '_, 'tcx>) -> Box<Self> {
+ Box::new(FulfillmentContext::new())
+ }
+}
..
}) => {
(self.tcx.sess.codemap().def_span(span), decl.inputs.iter()
- .map(|arg| match arg.clone().into_inner().node {
+ .map(|arg| match arg.clone().node {
hir::TyTup(ref tys) => ArgKind::Tuple(
Some(arg.span),
tys.iter()
use rustc_data_structures::obligation_forest::{ObligationProcessor, ProcessResult};
use std::marker::PhantomData;
use hir::def_id::DefId;
-use middle::const_val::{ConstEvalErr, ErrKind};
+use mir::interpret::ConstEvalErr;
+use mir::interpret::EvalErrorKind;
use super::CodeAmbiguity;
use super::CodeProjectionError;
use super::CodeSelectionError;
-use super::engine::TraitEngine;
+use super::engine::{TraitEngine, TraitEngineExt};
use super::{FulfillmentError, FulfillmentErrorCode};
use super::{ObligationCause, PredicateObligation, Obligation};
use super::project;
}
}
- pub fn register_predicate_obligations<I>(&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- obligations: I)
- where I: IntoIterator<Item = PredicateObligation<'tcx>>
- {
- for obligation in obligations {
- self.register_predicate_obligation(infcx, obligation);
- }
- }
-
/// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it
/// only attempts to select obligations that haven't been seen before.
fn select(&mut self, selcx: &mut SelectionContext<'a, 'gcx, 'tcx>)
ProcessResult::Error(
CodeSelectionError(ConstEvalFailure(ConstEvalErr {
span: obligation.cause.span,
- kind: ErrKind::CouldNotResolve.into(),
- }))
+ error: EvalErrorKind::TooGeneric.into(),
+ stacktrace: vec![],
+ }.into()))
)
}
},
use hir::def_id::DefId;
use infer::outlives::env::OutlivesEnvironment;
use middle::region;
-use middle::const_val::ConstEvalErr;
+use mir::interpret::ConstEvalErr;
use ty::subst::Substs;
use ty::{self, AdtKind, Slice, Ty, TyCtxt, GenericParamDefKind, ToPredicate};
use ty::error::{ExpectedFound, TypeError};
use ty::fold::{TypeFolder, TypeFoldable, TypeVisitor};
-use infer::canonical::{Canonical, Canonicalize};
use infer::{InferCtxt};
use rustc_data_structures::sync::Lrc;
pub use self::select::{EvaluationResult, IntercrateAmbiguityCause, OverflowError};
pub use self::specialize::{OverlapError, specialization_graph, translate_substs};
pub use self::specialize::{SpecializesCache, find_associated_item};
-pub use self::engine::TraitEngine;
+pub use self::engine::{TraitEngine, TraitEngineExt};
pub use self::util::elaborate_predicates;
pub use self::util::supertraits;
pub use self::util::Supertraits;
ty::PolyTraitRef<'tcx>,
ty::error::TypeError<'tcx>),
TraitNotObjectSafe(DefId),
- ConstEvalFailure(ConstEvalErr<'tcx>),
+ ConstEvalFailure(Lrc<ConstEvalErr<'tcx>>),
Overflow,
}
};
}
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, Goal<'tcx>> {
- // we ought to intern this, but I'm too lazy just now
- type Canonicalized = Canonical<'gcx, ty::ParamEnvAnd<'gcx, Goal<'gcx>>>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
-
pub trait ExClauseFold<'tcx>
where
Self: chalk_engine::context::Context + Clone,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
) -> Option<Self::LiftedExClause>;
}
-
-impl<'gcx: 'tcx, 'tcx, C> Canonicalize<'gcx, 'tcx> for chalk_engine::ExClause<C>
-where
- C: chalk_engine::context::Context + Clone,
- C: ExClauseLift<'gcx> + ExClauseFold<'tcx>,
- C::Substitution: Clone,
- C::RegionConstraint: Clone,
-{
- type Canonicalized = Canonical<'gcx, C::LiftedExClause>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
.filter(|item| item.kind == ty::AssociatedKind::Method)
.filter_map(|item| {
self.object_safety_violation_for_method(trait_def_id, &item)
- .map(|code| ObjectSafetyViolation::Method(item.name, code))
+ .map(|code| ObjectSafetyViolation::Method(item.ident.name, code))
}).filter(|violation| {
if let ObjectSafetyViolation::Method(_,
MethodViolationCode::WhereClauseReferencesSelf(span)) = violation {
violations.extend(self.associated_items(trait_def_id)
.filter(|item| item.kind == ty::AssociatedKind::Const)
- .map(|item| ObjectSafetyViolation::AssociatedConst(item.name)));
+ .map(|item| ObjectSafetyViolation::AssociatedConst(item.ident.name)));
debug!("object_safety_violations_for_trait(trait_def_id={:?}) = {:?}",
trait_def_id,
use hir::def_id::DefId;
use infer::{InferCtxt, InferOk};
use infer::type_variable::TypeVariableOrigin;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use mir::interpret::{GlobalId};
use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap};
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
use ty::subst::{Subst, Substs};
use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder};
}
fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
- if let ConstVal::Unevaluated(def_id, substs) = constant.val {
+ if let ConstValue::Unevaluated(def_id, substs) = constant.val {
let tcx = self.selcx.tcx().global_tcx();
if let Some(param_env) = self.tcx().lift_to_global(&self.param_env) {
if substs.needs_infer() || substs.has_skol() {
obligation.predicate.self_ty(),
gen_sig)
.map_bound(|(trait_ref, yield_ty, return_ty)| {
- let name = tcx.associated_item(obligation.predicate.item_def_id).name;
- let ty = if name == Symbol::intern("Return") {
+ let name = tcx.associated_item(obligation.predicate.item_def_id).ident.name;
+ let ty = if name == "Return" {
return_ty
- } else if name == Symbol::intern("Yield") {
+ } else if name == "Yield" {
yield_ty
} else {
bug!()
projection_ty: ty::ProjectionTy::from_ref_and_name(
tcx,
trait_ref,
- Symbol::intern(FN_OUTPUT_NAME),
+ Ident::from_str(FN_OUTPUT_NAME),
),
ty: ret_type
}
// checker method `check_impl_items_against_trait`, so here we
// just return TyError.
debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
- assoc_ty.item.name,
+ assoc_ty.item.ident,
obligation.predicate);
tcx.types.err
} else {
-> specialization_graph::NodeItem<ty::AssociatedItem>
{
let tcx = selcx.tcx();
- let assoc_ty_name = tcx.associated_item(assoc_ty_def_id).name;
+ let assoc_ty_name = tcx.associated_item(assoc_ty_def_id).ident;
let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id;
let trait_def = tcx.trait_def(trait_def_id);
let impl_node = specialization_graph::Node::Impl(impl_def_id);
for item in impl_node.items(tcx) {
if item.kind == ty::AssociatedKind::Type &&
- tcx.hygienic_eq(item.name, assoc_ty_name, trait_def_id) {
+ tcx.hygienic_eq(item.ident, assoc_ty_name, trait_def_id) {
return specialization_graph::NodeItem {
node: specialization_graph::Node::Impl(impl_def_id),
item,
// except according to those terms.
use infer::at::At;
-use infer::canonical::{Canonical, Canonicalize, QueryResult};
use infer::InferOk;
use std::iter::FromIterator;
-use traits::query::CanonicalTyGoal;
-use ty::{self, Ty, TyCtxt};
+use syntax::codemap::Span;
use ty::subst::Kind;
-use rustc_data_structures::sync::Lrc;
+use ty::{self, Ty, TyCtxt};
impl<'cx, 'gcx, 'tcx> At<'cx, 'gcx, 'tcx> {
/// Given a type `ty` of some value being dropped, computes a set
// any destructor.
let tcx = self.infcx.tcx;
if trivial_dropck_outlives(tcx, ty) {
- return InferOk { value: vec![], obligations: vec![] };
+ return InferOk {
+ value: vec![],
+ obligations: vec![],
+ };
}
let gcx = tcx.global_tcx();
debug!("c_ty = {:?}", c_ty);
match &gcx.dropck_outlives(c_ty) {
Ok(result) if result.is_proven() => {
- match self.infcx.instantiate_query_result(
+ match self.infcx.instantiate_query_result_and_region_obligations(
self.cause,
self.param_env,
&orig_values,
result,
) {
- Ok(InferOk {
- value: DropckOutlivesResult { kinds, overflows },
- obligations,
- }) => {
- for overflow_ty in overflows.into_iter().take(1) {
- let mut err = struct_span_err!(
- tcx.sess,
- span,
- E0320,
- "overflow while adding drop-check rules for {}",
- self.infcx.resolve_type_vars_if_possible(&ty),
- );
- err.note(&format!("overflowed on {}", overflow_ty));
- err.emit();
- }
-
+ Ok(InferOk { value, obligations }) => {
+ let ty = self.infcx.resolve_type_vars_if_possible(&ty);
+ let kinds = value.into_kinds_reporting_overflows(tcx, span, ty);
return InferOk {
value: kinds,
obligations,
}
}
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
pub struct DropckOutlivesResult<'tcx> {
pub kinds: Vec<Kind<'tcx>>,
pub overflows: Vec<Ty<'tcx>>,
}
+impl<'tcx> DropckOutlivesResult<'tcx> {
+ pub fn report_overflows(
+ &self,
+ tcx: TyCtxt<'_, '_, 'tcx>,
+ span: Span,
+ ty: Ty<'tcx>,
+ ) {
+ for overflow_ty in self.overflows.iter().take(1) {
+ let mut err = struct_span_err!(
+ tcx.sess,
+ span,
+ E0320,
+ "overflow while adding drop-check rules for {}",
+ ty,
+ );
+ err.note(&format!("overflowed on {}", overflow_ty));
+ err.emit();
+ }
+ }
+
+ pub fn into_kinds_reporting_overflows(
+ self,
+ tcx: TyCtxt<'_, '_, 'tcx>,
+ span: Span,
+ ty: Ty<'tcx>,
+ ) -> Vec<Kind<'tcx>> {
+ self.report_overflows(tcx, span, ty);
+ let DropckOutlivesResult { kinds, overflows: _ } = self;
+ kinds
+ }
+}
+
/// A set of constraints that need to be satisfied in order for
/// a type to be valid for destruction.
#[derive(Clone, Debug)]
result
}
}
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, Ty<'tcx>> {
- type Canonicalized = CanonicalTyGoal<'gcx>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
-
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for DropckOutlivesResult<'tcx> {
kinds, overflows
kinds, overflows
});
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for QueryResult<'tcx, DropckOutlivesResult<'tcx>> {
- // we ought to intern this, but I'm too lazy just now
- type Canonicalized = Lrc<Canonical<'gcx, QueryResult<'gcx, DropckOutlivesResult<'gcx>>>>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- Lrc::new(value)
- }
-}
-
impl_stable_hash_for!(struct DtorckConstraint<'tcx> {
outlives,
dtorck_types,
///
/// Note also that `needs_drop` requires a "global" type (i.e., one
/// with erased regions), but this function does not.
-fn trivial_dropck_outlives<'cx, 'tcx>(tcx: TyCtxt<'cx, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
+pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
match ty.sty {
// None of these types have a destructor and hence they do not
// require anything in particular to outlive the dtor's
// except according to those terms.
use infer::InferCtxt;
-use infer::canonical::{Canonical, Canonicalize};
use traits::{EvaluationResult, PredicateObligation, SelectionContext,
TraitQueryMode, OverflowError};
-use traits::query::CanonicalPredicateGoal;
-use ty::{ParamEnvAnd, Predicate, TyCtxt};
impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
/// Evaluates whether the predicate can be satisfied (by any means)
}
}
}
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ParamEnvAnd<'tcx, Predicate<'tcx>> {
- type Canonicalized = CanonicalPredicateGoal<'gcx>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
//! `librustc_traits`.
use infer::canonical::Canonical;
+use ty::error::TypeError;
use ty::{self, Ty};
pub mod dropck_outlives;
pub mod evaluate_obligation;
pub mod normalize;
pub mod normalize_erasing_regions;
+pub mod type_op;
pub type CanonicalProjectionGoal<'tcx> =
Canonical<'tcx, ty::ParamEnvAnd<'tcx, ty::ProjectionTy<'tcx>>>;
pub type CanonicalPredicateGoal<'tcx> =
Canonical<'tcx, ty::ParamEnvAnd<'tcx, ty::Predicate<'tcx>>>;
+pub type CanonicalTypeOpEqGoal<'tcx> =
+ Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::eq::Eq<'tcx>>>;
+
+pub type CanonicalTypeOpSubtypeGoal<'tcx> =
+ Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::subtype::Subtype<'tcx>>>;
+
+pub type CanonicalTypeOpProvePredicateGoal<'tcx> =
+ Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::prove_predicate::ProvePredicate<'tcx>>>;
+
+pub type CanonicalTypeOpNormalizeGoal<'tcx, T> =
+ Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::normalize::Normalize<T>>>;
+
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct NoSolution;
pub type Fallible<T> = Result<T, NoSolution>;
+impl<'tcx> From<TypeError<'tcx>> for NoSolution {
+ fn from(_: TypeError<'tcx>) -> NoSolution {
+ NoSolution
+ }
+}
+
impl_stable_hash_for!(struct NoSolution { });
use infer::{InferCtxt, InferOk};
use infer::at::At;
-use infer::canonical::{Canonical, Canonicalize, QueryResult};
-use middle::const_val::ConstVal;
-use mir::interpret::GlobalId;
-use rustc_data_structures::sync::Lrc;
+use mir::interpret::{GlobalId, ConstValue};
use traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
-use traits::query::CanonicalProjectionGoal;
use traits::project::Normalized;
use ty::{self, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder};
let concrete_ty = generic_ty.subst(self.tcx(), substs);
self.anon_depth += 1;
if concrete_ty == ty {
- println!("generic_ty: {:#?}", generic_ty);
- println!("substs {:#?}", substs);
+ bug!("infinite recursion generic_ty: {:#?}, substs: {:#?}, \
+ concrete_ty: {:#?}, ty: {:#?}", generic_ty, substs, concrete_ty,
+ ty);
}
- assert_ne!(concrete_ty, ty, "infinite recursion");
let folded_ty = self.fold_ty(concrete_ty);
self.anon_depth -= 1;
folded_ty
return ty;
}
- match self.infcx.instantiate_query_result(
+ match self.infcx.instantiate_query_result_and_region_obligations(
self.cause,
self.param_env,
&orig_values,
}
fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
- if let ConstVal::Unevaluated(def_id, substs) = constant.val {
+ if let ConstValue::Unevaluated(def_id, substs) = constant.val {
let tcx = self.infcx.tcx.global_tcx();
if let Some(param_env) = self.tcx().lift_to_global(&self.param_env) {
if substs.needs_infer() || substs.has_skol() {
}
}
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, ty::ProjectionTy<'tcx>> {
- type Canonicalized = CanonicalProjectionGoal<'gcx>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for QueryResult<'tcx, NormalizationResult<'tcx>> {
- // we ought to intern this, but I'm too lazy just now
- type Canonicalized = Lrc<Canonical<'gcx, QueryResult<'gcx, NormalizationResult<'gcx>>>>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- Lrc::new(value)
- }
-}
-
impl_stable_hash_for!(struct NormalizationResult<'tcx> {
normalized_ty
});
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::{InferCtxt, InferOk};
+use std::fmt;
+use traits::query::Fallible;
+
+use infer::canonical::query_result;
+use infer::canonical::QueryRegionConstraint;
+use std::rc::Rc;
+use syntax::codemap::DUMMY_SP;
+use traits::{ObligationCause, TraitEngine, TraitEngineExt};
+
+pub struct CustomTypeOp<F, G> {
+ closure: F,
+ description: G,
+}
+
+impl<F, G> CustomTypeOp<F, G> {
+ pub fn new<'gcx, 'tcx, R>(closure: F, description: G) -> Self
+ where
+ F: FnOnce(&InferCtxt<'_, 'gcx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
+ G: Fn() -> String,
+ {
+ CustomTypeOp {
+ closure,
+ description,
+ }
+ }
+}
+
+impl<'gcx, 'tcx, F, R, G> super::TypeOp<'gcx, 'tcx> for CustomTypeOp<F, G>
+where
+ F: for<'a, 'cx> FnOnce(&'a InferCtxt<'cx, 'gcx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
+ G: Fn() -> String,
+{
+ type Output = R;
+
+ /// Processes the operation and all resulting obligations,
+ /// returning the final result along with any region constraints
+ /// (they will be given over to the NLL region solver).
+ fn fully_perform(
+ self,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+ if cfg!(debug_assertions) {
+ info!("fully_perform({:?})", self);
+ }
+
+ scrape_region_constraints(infcx, || Ok((self.closure)(infcx)?))
+ }
+}
+
+impl<F, G> fmt::Debug for CustomTypeOp<F, G>
+where
+ G: Fn() -> String,
+{
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", (self.description)())
+ }
+}
+
+/// Executes `op` and then scrapes out all the "old style" region
+/// constraints that result, creating query-region-constraints.
+fn scrape_region_constraints<'gcx, 'tcx, R>(
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ op: impl FnOnce() -> Fallible<InferOk<'tcx, R>>,
+) -> Fallible<(R, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+ let mut fulfill_cx = TraitEngine::new(infcx.tcx);
+ let dummy_body_id = ObligationCause::dummy().body_id;
+ let InferOk { value, obligations } = infcx.commit_if_ok(|_| op())?;
+ debug_assert!(obligations.iter().all(|o| o.cause.body_id == dummy_body_id));
+ fulfill_cx.register_predicate_obligations(infcx, obligations);
+ if let Err(e) = fulfill_cx.select_all_or_error(infcx) {
+ infcx.tcx.sess.diagnostic().delay_span_bug(
+ DUMMY_SP,
+ &format!("errors selecting obligation during MIR typeck: {:?}", e),
+ );
+ }
+
+ let region_obligations = infcx.take_registered_region_obligations();
+
+ let region_constraint_data = infcx.take_and_reset_region_constraints();
+
+ let outlives =
+    query_result::make_query_outlives(infcx.tcx, region_obligations, &region_constraint_data);
+
+ if outlives.is_empty() {
+ Ok((value, None))
+ } else {
+ Ok((value, Some(Rc::new(outlives))))
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Eq<'tcx> {
+ pub a: Ty<'tcx>,
+ pub b: Ty<'tcx>,
+}
+
+impl<'tcx> Eq<'tcx> {
+ pub fn new(a: Ty<'tcx>, b: Ty<'tcx>) -> Self {
+ Self { a, b }
+ }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Eq<'tcx> {
+ type QueryResult = ();
+
+ fn try_fast_path(
+ _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ key: &ParamEnvAnd<'tcx, Eq<'tcx>>,
+ ) -> Option<Self::QueryResult> {
+ if key.value.a == key.value.b {
+ Some(())
+ } else {
+ None
+ }
+ }
+
+ fn perform_query(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+ tcx.type_op_eq(canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, ()>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+ v
+ }
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for Eq<'tcx> {
+ a,
+ b,
+ }
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx> Lift<'tcx> for Eq<'a> {
+ type Lifted = Eq<'tcx>;
+ a,
+ b,
+ }
+}
+
+impl_stable_hash_for! {
+ struct Eq<'tcx> { a, b }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryRegionConstraint,
+ QueryResult};
+use infer::{InferCtxt, InferOk};
+use std::fmt;
+use std::rc::Rc;
+use traits::query::Fallible;
+use traits::ObligationCause;
+use ty::fold::TypeFoldable;
+use ty::{Lift, ParamEnvAnd, TyCtxt};
+
+pub mod custom;
+pub mod eq;
+pub mod normalize;
+pub mod outlives;
+pub mod prove_predicate;
+use self::prove_predicate::ProvePredicate;
+pub mod subtype;
+
+/// "Type ops" are used in NLL to perform some particular action and
+/// extract out the resulting region constraints (or an error if it
+/// cannot be completed).
+pub trait TypeOp<'gcx, 'tcx>: Sized + fmt::Debug {
+ type Output;
+
+ /// Processes the operation and all resulting obligations,
+ /// returning the final result along with any region constraints
+ /// (they will be given over to the NLL region solver).
+ fn fully_perform(
+ self,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)>;
+}
+
+/// "Query type ops" are type ops that are implemented using a
+/// [canonical query][c]. The `Self` type here contains the kernel of
+/// information needed to do the operation -- `TypeOp` is actually
+/// implemented for `ParamEnvAnd<Self>`, since we always need to bring
+/// along a parameter environment as well. For query type-ops, we will
+/// first canonicalize the key and then invoke the query on the tcx,
+/// which produces the resulting query region constraints.
+///
+/// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+pub trait QueryTypeOp<'gcx: 'tcx, 'tcx>:
+ fmt::Debug + Sized + TypeFoldable<'tcx> + Lift<'gcx>
+{
+ type QueryResult: TypeFoldable<'tcx> + Lift<'gcx>;
+
+ /// Give query the option for a simple fast path that never
+ /// actually hits the tcx cache lookup etc. Return `Some(r)` with
+ /// a final result or `None` to do the full path.
+ fn try_fast_path(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ key: &ParamEnvAnd<'tcx, Self>,
+ ) -> Option<Self::QueryResult>;
+
+ /// Performs the actual query with the canonicalized key -- the
+ /// real work happens here. This method is not given an `infcx`
+ /// because it shouldn't need one -- and if it had access to one,
+ /// it might do things like invoke `sub_regions`, which would be
+ /// bad, because it would create subregion relationships that are
+ /// not captured in the return value.
+ fn perform_query(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>>;
+
+ /// Casts a lifted query result (which is in the gcx lifetime)
+ /// into the tcx lifetime. This is always just an identity cast,
+ /// but the generic code doesn't realize it -- put another way, in
+ /// the generic code, we have a `Lifted<'gcx, Self::QueryResult>`
+ /// and we want to convert that to a `Self::QueryResult`. This is
+ /// not a priori valid, so we can't do it -- but in practice, it
+ /// is always a no-op (e.g., the lifted form of a type,
+ /// `Ty<'gcx>`, is a subtype of `Ty<'tcx>`). So we have to push
+ /// the operation into the impls that know more specifically what
+ /// `QueryResult` is. This operation would (maybe) be nicer with
+ /// something like HKTs or GATs, since then we could make
+ /// `QueryResult` parametric and `'gcx` and `'tcx` etc.
+ fn shrink_to_tcx_lifetime(
+ lifted_query_result: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>>;
+
+ fn fully_perform_into(
+ query_key: ParamEnvAnd<'tcx, Self>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
+ ) -> Fallible<Self::QueryResult> {
+ if let Some(result) = QueryTypeOp::try_fast_path(infcx.tcx, &query_key) {
+ return Ok(result);
+ }
+
+ // FIXME(#33684) -- We need to use
+ // `canonicalize_hr_query_hack` here because of things
+ // like the subtype query, which go awry around
+ // `'static` otherwise.
+ let (canonical_self, canonical_var_values) = infcx.canonicalize_hr_query_hack(&query_key);
+ let canonical_result = Self::perform_query(infcx.tcx, canonical_self)?;
+ let canonical_result = Self::shrink_to_tcx_lifetime(&canonical_result);
+
+ let param_env = query_key.param_env;
+
+ let InferOk { value, obligations } = infcx
+ .instantiate_nll_query_result_and_region_obligations(
+ &ObligationCause::dummy(),
+ param_env,
+ &canonical_var_values,
+ canonical_result,
+ output_query_region_constraints,
+ )?;
+
+ // Typically, instantiating NLL query results does not
+ // create obligations. However, in some cases there
+        // are unresolved type variables, and unifying them *can*
+ // create obligations. In that case, we have to go
+ // fulfill them. We do this via a (recursive) query.
+ for obligation in obligations {
+ let () = ProvePredicate::fully_perform_into(
+ obligation
+ .param_env
+ .and(ProvePredicate::new(obligation.predicate)),
+ infcx,
+ output_query_region_constraints,
+ )?;
+ }
+
+ Ok(value)
+ }
+}
+
+impl<'gcx: 'tcx, 'tcx, Q> TypeOp<'gcx, 'tcx> for ParamEnvAnd<'tcx, Q>
+where
+ Q: QueryTypeOp<'gcx, 'tcx>,
+{
+ type Output = Q::QueryResult;
+
+ fn fully_perform(
+ self,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+ let mut qrc = vec![];
+ let r = Q::fully_perform_into(self, infcx, &mut qrc)?;
+
+ // Promote the final query-region-constraints into a
+ // (optional) ref-counted vector:
+ let opt_qrc = if qrc.is_empty() {
+ None
+ } else {
+ Some(Rc::new(qrc))
+ };
+
+ Ok((r, opt_qrc))
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use std::fmt;
+use traits::query::Fallible;
+use ty::fold::TypeFoldable;
+use ty::{self, Lift, ParamEnvAnd, Ty, TyCtxt};
+
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Normalize<T> {
+ pub value: T,
+}
+
+impl<'tcx, T> Normalize<T>
+where
+ T: fmt::Debug + TypeFoldable<'tcx>,
+{
+ pub fn new(value: T) -> Self {
+ Self { value }
+ }
+}
+
+impl<'gcx: 'tcx, 'tcx, T> super::QueryTypeOp<'gcx, 'tcx> for Normalize<T>
+where
+ T: Normalizable<'gcx, 'tcx>,
+{
+ type QueryResult = T;
+
+ fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<T> {
+ if !key.value.value.has_projections() {
+ Some(key.value.value)
+ } else {
+ None
+ }
+ }
+
+ fn perform_query(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+ T::type_op_method(tcx, canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, T>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, T>> {
+ T::shrink_to_tcx_lifetime(v)
+ }
+}
+
+pub trait Normalizable<'gcx, 'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx> + Copy {
+ fn type_op_method(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>>;
+
+ /// Convert from the `'gcx` (lifted) form of `Self` into the `tcx`
+ /// form of `Self`.
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>>;
+}
+
+impl Normalizable<'gcx, 'tcx> for Ty<'tcx>
+where
+ 'gcx: 'tcx,
+{
+ fn type_op_method(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+ tcx.type_op_normalize_ty(canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+ v
+ }
+}
+
+// Normalizing a predicate goes through the `type_op_normalize_predicate` query.
+impl Normalizable<'gcx, 'tcx> for ty::Predicate<'tcx>
+where
+ 'gcx: 'tcx,
+{
+ fn type_op_method(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+ tcx.type_op_normalize_predicate(canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+ v
+ }
+}
+
+// Normalizing a polymorphic fn signature goes through the
+// `type_op_normalize_poly_fn_sig` query.
+impl Normalizable<'gcx, 'tcx> for ty::PolyFnSig<'tcx>
+where
+ 'gcx: 'tcx,
+{
+ fn type_op_method(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+ tcx.type_op_normalize_poly_fn_sig(canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+ v
+ }
+}
+
+// Normalizing a (non-poly) fn signature goes through the
+// `type_op_normalize_fn_sig` query.
+impl Normalizable<'gcx, 'tcx> for ty::FnSig<'tcx>
+where
+ 'gcx: 'tcx,
+{
+ fn type_op_method(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+ tcx.type_op_normalize_fn_sig(canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, Self>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+ v
+ }
+}
+
+// Boilerplate impls (type folding, lifting, stable hashing) that simply
+// delegate to the single `value` field.
+BraceStructTypeFoldableImpl! {
+ impl<'tcx, T> TypeFoldable<'tcx> for Normalize<T> {
+ value,
+ } where T: TypeFoldable<'tcx>,
+}
+
+BraceStructLiftImpl! {
+ impl<'tcx, T> Lift<'tcx> for Normalize<T> {
+ type Lifted = Normalize<T::Lifted>;
+ value,
+ } where T: Lift<'tcx>,
+}
+
+impl_stable_hash_for! {
+ impl<'tcx, T> for struct Normalize<T> {
+ value
+ }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::dropck_outlives::trivial_dropck_outlives;
+use traits::query::dropck_outlives::DropckOutlivesResult;
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+/// A "type op" that computes the `DropckOutlivesResult` for
+/// `dropped_ty` via the underlying `dropck_outlives` query.
+#[derive(Copy, Clone, Debug)]
+pub struct DropckOutlives<'tcx> {
+ dropped_ty: Ty<'tcx>,
+}
+
+impl<'tcx> DropckOutlives<'tcx> {
+ /// Creates a type op for the type whose drop is being checked.
+ pub fn new(dropped_ty: Ty<'tcx>) -> Self {
+ DropckOutlives { dropped_ty }
+ }
+}
+
+impl super::QueryTypeOp<'gcx, 'tcx> for DropckOutlives<'tcx>
+where
+ 'gcx: 'tcx,
+{
+ type QueryResult = DropckOutlivesResult<'tcx>;
+
+ /// Fast path: types for which `trivial_dropck_outlives` holds
+ /// impose no constraints, so the (empty) default result is
+ /// returned without running the query.
+ fn try_fast_path(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ key: &ParamEnvAnd<'tcx, Self>,
+ ) -> Option<Self::QueryResult> {
+ if trivial_dropck_outlives(tcx, key.value.dropped_ty) {
+ Some(DropckOutlivesResult::default())
+ } else {
+ None
+ }
+ }
+
+ fn perform_query(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+ // Subtle: note that we are not invoking
+ // `infcx.at(...).dropck_outlives(...)` here, but rather the
+ // underlying `dropck_outlives` query. This same underlying
+ // query is also used by the
+ // `infcx.at(...).dropck_outlives(...)` fn. Avoiding the
+ // wrapper means we don't need an infcx in this code, which is
+ // good because the interface doesn't give us one (so that we
+ // know we are not registering any subregion relations or
+ // other things).
+
+ // FIXME convert to the type expected by the `dropck_outlives`
+ // query. This should eventually be fixed by changing the
+ // *underlying query*.
+ //
+ // Unwrap the canonicalized `ParamEnvAnd<DropckOutlives>` and
+ // rebuild it as the `ParamEnvAnd<Ty>` shape the query expects,
+ // preserving the canonical variables.
+ let Canonical {
+ variables,
+ value:
+ ParamEnvAnd {
+ param_env,
+ value: DropckOutlives { dropped_ty },
+ },
+ } = canonicalized;
+ let canonicalized = Canonical {
+ variables,
+ value: param_env.and(dropped_ty),
+ };
+
+ tcx.dropck_outlives(canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ lifted_query_result: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>> {
+ lifted_query_result
+ }
+}
+
+// Boilerplate impls (type folding, lifting, stable hashing) that simply
+// delegate to the single `dropped_ty` field.
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for DropckOutlives<'tcx> {
+ dropped_ty
+ }
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx> Lift<'tcx> for DropckOutlives<'a> {
+ type Lifted = DropckOutlives<'tcx>;
+ dropped_ty
+ }
+}
+
+impl_stable_hash_for! {
+ struct DropckOutlives<'tcx> { dropped_ty }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Predicate, TyCtxt};
+
+/// A "type op" that tries to prove `predicate` via the
+/// `type_op_prove_predicate` query; its result carries no value
+/// (`QueryResult = ()`), only success/failure.
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct ProvePredicate<'tcx> {
+ pub predicate: Predicate<'tcx>,
+}
+
+impl<'tcx> ProvePredicate<'tcx> {
+ /// Creates a type op for proving `predicate`.
+ pub fn new(predicate: Predicate<'tcx>) -> Self {
+ ProvePredicate { predicate }
+ }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for ProvePredicate<'tcx> {
+ type QueryResult = ();
+
+ /// No fast path: proving a predicate always requires running the
+ /// query.
+ fn try_fast_path(
+ _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ _key: &ParamEnvAnd<'tcx, Self>,
+ ) -> Option<Self::QueryResult> {
+ None
+ }
+
+ fn perform_query(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+ tcx.type_op_prove_predicate(canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, ()>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+ v
+ }
+}
+
+// Boilerplate impls (type folding, lifting, stable hashing) that simply
+// delegate to the single `predicate` field.
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for ProvePredicate<'tcx> {
+ predicate,
+ }
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx> Lift<'tcx> for ProvePredicate<'a> {
+ type Lifted = ProvePredicate<'tcx>;
+ predicate,
+ }
+}
+
+impl_stable_hash_for! {
+ struct ProvePredicate<'tcx> { predicate }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+/// A "type op" asserting `sub <: sup`, checked via the
+/// `type_op_subtype` query; its result carries no value
+/// (`QueryResult = ()`), only success/failure.
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Subtype<'tcx> {
+ pub sub: Ty<'tcx>,
+ pub sup: Ty<'tcx>,
+}
+
+impl<'tcx> Subtype<'tcx> {
+ /// Creates a type op asserting that `sub` is a subtype of `sup`.
+ pub fn new(sub: Ty<'tcx>, sup: Ty<'tcx>) -> Self {
+ Self {
+ sub,
+ sup,
+ }
+ }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Subtype<'tcx> {
+ type QueryResult = ();
+
+ /// Fast path: if `sub` and `sup` are the same type, subtyping
+ /// trivially holds and the query can be skipped.
+ fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> {
+ if key.value.sub == key.value.sup {
+ Some(())
+ } else {
+ None
+ }
+ }
+
+ fn perform_query(
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
+ canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+ ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+ tcx.type_op_subtype(canonicalized)
+ }
+
+ fn shrink_to_tcx_lifetime(
+ v: &'a CanonicalizedQueryResult<'gcx, ()>,
+ ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+ v
+ }
+}
+
+// Boilerplate impls (type folding, lifting, stable hashing) that simply
+// delegate to the `sub` and `sup` fields.
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for Subtype<'tcx> {
+ sub,
+ sup,
+ }
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx> Lift<'tcx> for Subtype<'a> {
+ type Lifted = Subtype<'tcx>;
+ sub,
+ sup,
+ }
+}
+
+impl_stable_hash_for! {
+ struct Subtype<'tcx> { sub, sup }
+}
let trait_def = tcx.trait_def(trait_def_id);
let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
- match ancestors.defs(tcx, item.name, item.kind, trait_def_id).next() {
+ match ancestors.defs(tcx, item.ident, item.kind, trait_def_id).next() {
Some(node_item) => {
let substs = tcx.infer_ctxt().enter(|infcx| {
let param_env = ty::ParamEnv::reveal_all();
use ty::{self, TyCtxt, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType};
use rustc_data_structures::sync::Lrc;
-use syntax::ast::Name;
+use syntax::ast::Ident;
use util::captures::Captures;
use util::nodemap::{DefIdMap, FxHashMap};
pub fn defs(
self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_item_name: Name,
+ trait_item_name: Ident,
trait_item_kind: ty::AssociatedKind,
trait_def_id: DefId,
) -> impl Iterator<Item = NodeItem<ty::AssociatedItem>> + Captures<'gcx> + Captures<'tcx> + 'a {
self.flat_map(move |node| {
node.items(tcx).filter(move |impl_item| {
impl_item.kind == trait_item_kind &&
- tcx.hygienic_eq(impl_item.name, trait_item_name, trait_def_id)
+ tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id)
}).map(move |item| NodeItem { node: node, item: item })
})
}
})
}
super::TraitNotObjectSafe(def_id) => Some(super::TraitNotObjectSafe(def_id)),
- super::ConstEvalFailure(ref err) => tcx.lift(err).map(super::ConstEvalFailure),
+ super::ConstEvalFailure(ref err) => tcx.lift(&**err).map(|err| super::ConstEvalFailure(
+ err.into(),
+ )),
super::Overflow => bug!(), // FIXME: ape ConstEvalFailure?
}
}
fn position(&self) -> usize;
}
-impl<'buf> TyEncoder for opaque::Encoder<'buf> {
+impl TyEncoder for opaque::Encoder {
#[inline]
fn position(&self) -> usize {
self.position()
use std::cmp::Ordering;
use std::collections::hash_map::{self, Entry};
use std::hash::{Hash, Hasher};
+use std::fmt;
use std::mem;
use std::ops::Deref;
use std::iter;
/// contain the TypeVariants key or if the address of the interned
/// pointer differs. The latter case is possible if a primitive type,
/// e.g. `()` or `u8`, was interned in a different context.
-pub trait Lift<'tcx> {
- type Lifted: 'tcx;
+pub trait Lift<'tcx>: fmt::Debug {
+ type Lifted: fmt::Debug + 'tcx;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use ty::subst::Substs;
use ty::{self, Ty, TypeFlags, TypeFoldable};
fn add_const(&mut self, constant: &ty::Const) {
self.add_ty(constant.ty);
- match constant.val {
- ConstVal::Value(_) => {}
- ConstVal::Unevaluated(_, substs) => {
- self.add_flags(TypeFlags::HAS_PROJECTION);
- self.add_substs(substs);
- }
+ if let ConstValue::Unevaluated(_, substs) = constant.val {
+ self.add_flags(TypeFlags::HAS_PROJECTION);
+ self.add_substs(substs);
}
}
//! These methods return true to indicate that the visitor has found what it is looking for
//! and does not need to visit anything else.
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use hir::def_id::DefId;
use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
}
fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> bool {
- if let ConstVal::Unevaluated(..) = c.val {
+ if let ConstValue::Unevaluated(..) = c.val {
let projection_flags = TypeFlags::HAS_NORMALIZABLE_PROJECTION |
TypeFlags::HAS_PROJECTION;
if projection_flags.intersects(self.flags) {
use hir::svh::Svh;
use ich::Fingerprint;
use ich::StableHashingContext;
-use infer::canonical::{Canonical, Canonicalize};
+use infer::canonical::Canonical;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::privacy::AccessLevels;
use middle::resolve_lifetime::ObjectLifetimeDefault;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct AssociatedItem {
pub def_id: DefId,
- pub name: Name,
+ pub ident: Ident,
pub kind: AssociatedKind,
pub vis: Visibility,
pub defaultness: hir::Defaultness,
// regions just fine, showing `fn(&MyType)`.
format!("{}", tcx.fn_sig(self.def_id).skip_binder())
}
- ty::AssociatedKind::Type => format!("type {};", self.name.to_string()),
+ ty::AssociatedKind::Type => format!("type {};", self.ident),
ty::AssociatedKind::Const => {
- format!("const {}: {:?};", self.name.to_string(), tcx.type_of(self.def_id))
+ format!("const {}: {:?};", self.ident, tcx.type_of(self.def_id))
}
}
}
pub type CanonicalTy<'gcx> = Canonical<'gcx, Ty<'gcx>>;
-impl <'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for Ty<'tcx> {
- type Canonicalized = CanonicalTy<'gcx>;
-
- fn intern(_gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>) -> Self::Canonicalized {
- value
- }
-}
-
extern {
/// A dummy type used to force Slice to by unsized without requiring fat pointers
type OpaqueSliceContents;
})
} else {
info!("invalid enum discriminant: {:#?}", val);
- ::middle::const_val::struct_error(
+ ::mir::interpret::struct_error(
tcx.at(tcx.def_span(expr_did)),
"constant evaluation of enum discriminant resulted in non-integer",
).emit();
};
AssociatedItem {
- name: trait_item_ref.name,
+ ident: trait_item_ref.ident,
kind,
// Visibility of trait items is inherited from their traits.
vis: Visibility::from_hir(parent_vis, trait_item_ref.id.node_id, self),
hir::AssociatedItemKind::Type => (ty::AssociatedKind::Type, false),
};
- ty::AssociatedItem {
- name: impl_item_ref.name,
+ AssociatedItem {
+ ident: impl_item_ref.ident,
kind,
// Visibility of trait impl items doesn't matter.
vis: ty::Visibility::from_hir(&impl_item_ref.vis, impl_item_ref.id.node_id, self),
pub fn associated_items(
self,
def_id: DefId,
- ) -> impl Iterator<Item = ty::AssociatedItem> + 'a {
+ ) -> impl Iterator<Item = AssociatedItem> + 'a {
let def_ids = self.associated_item_def_ids(def_id);
Box::new((0..def_ids.len()).map(move |i| self.associated_item(def_ids[i])))
- as Box<dyn Iterator<Item = ty::AssociatedItem> + 'a>
+ as Box<dyn Iterator<Item = AssociatedItem> + 'a>
}
/// Returns true if the impls are the same polarity and are implementing
// Hygienically compare a use-site name (`use_name`) for a field or an associated item with its
// supposed definition name (`def_name`). The method also needs `DefId` of the supposed
// definition's parent/scope to perform comparison.
- pub fn hygienic_eq(self, use_name: Name, def_name: Name, def_parent_def_id: DefId) -> bool {
- let (use_ident, def_ident) = (use_name.to_ident(), def_name.to_ident());
- self.adjust_ident(use_ident, def_parent_def_id, DUMMY_NODE_ID).0 == def_ident
+ pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool {
+ self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID).0 == def_name.modern()
}
pub fn adjust_ident(self, mut ident: Ident, scope: DefId, block: NodeId) -> (Ident, DefId) {
use dep_graph::SerializedDepNodeIndex;
use dep_graph::DepNode;
use hir::def_id::{CrateNum, DefId, DefIndex};
-use mir::interpret::{GlobalId, ConstValue};
-use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal};
+use mir::interpret::GlobalId;
+use traits::query::{
+ CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal, CanonicalTypeOpEqGoal,
+ CanonicalTypeOpNormalizeGoal, CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal,
+};
use ty::{self, ParamEnvAnd, Ty, TyCtxt};
use ty::subst::Substs;
use ty::query::queries;
}
}
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_eq<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpEqGoal<'tcx>) -> String {
+ format!("evaluating `type_op_eq` `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_subtype<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpSubtypeGoal<'tcx>) -> String {
+ format!("evaluating `type_op_subtype` `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_prove_predicate<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpProvePredicateGoal<'tcx>) -> String {
+ format!("evaluating `type_op_prove_predicate` `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_ty<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>) -> String {
+ format!("normalizing `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_predicate<'tcx> {
+ fn describe(
+ _tcx: TyCtxt,
+ goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::Predicate<'tcx>>,
+ ) -> String {
+ format!("normalizing `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_poly_fn_sig<'tcx> {
+ fn describe(
+ _tcx: TyCtxt,
+ goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::PolyFnSig<'tcx>>,
+ ) -> String {
+ format!("normalizing `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_fn_sig<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>) -> String {
+ format!("normalizing `{:?}`", goal)
+ }
+}
+
impl<'tcx> QueryDescription<'tcx> for queries::is_copy_raw<'tcx> {
fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
format!("computing whether `{}` is `Copy`", env.value)
}
impl<'tcx> QueryDescription<'tcx> for queries::const_value_to_allocation<'tcx> {
- fn describe(_tcx: TyCtxt, (val, ty): (ConstValue<'tcx>, Ty<'tcx>)) -> String {
- format!("converting value `{:?}` ({}) to an allocation", val, ty)
+ fn describe(_tcx: TyCtxt, val: &'tcx ty::Const<'tcx>) -> String {
+ format!("converting value `{:?}` to an allocation", val)
}
}
//! Defines the set of legal keys that can be used in queries.
+use infer::canonical::Canonical;
use hir::def_id::{CrateNum, DefId, LOCAL_CRATE, DefIndex};
-use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal};
use ty::{self, Ty, TyCtxt};
use ty::subst::Substs;
use ty::fast_reject::SimplifiedType;
}
}
-impl<'tcx> Key for (mir::interpret::ConstValue<'tcx>, Ty<'tcx>) {
+impl<'tcx> Key for &'tcx ty::Const<'tcx> {
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
}
}
-impl<'tcx> Key for CanonicalProjectionGoal<'tcx> {
- fn query_crate(&self) -> CrateNum {
- LOCAL_CRATE
- }
-
- fn default_span(&self, _tcx: TyCtxt) -> Span {
- DUMMY_SP
- }
-}
-
-impl<'tcx> Key for CanonicalTyGoal<'tcx> {
- fn query_crate(&self) -> CrateNum {
- LOCAL_CRATE
- }
-
- fn default_span(&self, _tcx: TyCtxt) -> Span {
- DUMMY_SP
- }
-}
-
-impl<'tcx> Key for CanonicalPredicateGoal<'tcx> {
+/// Canonical query goals correspond to abstract trait operations that
+/// are not tied to any crate in particular.
+impl<'tcx, T> Key for Canonical<'tcx, T>
+where
+ T: Debug + Hash + Clone + Eq,
+{
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
use middle::stability::{self, DeprecationEntry};
use middle::lang_items::{LanguageItems, LangItem};
use middle::exported_symbols::{SymbolExportLevel, ExportedSymbol};
-use middle::const_val::EvalResult;
+use mir::interpret::ConstEvalResult;
use mir::mono::{CodegenUnit, Stats};
use mir;
-use mir::interpret::{GlobalId, Allocation, ConstValue};
+use mir::interpret::{GlobalId, Allocation};
use session::{CompileResult, CrateDisambiguator};
use session::config::OutputFilenames;
use traits::{self, Vtable};
use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal,
- CanonicalTyGoal, NoSolution};
+ CanonicalTyGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpSubtypeGoal,
+ CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpNormalizeGoal, NoSolution};
use traits::query::dropck_outlives::{DtorckConstraint, DropckOutlivesResult};
use traits::query::normalize::NormalizationResult;
use traits::specialization_graph;
/// Results of evaluating const items or constants embedded in
/// other items (such as enum variant explicit discriminants).
[] fn const_eval: const_eval_dep_node(ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>)
- -> EvalResult<'tcx>,
+ -> ConstEvalResult<'tcx>,
/// Converts a constant value to an constant allocation
[] fn const_value_to_allocation: const_value_to_allocation(
- (ConstValue<'tcx>, Ty<'tcx>)
+ &'tcx ty::Const<'tcx>
) -> &'tcx Allocation,
[] fn check_match: CheckMatch(DefId)
CanonicalPredicateGoal<'tcx>
) -> Result<traits::EvaluationResult, traits::OverflowError>,
+ /// Do not call this query directly: part of the `Eq` type-op
+ [] fn type_op_eq: TypeOpEq(
+ CanonicalTypeOpEqGoal<'tcx>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Subtype` type-op
+ [] fn type_op_subtype: TypeOpSubtype(
+ CanonicalTypeOpSubtypeGoal<'tcx>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `ProvePredicate` type-op
+ [] fn type_op_prove_predicate: TypeOpProvePredicate(
+ CanonicalTypeOpProvePredicateGoal<'tcx>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Normalize` type-op
+ [] fn type_op_normalize_ty: TypeOpNormalizeTy(
+ CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, Ty<'tcx>>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Normalize` type-op
+ [] fn type_op_normalize_predicate: TypeOpNormalizePredicate(
+ CanonicalTypeOpNormalizeGoal<'tcx, ty::Predicate<'tcx>>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::Predicate<'tcx>>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Normalize` type-op
+ [] fn type_op_normalize_poly_fn_sig: TypeOpNormalizePolyFnSig(
+ CanonicalTypeOpNormalizeGoal<'tcx, ty::PolyFnSig<'tcx>>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::PolyFnSig<'tcx>>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Normalize` type-op
+ [] fn type_op_normalize_fn_sig: TypeOpNormalizeFnSig(
+ CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::FnSig<'tcx>>>>,
+ NoSolution,
+ >,
+
[] fn substitute_normalize_and_test_predicates:
substitute_normalize_and_test_predicates_node((DefId, &'tcx Substs<'tcx>)) -> bool,
}
fn const_value_to_allocation<'tcx>(
- (val, ty): (ConstValue<'tcx>, Ty<'tcx>)
+ val: &'tcx ty::Const<'tcx>,
) -> DepConstructor<'tcx> {
- DepConstructor::ConstValueToAllocation { val, ty }
+ DepConstructor::ConstValueToAllocation { val }
}
fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> {
let len = BytePos::decode(self)?;
let file_lo = self.file_index_to_file(file_lo_index);
- let lo = file_lo.lines.borrow()[line_lo - 1] + col_lo;
+ let lo = file_lo.lines[line_lo - 1] + col_lo;
let hi = lo + len;
let expn_info_tag = u8::decode(self)?;
}
impl<'enc, 'a, 'tcx> SpecializedEncoder<Fingerprint>
-for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder<'enc>>
+for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder>
{
fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
f.encode_opaque(&mut self.encoder)
impl UseSpecializedEncodable for IntEncodedWithFixedSize {}
impl UseSpecializedDecodable for IntEncodedWithFixedSize {}
-impl<'enc> SpecializedEncoder<IntEncodedWithFixedSize> for opaque::Encoder<'enc> {
+impl SpecializedEncoder<IntEncodedWithFixedSize> for opaque::Encoder {
fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> {
let start_pos = self.position();
for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE {
DepKind::NormalizeTyAfterErasingRegions |
DepKind::DropckOutlives |
DepKind::EvaluateObligation |
+ DepKind::TypeOpEq |
+ DepKind::TypeOpSubtype |
+ DepKind::TypeOpProvePredicate |
+ DepKind::TypeOpNormalizeTy |
+ DepKind::TypeOpNormalizePredicate |
+ DepKind::TypeOpNormalizePolyFnSig |
+ DepKind::TypeOpNormalizeFnSig |
DepKind::SubstituteNormalizeAndTestPredicates |
DepKind::InstanceDefSizeEstimate |
DepKind::ProgramClausesForEnv |
//! type equality, etc.
use hir::def_id::DefId;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use ty::subst::{Kind, UnpackedKind, Substs};
use ty::{self, Ty, TyCtxt, TypeFoldable};
use ty::error::{ExpectedFound, TypeError};
return Ok(s);
}
match x.val {
- ConstVal::Unevaluated(def_id, substs) => {
+ ConstValue::Unevaluated(def_id, substs) => {
// FIXME(eddyb) get the right param_env.
let param_env = ty::ParamEnv::empty();
match tcx.lift_to_global(&substs) {
//! hand, though we've recently added some macros (e.g.,
//! `BraceStructLiftImpl!`) to help with the tedium.
-use middle::const_val::{self, ConstVal, ConstEvalErr};
+use mir::interpret::{ConstValue, ConstEvalErr};
use ty::{self, Lift, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use rustc_data_structures::sync::Lrc;
use mir::interpret;
use std::rc::Rc;
impl<'a, 'tcx> Lift<'tcx> for ConstEvalErr<'a> {
type Lifted = ConstEvalErr<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
- tcx.lift(&*self.kind).map(|kind| {
+ tcx.lift(&self.error).map(|error| {
ConstEvalErr {
span: self.span,
- kind: Lrc::new(kind),
+ stacktrace: self.stacktrace.clone(),
+ error,
}
})
}
PathNotFound(ref v) => PathNotFound(v.clone()),
UnimplementedTraitSelection => UnimplementedTraitSelection,
TypeckError => TypeckError,
- ReferencedConstant(ref err) => ReferencedConstant(tcx.lift(err)?),
+ TooGeneric => TooGeneric,
+ CheckMatchError => CheckMatchError,
+ ReferencedConstant(ref err) => ReferencedConstant(tcx.lift(&**err)?.into()),
OverflowNeg => OverflowNeg,
Overflow(op) => Overflow(op),
DivisionByZero => DivisionByZero,
}
}
-impl<'a, 'tcx> Lift<'tcx> for const_val::ErrKind<'a> {
- type Lifted = const_val::ErrKind<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
- use middle::const_val::ErrKind::*;
-
- Some(match *self {
- CouldNotResolve => CouldNotResolve,
- TypeckError => TypeckError,
- CheckMatchError => CheckMatchError,
- Miri(ref e, ref frames) => return tcx.lift(e).map(|e| Miri(e, frames.clone())),
- })
- }
-}
-
impl<'a, 'tcx> Lift<'tcx> for ty::layout::LayoutError<'a> {
type Lifted = ty::layout::LayoutError<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
}
}
-impl<'tcx> TypeFoldable<'tcx> for ConstVal<'tcx> {
+impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
match *self {
- ConstVal::Value(v) => ConstVal::Value(v),
- ConstVal::Unevaluated(def_id, substs) => {
- ConstVal::Unevaluated(def_id, substs.fold_with(folder))
+ ConstValue::Scalar(v) => ConstValue::Scalar(v),
+ ConstValue::ScalarPair(a, b) => ConstValue::ScalarPair(a, b),
+ ConstValue::ByRef(alloc, offset) => ConstValue::ByRef(alloc, offset),
+ ConstValue::Unevaluated(def_id, substs) => {
+ ConstValue::Unevaluated(def_id, substs.fold_with(folder))
}
}
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
match *self {
- ConstVal::Value(_) => false,
- ConstVal::Unevaluated(_, substs) => substs.visit_with(visitor),
+ ConstValue::Scalar(_) |
+ ConstValue::ScalarPair(_, _) |
+ ConstValue::ByRef(_, _) => false,
+ ConstValue::Unevaluated(_, substs) => substs.visit_with(visitor),
}
}
}
use hir::def_id::DefId;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use middle::region;
use polonius_engine::Atom;
use rustc_data_structures::indexed_vec::Idx;
use ty::{self, AdtDef, TypeFlags, Ty, TyCtxt, TypeFoldable};
use ty::{Slice, TyS, ParamEnvAnd, ParamEnv};
use util::captures::Captures;
-use mir::interpret::{Scalar, Pointer, Value, ConstValue};
+use mir::interpret::{Scalar, Pointer, Value};
use std::iter;
use std::cmp::Ordering;
use rustc_target::spec::abi;
-use syntax::ast::{self, Name};
+use syntax::ast::{self, Ident};
use syntax::symbol::{keywords, InternedString};
use serialize;
/// Construct a ProjectionTy by searching the trait from trait_ref for the
/// associated item named item_name.
pub fn from_ref_and_name(
- tcx: TyCtxt, trait_ref: ty::TraitRef<'tcx>, item_name: Name
+ tcx: TyCtxt, trait_ref: ty::TraitRef<'tcx>, item_name: Ident
) -> ProjectionTy<'tcx> {
let item_def_id = tcx.associated_items(trait_ref.def_id).find(|item| {
item.kind == ty::AssociatedKind::Type &&
- tcx.hygienic_eq(item_name, item.name, trait_ref.def_id)
+ tcx.hygienic_eq(item_name, item.ident, trait_ref.def_id)
}).unwrap().def_id;
ProjectionTy {
}
}
+ pub fn is_impl_trait(&self) -> bool {
+ match self.sty {
+ TyAnon(..) => true,
+ _ => false,
+ }
+ }
+
pub fn ty_to_def_id(&self) -> Option<DefId> {
match self.sty {
TyDynamic(ref tt, ..) => tt.principal().map(|p| p.def_id()),
pub struct Const<'tcx> {
pub ty: Ty<'tcx>,
- pub val: ConstVal<'tcx>,
+ pub val: ConstValue<'tcx>,
}
impl<'tcx> Const<'tcx> {
ty: Ty<'tcx>,
) -> &'tcx Self {
tcx.mk_const(Const {
- val: ConstVal::Unevaluated(def_id, substs),
+ val: ConstValue::Unevaluated(def_id, substs),
ty,
})
}
#[inline]
- pub fn from_const_val(
+ pub fn from_const_value(
tcx: TyCtxt<'_, '_, 'tcx>,
- val: ConstVal<'tcx>,
+ val: ConstValue<'tcx>,
ty: Ty<'tcx>,
) -> &'tcx Self {
tcx.mk_const(Const {
})
}
- #[inline]
- pub fn from_const_value(
- tcx: TyCtxt<'_, '_, 'tcx>,
- val: ConstValue<'tcx>,
- ty: Ty<'tcx>,
- ) -> &'tcx Self {
- Self::from_const_val(tcx, ConstVal::Value(val), ty)
- }
-
#[inline]
pub fn from_byval_value(
tcx: TyCtxt<'_, '_, 'tcx>,
}
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).ok()?.size;
- match self.val {
- ConstVal::Value(val) => val.to_bits(size),
- _ => None,
- }
+ self.val.to_bits(size)
}
#[inline]
pub fn to_ptr(&self) -> Option<Pointer> {
- match self.val {
- ConstVal::Value(val) => val.to_ptr(),
- _ => None,
- }
+ self.val.to_ptr()
}
#[inline]
pub fn to_byval_value(&self) -> Option<Value> {
- match self.val {
- ConstVal::Value(val) => val.to_byval_value(),
- _ => None,
- }
+ self.val.to_byval_value()
}
#[inline]
pub fn to_scalar(&self) -> Option<Scalar> {
- match self.val {
- ConstVal::Value(val) => val.to_scalar(),
- _ => None,
- }
+ self.val.to_scalar()
}
#[inline]
assert_eq!(self.ty, ty.value);
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).ok()?.size;
- match self.val {
- ConstVal::Value(val) => val.to_bits(size),
- _ => None,
- }
+ self.val.to_bits(size)
}
#[inline]
self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr
}
+ /// True if this def-id refers to the implicit constructor for
+ /// a tuple struct like `struct Foo(u32)`.
+ pub fn is_struct_constructor(self, def_id: DefId) -> bool {
+ self.def_key(def_id).disambiguated_data.data == DefPathData::StructCtor
+ }
+
/// Given the `DefId` of a fn or closure, returns the `DefId` of
/// the innermost fn item that the closure is contained within.
/// This is a significant def-id because, when we do
//! An iterator over the type substructure.
//! WARNING: this does not keep track of the region depth.
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use ty::{self, Ty};
use rustc_data_structures::small_vec::SmallVec;
use rustc_data_structures::accumulate_vec::IntoIter as AccIntoIter;
}
fn push_const<'tcx>(stack: &mut TypeWalkerStack<'tcx>, constant: &'tcx ty::Const<'tcx>) {
- match constant.val {
- ConstVal::Value(_) => {}
- ConstVal::Unevaluated(_, substs) => {
- stack.extend(substs.types().rev());
- }
+ if let ConstValue::Unevaluated(_, substs) = constant.val {
+ stack.extend(substs.types().rev());
}
stack.push(constant.ty);
}
// except according to those terms.
use hir::def_id::DefId;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use infer::InferCtxt;
use ty::subst::Substs;
use traits;
/// into `self.out`.
fn compute_const(&mut self, constant: &'tcx ty::Const<'tcx>) {
self.require_sized(constant.ty, traits::ConstSized);
- match constant.val {
- ConstVal::Value(_) => {}
- ConstVal::Unevaluated(def_id, substs) => {
- let obligations = self.nominal_obligations(def_id, substs);
- self.out.extend(obligations);
-
- let predicate = ty::Predicate::ConstEvaluatable(def_id, substs);
- let cause = self.cause(traits::MiscObligation);
- self.out.push(traits::Obligation::new(cause,
- self.param_env,
- predicate));
- }
+ if let ConstValue::Unevaluated(def_id, substs) = constant.val {
+ let obligations = self.nominal_obligations(def_id, substs);
+ self.out.extend(obligations);
+
+ let predicate = ty::Predicate::ConstEvaluatable(def_id, substs);
+ let cause = self.cause(traits::MiscObligation);
+ self.out.push(traits::Obligation::new(cause,
+ self.param_env,
+ predicate));
}
}
use hir::def_id::DefId;
use hir::map::definitions::DefPathData;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use middle::region::{self, BlockRemainder};
use ty::subst::{self, Subst};
use ty::{BrAnon, BrEnv, BrFresh, BrNamed};
ty::tls::with(|tcx|
print!(f, self,
write("{}=",
- tcx.associated_item(projection.projection_ty.item_def_id).name),
+ tcx.associated_item(projection.projection_ty.item_def_id).ident),
print_display(projection.ty))
)?;
}
TyArray(ty, sz) => {
print!(f, cx, write("["), print(ty), write("; "))?;
match sz.val {
- ConstVal::Value(..) => ty::tls::with(|tcx| {
- write!(f, "{}", sz.unwrap_usize(tcx))
- })?,
- ConstVal::Unevaluated(_def_id, _substs) => {
+ ConstValue::Unevaluated(_def_id, _substs) => {
write!(f, "_")?;
}
+ _ => ty::tls::with(|tcx| {
+ write!(f, "{}", sz.unwrap_usize(tcx))
+ })?,
}
write!(f, "]")
}
// parameterized(f, self.substs, self.item_def_id, &[])
// (which currently ICEs).
let (trait_ref, item_name) = ty::tls::with(|tcx|
- (self.trait_ref(tcx), tcx.associated_item(self.item_def_id).name)
+ (self.trait_ref(tcx), tcx.associated_item(self.item_def_id).ident)
);
print!(f, cx, print_debug(trait_ref), write("::{}", item_name))
}
format: MacroAttribute(Symbol::intern(name)),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
cmt: &'c mc::cmt_<'tcx>) {
let source = get_pattern_source(bccx.tcx,move_pat);
let pat_span_path_opt = match move_pat.node {
- PatKind::Binding(_, _, ref path1, _) => {
+ PatKind::Binding(_, _, ident, _) => {
Some(MovePlace {
span: move_pat.span,
- name: path1.node,
+ name: ident.name,
pat_source: source,
})
}
let tcx = self.bccx.tcx;
let mut mutables = FxHashMap();
for p in pats {
- p.each_binding(|_, hir_id, span, path1| {
- let name = path1.node;
-
+ p.each_binding(|_, hir_id, span, ident| {
// Skip anything that looks like `_foo`
- if name.as_str().starts_with("_") {
+ if ident.as_str().starts_with("_") {
return;
}
_ => return,
}
- mutables.entry(name).or_insert(Vec::new()).push((hir_id, span));
+ mutables.entry(ident.name).or_insert(Vec::new()).push((hir_id, span));
});
}
}
fn u32(&mut self, val: u32) {
- let at = self.data.len();
- leb128::write_u32_leb128(&mut self.data, at, val);
+ leb128::write_u32_leb128(&mut self.data, val);
}
fn byte(&mut self, val: u8) {
fn fetch_wasm_section(tcx: TyCtxt, id: DefId) -> (String, Vec<u8>) {
use rustc::mir::interpret::GlobalId;
- use rustc::middle::const_val::ConstVal;
info!("loading wasm section {:?}", id);
};
let param_env = ty::ParamEnv::reveal_all();
let val = tcx.const_eval(param_env.and(cid)).unwrap();
-
- let const_val = match val.val {
- ConstVal::Value(val) => val,
- ConstVal::Unevaluated(..) => bug!("should be evaluated"),
- };
-
- let alloc = tcx.const_value_to_allocation((const_val, val.ty));
+ let alloc = tcx.const_value_to_allocation(val);
(section.to_string(), alloc.bytes.clone())
}
// except according to those terms.
use llvm::{self, ValueRef};
-use rustc::middle::const_val::{ConstVal, ConstEvalErr};
+use rustc::mir::interpret::ConstEvalErr;
use rustc_mir::interpret::{read_target_uint, const_val_field};
use rustc::hir::def_id::DefId;
use rustc::mir;
use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::sync::Lrc;
use rustc::mir::interpret::{GlobalId, Pointer, Scalar, Allocation, ConstValue, AllocType};
use rustc::ty::{self, Ty};
use rustc::ty::layout::{self, HasDataLayout, LayoutOf, Size};
pub fn codegen_static_initializer<'a, 'tcx>(
cx: &CodegenCx<'a, 'tcx>,
def_id: DefId)
- -> Result<ValueRef, ConstEvalErr<'tcx>>
+ -> Result<ValueRef, Lrc<ConstEvalErr<'tcx>>>
{
let instance = ty::Instance::mono(cx.tcx, def_id);
let cid = GlobalId {
let static_ = cx.tcx.const_eval(param_env.and(cid))?;
let alloc = match static_.val {
- ConstVal::Value(ConstValue::ByRef(alloc, n)) if n.bytes() == 0 => alloc,
+ ConstValue::ByRef(alloc, n) if n.bytes() == 0 => alloc,
_ => bug!("static const eval returned {:#?}", static_),
};
Ok(const_alloc_to_llvm(cx, alloc))
}
impl<'a, 'tcx> FunctionCx<'a, 'tcx> {
- fn const_to_const_value(
+ fn fully_evaluate(
&mut self,
bx: &Builder<'a, 'tcx>,
constant: &'tcx ty::Const<'tcx>,
- ) -> Result<ConstValue<'tcx>, ConstEvalErr<'tcx>> {
+ ) -> Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
match constant.val {
- ConstVal::Unevaluated(def_id, ref substs) => {
+ ConstValue::Unevaluated(def_id, ref substs) => {
let tcx = bx.tcx();
let param_env = ty::ParamEnv::reveal_all();
let instance = ty::Instance::resolve(tcx, param_env, def_id, substs).unwrap();
instance,
promoted: None,
};
- let c = tcx.const_eval(param_env.and(cid))?;
- self.const_to_const_value(bx, c)
+ tcx.const_eval(param_env.and(cid))
},
- ConstVal::Value(val) => Ok(val),
+ _ => Ok(constant),
}
}
- pub fn mir_constant_to_const_value(
+ pub fn eval_mir_constant(
&mut self,
bx: &Builder<'a, 'tcx>,
constant: &mir::Constant<'tcx>,
- ) -> Result<ConstValue<'tcx>, ConstEvalErr<'tcx>> {
+ ) -> Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
match constant.literal {
mir::Literal::Promoted { index } => {
let param_env = ty::ParamEnv::reveal_all();
mir::Literal::Value { value } => {
Ok(self.monomorphize(&value))
}
- }.and_then(|c| self.const_to_const_value(bx, c))
+ }.and_then(|c| self.fully_evaluate(bx, c))
}
/// process constant containing SIMD shuffle indices
bx: &Builder<'a, 'tcx>,
constant: &mir::Constant<'tcx>,
) -> (ValueRef, Ty<'tcx>) {
- self.mir_constant_to_const_value(bx, constant)
+ self.eval_mir_constant(bx, constant)
.and_then(|c| {
- let field_ty = constant.ty.builtin_index().unwrap();
- let fields = match constant.ty.sty {
+ let field_ty = c.ty.builtin_index().unwrap();
+ let fields = match c.ty.sty {
ty::TyArray(_, n) => n.unwrap_usize(bx.tcx()),
ref other => bug!("invalid simd shuffle type: {}", other),
};
- let values: Result<Vec<ValueRef>, _> = (0..fields).map(|field| {
+ let values: Result<Vec<ValueRef>, Lrc<_>> = (0..fields).map(|field| {
let field = const_val_field(
bx.tcx(),
ty::ParamEnv::reveal_all(),
None,
mir::Field::new(field as usize),
c,
- constant.ty,
)?;
if let Some(prim) = field.to_scalar() {
let layout = bx.cx.layout_of(field_ty);
}
}).collect();
let llval = C_struct(bx.cx, &values?, false);
- Ok((llval, constant.ty))
+ Ok((llval, c.ty))
})
.unwrap_or_else(|e| {
e.report_as_error(
// except according to those terms.
use llvm::{ValueRef, LLVMConstInBoundsGEP};
-use rustc::middle::const_val::ConstEvalErr;
+use rustc::mir::interpret::ConstEvalErr;
use rustc::mir;
use rustc::mir::interpret::ConstValue;
use rustc::ty;
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::sync::Lrc;
use base;
use common::{self, CodegenCx, C_null, C_undef, C_usize};
}
pub fn from_const(bx: &Builder<'a, 'tcx>,
- val: ConstValue<'tcx>,
- ty: ty::Ty<'tcx>)
- -> Result<OperandRef<'tcx>, ConstEvalErr<'tcx>> {
- let layout = bx.cx.layout_of(ty);
+ val: &'tcx ty::Const<'tcx>)
+ -> Result<OperandRef<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
+ let layout = bx.cx.layout_of(val.ty);
if layout.is_zst() {
return Ok(OperandRef::new_zst(bx.cx, layout));
}
- let val = match val {
+ let val = match val.val {
+ ConstValue::Unevaluated(..) => bug!(),
ConstValue::Scalar(x) => {
let scalar = match layout.abi {
layout::Abi::Scalar(ref x) => x,
mir::Operand::Constant(ref constant) => {
let ty = self.monomorphize(&constant.ty);
- self.mir_constant_to_const_value(bx, constant)
- .and_then(|c| OperandRef::from_const(bx, c, ty))
+ self.eval_mir_constant(bx, constant)
+ .and_then(|c| OperandRef::from_const(bx, c))
.unwrap_or_else(|err| {
match constant.literal {
mir::Literal::Promoted { .. } => {
let mut cfgs = Vec::new();
for &(name, ref value) in sess.parse_sess.config.iter() {
let gated_cfg = GatedCfg::gate(&ast::MetaItem {
- ident: ast::Path::from_ident(name.to_ident()),
+ ident: ast::Path::from_ident(ast::Ident::with_empty_ctxt(name)),
node: ast::MetaItemKind::Word,
span: DUMMY_SP,
});
use std::env;
use rustc::session::config::nightly_options;
+use rustc_serialize::opaque::Encoder;
/// The first few bytes of files generated by incremental compilation
const FILE_MAGIC: &'static [u8] = b"RSIC";
/// the git commit hash.
const RUSTC_VERSION: Option<&'static str> = option_env!("CFG_VERSION");
-pub fn write_file_header<W: io::Write>(stream: &mut W) -> io::Result<()> {
- stream.write_all(FILE_MAGIC)?;
- stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8,
- (HEADER_FORMAT_VERSION >> 8) as u8])?;
+pub fn write_file_header(stream: &mut Encoder) {
+ stream.emit_raw_bytes(FILE_MAGIC);
+ stream.emit_raw_bytes(&[(HEADER_FORMAT_VERSION >> 0) as u8,
+ (HEADER_FORMAT_VERSION >> 8) as u8]);
let rustc_version = rustc_version();
assert_eq!(rustc_version.len(), (rustc_version.len() as u8) as usize);
- stream.write_all(&[rustc_version.len() as u8])?;
- stream.write_all(rustc_version.as_bytes())?;
-
- Ok(())
+ stream.emit_raw_bytes(&[rustc_version.len() as u8]);
+ stream.emit_raw_bytes(rustc_version.as_bytes());
}
/// Reads the contents of a file with a file header as defined in this module.
use rustc_data_structures::sync::join;
use rustc_serialize::Encodable as RustcEncodable;
use rustc_serialize::opaque::Encoder;
-use std::io::{self, Cursor};
use std::fs;
use std::path::PathBuf;
}
fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
- where F: FnOnce(&mut Encoder) -> io::Result<()>
+ where F: FnOnce(&mut Encoder)
{
debug!("save: storing data in {}", path_buf.display());
}
// generate the data in a memory buffer
- let mut wr = Cursor::new(Vec::new());
- file_format::write_file_header(&mut wr).unwrap();
- match encode(&mut Encoder::new(&mut wr)) {
- Ok(()) => {}
- Err(err) => {
- sess.err(&format!("could not encode dep-graph to `{}`: {}",
- path_buf.display(),
- err));
- return;
- }
- }
+ let mut encoder = Encoder::new(Vec::new());
+ file_format::write_file_header(&mut encoder);
+ encode(&mut encoder);
// write the data out
- let data = wr.into_inner();
+ let data = encoder.into_inner();
match fs::write(&path_buf, data) {
Ok(_) => {
debug!("save: data written to disk successfully");
}
fn encode_dep_graph(tcx: TyCtxt,
- encoder: &mut Encoder)
- -> io::Result<()> {
+ encoder: &mut Encoder) {
// First encode the commandline arguments hash
- tcx.sess.opts.dep_tracking_hash().encode(encoder)?;
+ tcx.sess.opts.dep_tracking_hash().encode(encoder).unwrap();
// Encode the graph data.
let serialized_graph = time(tcx.sess, "getting serialized graph", || {
}
time(tcx.sess, "encoding serialized graph", || {
- serialized_graph.encode(encoder)
- })?;
-
- Ok(())
+ serialized_graph.encode(encoder).unwrap();
+ });
}
fn encode_work_product_index(work_products: &FxHashMap<WorkProductId, WorkProduct>,
- encoder: &mut Encoder) -> io::Result<()> {
+ encoder: &mut Encoder) {
let serialized_products: Vec<_> = work_products
.iter()
.map(|(id, work_product)| {
})
.collect();
- serialized_products.encode(encoder)
+ serialized_products.encode(encoder).unwrap();
}
fn encode_query_cache(tcx: TyCtxt,
- encoder: &mut Encoder)
- -> io::Result<()> {
+ encoder: &mut Encoder) {
time(tcx.sess, "serialize query result cache", || {
- tcx.serialize_query_result_cache(encoder)
+ tcx.serialize_query_result_cache(encoder).unwrap();
})
}
GenericParamKind::Lifetime { .. } => {}
GenericParamKind::Type { synthetic, .. } => {
if synthetic.is_none() {
- self.check_case(cx, "type parameter", param.name.name(), param.span);
+ self.check_case(cx, "type parameter", param.name.ident().name, param.span);
}
}
}
fn check_generic_param(&mut self, cx: &LateContext, param: &hir::GenericParam) {
match param.kind {
GenericParamKind::Lifetime { .. } => {
- let name = param.name.name().as_str();
+ let name = param.name.ident().as_str();
self.check_snake_case(cx, "lifetime", &name, Some(param.span));
}
GenericParamKind::Type { .. } => {}
}
fn check_trait_item(&mut self, cx: &LateContext, item: &hir::TraitItem) {
- if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(ref names)) = item.node {
+ if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(ref pnames)) = item.node {
self.check_snake_case(cx,
"trait method",
- &item.name.as_str(),
+ &item.ident.as_str(),
Some(item.span));
- for name in names {
- self.check_snake_case(cx, "variable", &name.node.as_str(), Some(name.span));
+ for param_name in pnames {
+ self.check_snake_case(cx, "variable", &param_name.as_str(), Some(param_name.span));
}
}
}
fn check_pat(&mut self, cx: &LateContext, p: &hir::Pat) {
- if let &PatKind::Binding(_, _, ref path1, _) = &p.node {
- self.check_snake_case(cx, "variable", &path1.node.as_str(), Some(p.span));
+ if let &PatKind::Binding(_, _, ref ident, _) = &p.node {
+ self.check_snake_case(cx, "variable", &ident.as_str(), Some(p.span));
}
}
fn check_trait_item(&mut self, cx: &LateContext, ti: &hir::TraitItem) {
match ti.node {
hir::TraitItemKind::Const(..) => {
- NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ti.name, ti.span);
+ NonUpperCaseGlobals::check_upper_case(cx, "associated constant",
+ ti.ident.name, ti.span);
}
_ => {}
}
fn check_impl_item(&mut self, cx: &LateContext, ii: &hir::ImplItem) {
match ii.node {
hir::ImplItemKind::Const(..) => {
- NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ii.name, ii.span);
+ NonUpperCaseGlobals::check_upper_case(cx, "associated constant",
+ ii.ident.name, ii.span);
}
_ => {}
}
if path.segments.len() == 1 {
NonUpperCaseGlobals::check_upper_case(cx,
"constant in pattern",
- path.segments[0].name,
+ path.segments[0].ident.name,
path.span);
}
}
// (Issue #49588)
continue;
}
- if let PatKind::Binding(_, _, name, None) = fieldpat.node.pat.node {
- let binding_ident = ast::Ident::new(name.node, name.span);
- if cx.tcx.find_field_index(binding_ident, &variant) ==
+ if let PatKind::Binding(_, _, ident, None) = fieldpat.node.pat.node {
+ if cx.tcx.find_field_index(ident, &variant) ==
Some(cx.tcx.field_index(fieldpat.node.id, cx.tables)) {
let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS,
fieldpat.span,
- &format!("the `{}:` in this pattern is redundant",
- name.node));
+ &format!("the `{}:` in this pattern is redundant", ident));
let subspan = cx.tcx.sess.codemap().span_through_char(fieldpat.span, ':');
- err.span_suggestion_short(subspan,
- "remove this",
- format!("{}", name.node));
+ err.span_suggestion_short(subspan, "remove this", format!("{}", ident));
err.emit();
}
}
let container = ty::ImplContainer(vtable_impl.impl_def_id);
// It matches if it comes from the same impl,
// and has the same method name.
- container == method.container && callee_item.name == method.name
+ container == method.container &&
+ callee_item.ident.name == method.ident.name
}
// There's no way to know if this call is
};
ty::AssociatedItem {
- name: name.as_symbol(),
+ ident: Ident::from_interned_str(name),
kind,
vis: item.visibility.decode(self),
defaultness: container.defaultness(),
src_hash,
start_pos,
end_pos,
- lines,
- multibyte_chars,
- non_narrow_chars,
+ mut lines,
+ mut multibyte_chars,
+ mut non_narrow_chars,
name_hash,
.. } = filemap_to_import;
// `CodeMap::new_imported_filemap()` will then translate those
// coordinates to their new global frame of reference when the
// offset of the FileMap is known.
- let mut lines = lines.into_inner();
for pos in &mut lines {
*pos = *pos - start_pos;
}
- let mut multibyte_chars = multibyte_chars.into_inner();
for mbc in &mut multibyte_chars {
mbc.pos = mbc.pos - start_pos;
}
- let mut non_narrow_chars = non_narrow_chars.into_inner();
for swc in &mut non_narrow_chars {
*swc = *swc - start_pos;
}
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
use std::hash::Hash;
-use std::io::prelude::*;
-use std::io::Cursor;
use std::path::Path;
use rustc_data_structures::sync::Lrc;
use std::u32;
use syntax::ast::{self, CRATE_NODE_ID};
-use syntax::codemap::Spanned;
use syntax::attr;
-use syntax::symbol::Symbol;
+use syntax::symbol::keywords;
use syntax_pos::{self, hygiene, FileName, FileMap, Span, DUMMY_SP};
use rustc::hir::{self, PatKind};
use rustc::hir::intravisit;
pub struct EncodeContext<'a, 'tcx: 'a> {
- opaque: opaque::Encoder<'a>,
+ opaque: opaque::Encoder,
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &'a LinkMeta,
}
impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
- type Error = <opaque::Encoder<'a> as Encoder>::Error;
+ type Error = <opaque::Encoder as Encoder>::Error;
fn emit_nil(&mut self) -> Result<(), Self::Error> {
Ok(())
// Index the items
i = self.position();
- let index = items.write_index(&mut self.opaque.cursor);
+ let index = items.write_index(&mut self.opaque);
let index_bytes = self.position() - i;
let attrs = tcx.hir.krate_attrs();
if self.tcx.sess.meta_stats() {
let mut zero_bytes = 0;
- for e in self.opaque.cursor.get_ref() {
+ for e in self.opaque.data.iter() {
if *e == 0 {
zero_bytes += 1;
}
let body = self.tcx.hir.body(body_id);
self.lazy_seq(body.arguments.iter().map(|arg| {
match arg.pat.node {
- PatKind::Binding(_, _, name, _) => name.node,
- _ => Symbol::intern("")
+ PatKind::Binding(_, _, ident, _) => ident.name,
+ _ => keywords::Invalid.name(),
}
}))
})
}
- fn encode_fn_arg_names(&mut self, names: &[Spanned<ast::Name>])
- -> LazySeq<ast::Name> {
- self.lazy_seq(names.iter().map(|name| name.node))
+ fn encode_fn_arg_names(&mut self, param_names: &[ast::Ident]) -> LazySeq<ast::Name> {
+ self.lazy_seq(param_names.iter().map(|ident| ident.name))
}
fn encode_optimized_mir(&mut self, def_id: DefId) -> Option<Lazy<mir::Mir<'tcx>>> {
link_meta: &LinkMeta)
-> EncodedMetadata
{
- let mut cursor = Cursor::new(vec![]);
- cursor.write_all(METADATA_HEADER).unwrap();
+ let mut encoder = opaque::Encoder::new(vec![]);
+ encoder.emit_raw_bytes(METADATA_HEADER);
// Will be filled with the root position after encoding everything.
- cursor.write_all(&[0, 0, 0, 0]).unwrap();
+ encoder.emit_raw_bytes(&[0, 0, 0, 0]);
- let root = {
+ let (root, mut result) = {
let mut ecx = EncodeContext {
- opaque: opaque::Encoder::new(&mut cursor),
+ opaque: encoder,
tcx,
link_meta,
lazy_state: LazyState::NoNode,
// Encode all the entries and extra information in the crate,
// culminating in the `CrateRoot` which points to all of it.
- ecx.encode_crate_root()
+ let root = ecx.encode_crate_root();
+ (root, ecx.opaque.into_inner())
};
- let mut result = cursor.into_inner();
// Encode the root position.
let header = METADATA_HEADER.len();
use schema::*;
use rustc::hir::def_id::{DefId, DefIndex, DefIndexAddressSpace};
-use std::io::{Cursor, Write};
+use rustc_serialize::opaque::Encoder;
use std::slice;
use std::u32;
self.positions[space_index][array_index] = position.to_le();
}
- pub fn write_index(&self, buf: &mut Cursor<Vec<u8>>) -> LazySeq<Index> {
+ pub fn write_index(&self, buf: &mut Encoder) -> LazySeq<Index> {
let pos = buf.position();
// First we write the length of the lower range ...
- buf.write_all(words_to_bytes(&[(self.positions[0].len() as u32).to_le()])).unwrap();
+ buf.emit_raw_bytes(words_to_bytes(&[(self.positions[0].len() as u32).to_le()]));
// ... then the values in the lower range ...
- buf.write_all(words_to_bytes(&self.positions[0][..])).unwrap();
+ buf.emit_raw_bytes(words_to_bytes(&self.positions[0][..]));
// ... then the values in the higher range.
- buf.write_all(words_to_bytes(&self.positions[1][..])).unwrap();
+ buf.emit_raw_bytes(words_to_bytes(&self.positions[1][..]));
LazySeq::with_position_and_length(pos as usize,
self.positions[0].len() + self.positions[1].len() + 1)
}
#![feature(box_patterns)]
#![feature(fs_read_write)]
#![feature(libc)]
+#![feature(macro_at_most_once_rep)]
#![cfg_attr(stage0, feature(macro_lifetime_matcher))]
#![feature(proc_macro_internals)]
#![feature(quote)]
let input_mir = tcx.mir_validated(def_id);
debug!("run query mir_borrowck: {}", tcx.item_path_str(def_id));
- if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir_borrowck() {
+ let mut return_early;
+
+ // Return early if we are not supposed to use MIR borrow checker for this function.
+ return_early = !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir_borrowck();
+
+ if tcx.is_struct_constructor(def_id) {
+ // We are not borrow checking the automatically generated struct constructors
+ // because we want to accept structs such as this (taken from the `linked-hash-map`
+ // crate):
+ // ```rust
+ // struct Qey<Q: ?Sized>(Q);
+ // ```
+ // MIR of this struct constructor looks something like this:
+ // ```rust
+ // fn Qey(_1: Q) -> Qey<Q>{
+ // let mut _0: Qey<Q>; // return place
+ //
+ // bb0: {
+ // (_0.0: Q) = move _1; // bb0[0]: scope 0 at src/main.rs:1:1: 1:26
+ // return; // bb0[1]: scope 0 at src/main.rs:1:1: 1:26
+ // }
+ // }
+ // ```
+ // The problem here is that `(_0.0: Q) = move _1;` is valid only if `Q` is
+ // of statically known size, which is not known to be true because of the
+ // `Q: ?Sized` constraint. However, it is true because the constructor can be
+ // called only when `Q` is of statically known size.
+ return_early = true;
+ }
+
+ if return_early {
return BorrowCheckResult {
closure_requirements: None,
used_mut_upvars: SmallVec::new(),
mir_def_id: def_id,
move_data: &mdpe.move_data,
param_env: param_env,
+ location_table,
movable_generator,
locals_are_invalidated_at_exit: match tcx.hir.body_owner_kind(id) {
hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => false,
mir: &'cx Mir<'tcx>,
mir_def_id: DefId,
move_data: &'cx MoveData<'tcx>,
+
+ /// Map from MIR `Location` to `LocationIndex`; created
+ /// when MIR borrowck begins.
+ location_table: &'cx LocationTable,
+
param_env: ParamEnv<'gcx>,
movable_generator: bool,
/// This keeps track of whether local variables are free-ed when the function
let mut error_reported = false;
let tcx = self.tcx;
let mir = self.mir;
- let location_table = &LocationTable::new(mir);
- let location = location_table.start_index(context.loc);
+ let location = self.location_table.start_index(context.loc);
let borrow_set = self.borrow_set.clone();
each_borrow_involving_path(
self,
use dataflow::indexes::BorrowIndex;
use polonius_engine::AllFacts as PoloniusAllFacts;
use polonius_engine::Atom;
-use rustc::ty::RegionVid;
+use rustc::ty::{RegionVid, TyCtxt};
use rustc_data_structures::indexed_vec::Idx;
use std::error::Error;
use std::fmt::Debug;
crate type AllFacts = PoloniusAllFacts<RegionVid, BorrowIndex, LocationIndex>;
crate trait AllFactsExt {
+ /// Returns true if there is a need to gather `AllFacts` given the
+ /// current `-Z` flags.
+ fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool;
+
fn write_to_dir(
&self,
dir: impl AsRef<Path>,
}
impl AllFactsExt for AllFacts {
+ /// Returns true if there is a need to gather `AllFacts` given the
+ /// current `-Z` flags (`-Znll-facts` or `-Zpolonius`).
+ fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool {
+ tcx.sess.opts.debugging_opts.nll_facts
+ || tcx.sess.opts.debugging_opts.polonius
+ }
+
fn write_to_dir(
&self,
dir: impl AsRef<Path>,
Option<Rc<Output<RegionVid, BorrowIndex, LocationIndex>>>,
Option<ClosureRegionRequirements<'gcx>>,
) {
- let mut all_facts = if infcx.tcx.sess.opts.debugging_opts.nll_facts
- || infcx.tcx.sess.opts.debugging_opts.polonius
- {
+ let mut all_facts = if AllFacts::enabled(infcx.tcx) {
Some(AllFacts::default())
} else {
None
use super::universal_regions::UniversalRegions;
use borrow_check::nll::region_infer::values::ToElementIndex;
use rustc::hir::def_id::DefId;
+use rustc::infer::canonical::QueryRegionConstraint;
use rustc::infer::error_reporting::nice_region_error::NiceRegionError;
use rustc::infer::region_constraints::{GenericKind, VarInfos};
use rustc::infer::InferCtxt;
use rustc::infer::NLLRegionVariableOrigin;
-use rustc::infer::RegionObligation;
use rustc::infer::RegionVariableOrigin;
-use rustc::infer::SubregionOrigin;
use rustc::mir::{
ClosureOutlivesRequirement, ClosureOutlivesSubject, ClosureRegionRequirements, Local, Location,
Mir,
};
-use rustc::traits::ObligationCause;
-use rustc::ty::{self, RegionVid, Ty, TypeFoldable};
+use rustc::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable};
use rustc::util::common::{self, ErrorReported};
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use std::fmt;
use std::rc::Rc;
-use syntax::ast;
use syntax_pos::Span;
mod annotation;
pub trait ClosureRegionRequirementsExt<'gcx, 'tcx> {
fn apply_requirements(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- body_id: ast::NodeId,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
location: Location,
closure_def_id: DefId,
closure_substs: ty::ClosureSubsts<'tcx>,
- );
+ ) -> Vec<QueryRegionConstraint<'tcx>>;
fn subst_closure_mapping<T>(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
closure_mapping: &IndexVec<RegionVid, ty::Region<'tcx>>,
value: &T,
) -> T
/// requirements.
fn apply_requirements(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- body_id: ast::NodeId,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
location: Location,
closure_def_id: DefId,
closure_substs: ty::ClosureSubsts<'tcx>,
- ) {
- let tcx = infcx.tcx;
-
+ ) -> Vec<QueryRegionConstraint<'tcx>> {
debug!(
"apply_requirements(location={:?}, closure_def_id={:?}, closure_substs={:?})",
location, closure_def_id, closure_substs
// into a vector. These are the regions that we will be
// relating to one another.
let closure_mapping =
- &UniversalRegions::closure_mapping(infcx, user_closure_ty, self.num_external_vids);
+ &UniversalRegions::closure_mapping(tcx, user_closure_ty, self.num_external_vids);
debug!("apply_requirements: closure_mapping={:?}", closure_mapping);
// Create the predicates.
- for outlives_requirement in &self.outlives_requirements {
- let outlived_region = closure_mapping[outlives_requirement.outlived_free_region];
-
- // FIXME, this origin is not entirely suitable.
- let origin = SubregionOrigin::CallRcvr(outlives_requirement.blame_span);
-
- match outlives_requirement.subject {
- ClosureOutlivesSubject::Region(region) => {
- let region = closure_mapping[region];
- debug!(
- "apply_requirements: region={:?} \
- outlived_region={:?} \
- outlives_requirement={:?}",
- region, outlived_region, outlives_requirement,
- );
- infcx.sub_regions(origin, outlived_region, region);
- }
+ self.outlives_requirements
+ .iter()
+ .map(|outlives_requirement| {
+ let outlived_region = closure_mapping[outlives_requirement.outlived_free_region];
+
+ match outlives_requirement.subject {
+ ClosureOutlivesSubject::Region(region) => {
+ let region = closure_mapping[region];
+ debug!(
+ "apply_requirements: region={:?} \
+ outlived_region={:?} \
+ outlives_requirement={:?}",
+ region, outlived_region, outlives_requirement,
+ );
+ ty::Binder::dummy(ty::OutlivesPredicate(region.into(), outlived_region))
+ }
- ClosureOutlivesSubject::Ty(ty) => {
- let ty = self.subst_closure_mapping(infcx, closure_mapping, &ty);
- debug!(
- "apply_requirements: ty={:?} \
- outlived_region={:?} \
- outlives_requirement={:?}",
- ty, outlived_region, outlives_requirement,
- );
- infcx.register_region_obligation(
- body_id,
- RegionObligation {
- sup_type: ty,
- sub_region: outlived_region,
- cause: ObligationCause::misc(outlives_requirement.blame_span, body_id),
- },
- );
+ ClosureOutlivesSubject::Ty(ty) => {
+ let ty = self.subst_closure_mapping(tcx, closure_mapping, &ty);
+ debug!(
+ "apply_requirements: ty={:?} \
+ outlived_region={:?} \
+ outlives_requirement={:?}",
+ ty, outlived_region, outlives_requirement,
+ );
+ ty::Binder::dummy(ty::OutlivesPredicate(ty.into(), outlived_region))
+ }
}
- }
- }
+ })
+ .collect()
}
fn subst_closure_mapping<T>(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
closure_mapping: &IndexVec<RegionVid, ty::Region<'tcx>>,
value: &T,
) -> T
where
T: TypeFoldable<'tcx>,
{
- infcx.tcx.fold_regions(value, &mut false, |r, _depth| {
+ tcx.fold_regions(value, &mut false, |r, _depth| {
if let ty::ReClosureBound(vid) = r {
closure_mapping[*vid]
} else {
use borrow_check::nll::region_infer::{OutlivesConstraint, RegionTest, TypeTest};
use borrow_check::nll::type_check::Locations;
use borrow_check::nll::universal_regions::UniversalRegions;
-use rustc::infer::region_constraints::Constraint;
-use rustc::infer::region_constraints::RegionConstraintData;
-use rustc::infer::region_constraints::{Verify, VerifyBound};
+use rustc::infer::canonical::QueryRegionConstraint;
+use rustc::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
+use rustc::infer::region_constraints::{GenericKind, VerifyBound};
+use rustc::infer::{self, SubregionOrigin};
use rustc::mir::{Location, Mir};
-use rustc::ty;
+use rustc::ty::subst::UnpackedKind;
+use rustc::ty::{self, TyCtxt};
use syntax::codemap::Span;
-crate struct ConstraintConversion<'a, 'tcx: 'a> {
+crate struct ConstraintConversion<'a, 'gcx: 'tcx, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'gcx, 'tcx>,
mir: &'a Mir<'tcx>,
universal_regions: &'a UniversalRegions<'tcx>,
location_table: &'a LocationTable,
+ region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
+ implicit_region_bound: Option<ty::Region<'tcx>>,
+ param_env: ty::ParamEnv<'tcx>,
+ locations: Locations,
outlives_constraints: &'a mut Vec<OutlivesConstraint>,
type_tests: &'a mut Vec<TypeTest<'tcx>>,
all_facts: &'a mut Option<AllFacts>,
-
}
-impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
+impl<'a, 'gcx, 'tcx> ConstraintConversion<'a, 'gcx, 'tcx> {
crate fn new(
+ tcx: TyCtxt<'a, 'gcx, 'tcx>,
mir: &'a Mir<'tcx>,
universal_regions: &'a UniversalRegions<'tcx>,
location_table: &'a LocationTable,
+ region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
+ implicit_region_bound: Option<ty::Region<'tcx>>,
+ param_env: ty::ParamEnv<'tcx>,
+ locations: Locations,
outlives_constraints: &'a mut Vec<OutlivesConstraint>,
type_tests: &'a mut Vec<TypeTest<'tcx>>,
all_facts: &'a mut Option<AllFacts>,
) -> Self {
Self {
+ tcx,
mir,
universal_regions,
location_table,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ locations,
outlives_constraints,
type_tests,
all_facts,
}
}
- crate fn convert(
- &mut self,
- locations: Locations,
- data: &RegionConstraintData<'tcx>,
- ) {
- debug!("generate: constraints at: {:#?}", locations);
- let RegionConstraintData {
- constraints,
- verifys,
- givens,
- } = data;
-
- let span = self
- .mir
- .source_info(locations.from_location().unwrap_or(Location::START))
- .span;
-
- let at_location = locations.at_location().unwrap_or(Location::START);
-
- for constraint in constraints.keys() {
- debug!("generate: constraint: {:?}", constraint);
- let (a_vid, b_vid) = match constraint {
- Constraint::VarSubVar(a_vid, b_vid) => (*a_vid, *b_vid),
- Constraint::RegSubVar(a_r, b_vid) => (self.to_region_vid(a_r), *b_vid),
- Constraint::VarSubReg(a_vid, b_r) => (*a_vid, self.to_region_vid(b_r)),
- Constraint::RegSubReg(a_r, b_r) => {
- (self.to_region_vid(a_r), self.to_region_vid(b_r))
- }
- };
-
- // We have the constraint that `a_vid <= b_vid`. Add
- // `b_vid: a_vid` to our region checker. Note that we
- // reverse direction, because `regioncx` talks about
- // "outlives" (`>=`) whereas the region constraints
- // talk about `<=`.
- self.add_outlives(span, b_vid, a_vid, at_location);
-
- // In the new analysis, all outlives relations etc
- // "take effect" at the mid point of the statement
- // that requires them, so ignore the `at_location`.
- if let Some(all_facts) = &mut self.all_facts {
- if let Some(from_location) = locations.from_location() {
- all_facts.outlives.push((
- b_vid,
- a_vid,
- self.location_table.mid_index(from_location),
- ));
- } else {
- for location in self.location_table.all_points() {
- all_facts.outlives.push((b_vid, a_vid, location));
+    /// Converts each query region constraint into NLL form, pushing the
+    /// results into `self.outlives_constraints` / `self.type_tests`.
+    pub(super) fn convert_all(&mut self, query_constraints: &[QueryRegionConstraint<'tcx>]) {
+        for query_constraint in query_constraints {
+            self.convert(query_constraint);
+        }
+    }
+
+ pub(super) fn convert(&mut self, query_constraint: &QueryRegionConstraint<'tcx>) {
+ debug!("generate: constraints at: {:#?}", self.locations);
+
+ // Extract out various useful fields we'll need below.
+ let ConstraintConversion {
+ tcx,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ ..
+ } = *self;
+
+ // At the moment, we never generate any "higher-ranked"
+ // region constraints like `for<'a> 'a: 'b`. At some point
+ // when we move to universes, we will, and this assertion
+ // will start to fail.
+ let ty::OutlivesPredicate(k1, r2) =
+ query_constraint.no_late_bound_regions().unwrap_or_else(|| {
+ span_bug!(
+ self.span(),
+ "query_constraint {:?} contained bound regions",
+ query_constraint,
+ );
+ });
+
+ match k1.unpack() {
+ UnpackedKind::Lifetime(r1) => {
+ let r1_vid = self.to_region_vid(r1);
+ let r2_vid = self.to_region_vid(r2);
+ self.add_outlives(r1_vid, r2_vid);
+
+ // In the new analysis, all outlives relations etc
+ // "take effect" at the mid point of the statement
+ // that requires them, so ignore the `at_location`.
+ if let Some(all_facts) = &mut self.all_facts {
+ if let Some(from_location) = self.locations.from_location() {
+ all_facts.outlives.push((
+ r1_vid,
+ r2_vid,
+ self.location_table.mid_index(from_location),
+ ));
+ } else {
+ for location in self.location_table.all_points() {
+ all_facts.outlives.push((r1_vid, r2_vid, location));
+ }
}
}
}
- }
- for verify in verifys {
- let type_test = self.verify_to_type_test(verify, span, locations);
- self.add_type_test(type_test);
+ UnpackedKind::Type(t1) => {
+ // we don't actually use this for anything, but
+ // the `TypeOutlives` code needs an origin.
+ let origin = infer::RelateParamBound(self.span(), t1);
+
+ TypeOutlives::new(
+ &mut *self,
+ tcx,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ ).type_must_outlive(origin, t1, r2);
+ }
}
-
- assert!(
- givens.is_empty(),
- "MIR type-checker does not use givens (thank goodness)"
- );
}
fn verify_to_type_test(
&self,
- verify: &Verify<'tcx>,
- span: Span,
- locations: Locations,
+ generic_kind: GenericKind<'tcx>,
+ region: ty::Region<'tcx>,
+ bound: VerifyBound<'tcx>,
) -> TypeTest<'tcx> {
- let generic_kind = verify.kind;
-
- let lower_bound = self.to_region_vid(verify.region);
+ let lower_bound = self.to_region_vid(region);
- let point = locations.at_location().unwrap_or(Location::START);
+ let point = self.locations.at_location().unwrap_or(Location::START);
- let test = self.verify_bound_to_region_test(&verify.bound);
+ let test = self.verify_bound_to_region_test(&bound);
TypeTest {
generic_kind,
lower_bound,
point,
- span,
+ span: self.span(),
test,
}
}
self.universal_regions.to_region_vid(r)
}
- fn add_outlives(
- &mut self,
- span: Span,
- sup: ty::RegionVid,
- sub: ty::RegionVid,
- point: Location,
- ) {
+    /// Span associated with `self.locations`, falling back to the span of
+    /// `Location::START` when there is no "from" location.
+    fn span(&self) -> Span {
+        self.mir
+            .source_info(self.locations.from_location().unwrap_or(Location::START))
+            .span
+    }
+
+ fn add_outlives(&mut self, sup: ty::RegionVid, sub: ty::RegionVid) {
+ let span = self.span();
+ let point = self.locations.at_location().unwrap_or(Location::START);
+
self.outlives_constraints.push(OutlivesConstraint {
span,
sub,
self.type_tests.push(type_test);
}
}
+
+// The `TypeOutlives` machinery reports the region constraints it deduces
+// through this delegate; we translate them into NLL outlives constraints
+// and type tests.
+impl<'a, 'b, 'gcx, 'tcx> TypeOutlivesDelegate<'tcx>
+    for &'a mut ConstraintConversion<'b, 'gcx, 'tcx>
+{
+    fn push_sub_region_constraint(
+        &mut self,
+        _origin: SubregionOrigin<'tcx>,
+        a: ty::Region<'tcx>,
+        b: ty::Region<'tcx>,
+    ) {
+        // `a` is a sub-region of `b`, recorded here as
+        // `add_outlives(sup = b, sub = a)`.
+        let b = self.universal_regions.to_region_vid(b);
+        let a = self.universal_regions.to_region_vid(a);
+        self.add_outlives(b, a);
+    }
+
+    fn push_verify(
+        &mut self,
+        _origin: SubregionOrigin<'tcx>,
+        kind: GenericKind<'tcx>,
+        a: ty::Region<'tcx>,
+        bound: VerifyBound<'tcx>,
+    ) {
+        // Verify obligations become `TypeTest`s to be checked later by
+        // region inference.
+        let type_test = self.verify_to_type_test(kind, a, bound);
+        self.add_type_test(type_test);
+    }
+}
use rustc::infer::InferOk;
use rustc::mir::visit::TyContext;
use rustc::mir::*;
+use rustc::traits::query::type_op::custom::CustomTypeOp;
use rustc::traits::{ObligationCause, PredicateObligations};
use rustc::ty::subst::Subst;
use rustc::ty::Ty;
// Equate expected input tys with those in the MIR.
let argument_locals = (1..).map(Local::new);
for (&unnormalized_input_ty, local) in unnormalized_input_tys.iter().zip(argument_locals) {
- let input_ty = self.normalize(&unnormalized_input_ty, Locations::All);
+ let input_ty = self.normalize(unnormalized_input_ty, Locations::All);
let mir_input_ty = mir.local_decls[local].ty;
self.equate_normalized_input_or_output(input_ty, mir_input_ty);
}
"equate_inputs_and_outputs: unnormalized_output_ty={:?}",
unnormalized_output_ty
);
- let output_ty = self.normalize(&unnormalized_output_ty, Locations::All);
+ let output_ty = self.normalize(unnormalized_output_ty, Locations::All);
debug!(
"equate_inputs_and_outputs: normalized output_ty={:?}",
output_ty
);
+ let param_env = self.param_env;
let mir_output_ty = mir.local_decls[RETURN_PLACE].ty;
let anon_type_map =
self.fully_perform_op(
Locations::All,
- || format!("input_output"),
- |cx| {
- let mut obligations = ObligationAccumulator::default();
-
- let dummy_body_id = ObligationCause::dummy().body_id;
- let (output_ty, anon_type_map) = obligations.add(infcx.instantiate_anon_types(
- mir_def_id,
- dummy_body_id,
- cx.param_env,
- &output_ty,
- ));
- debug!(
- "equate_inputs_and_outputs: instantiated output_ty={:?}",
- output_ty
- );
- debug!(
- "equate_inputs_and_outputs: anon_type_map={:#?}",
- anon_type_map
- );
-
- debug!(
- "equate_inputs_and_outputs: mir_output_ty={:?}",
- mir_output_ty
- );
- obligations.add(
- infcx
- .at(&ObligationCause::dummy(), cx.param_env)
- .eq(output_ty, mir_output_ty)?,
- );
-
- for (&anon_def_id, anon_decl) in &anon_type_map {
- let anon_defn_ty = tcx.type_of(anon_def_id);
- let anon_defn_ty = anon_defn_ty.subst(tcx, anon_decl.substs);
- let anon_defn_ty = renumber::renumber_regions(
- cx.infcx,
- TyContext::Location(Location::START),
- &anon_defn_ty,
+ CustomTypeOp::new(
+ |infcx| {
+ let mut obligations = ObligationAccumulator::default();
+
+ let dummy_body_id = ObligationCause::dummy().body_id;
+ let (output_ty, anon_type_map) =
+ obligations.add(infcx.instantiate_anon_types(
+ mir_def_id,
+ dummy_body_id,
+ param_env,
+ &output_ty,
+ ));
+ debug!(
+ "equate_inputs_and_outputs: instantiated output_ty={:?}",
+ output_ty
);
debug!(
- "equate_inputs_and_outputs: concrete_ty={:?}",
- anon_decl.concrete_ty
+ "equate_inputs_and_outputs: anon_type_map={:#?}",
+ anon_type_map
+ );
+
+ debug!(
+ "equate_inputs_and_outputs: mir_output_ty={:?}",
+ mir_output_ty
);
- debug!("equate_inputs_and_outputs: anon_defn_ty={:?}", anon_defn_ty);
obligations.add(
infcx
- .at(&ObligationCause::dummy(), cx.param_env)
- .eq(anon_decl.concrete_ty, anon_defn_ty)?,
+ .at(&ObligationCause::dummy(), param_env)
+ .eq(output_ty, mir_output_ty)?,
);
- }
-
- debug!("equate_inputs_and_outputs: equated");
- Ok(InferOk {
- value: Some(anon_type_map),
- obligations: obligations.into_vec(),
- })
- },
+ for (&anon_def_id, anon_decl) in &anon_type_map {
+ let anon_defn_ty = tcx.type_of(anon_def_id);
+ let anon_defn_ty = anon_defn_ty.subst(tcx, anon_decl.substs);
+ let anon_defn_ty = renumber::renumber_regions(
+ infcx,
+ TyContext::Location(Location::START),
+ &anon_defn_ty,
+ );
+ debug!(
+ "equate_inputs_and_outputs: concrete_ty={:?}",
+ anon_decl.concrete_ty
+ );
+ debug!("equate_inputs_and_outputs: anon_defn_ty={:?}", anon_defn_ty);
+ obligations.add(
+ infcx
+ .at(&ObligationCause::dummy(), param_env)
+ .eq(anon_decl.concrete_ty, anon_defn_ty)?,
+ );
+ }
+
+ debug!("equate_inputs_and_outputs: equated");
+
+ Ok(InferOk {
+ value: Some(anon_type_map),
+ obligations: obligations.into_vec(),
+ })
+ },
+ || format!("input_output"),
+ ),
).unwrap_or_else(|terr| {
span_mirbug!(
self,
if let Some(anon_type_map) = anon_type_map {
self.fully_perform_op(
Locations::All,
- || format!("anon_type_map"),
- |_cx| {
- infcx.constrain_anon_types(&anon_type_map, universal_regions);
- Ok(InferOk {
- value: (),
- obligations: vec![],
- })
- },
+ CustomTypeOp::new(
+ |_cx| {
+ infcx.constrain_anon_types(&anon_type_map, universal_regions);
+ Ok(InferOk {
+ value: (),
+ obligations: vec![],
+ })
+ },
+ || format!("anon_type_map"),
+ ),
).unwrap();
}
}
use dataflow::move_paths::{HasMoveData, MoveData};
use dataflow::MaybeInitializedPlaces;
use dataflow::{FlowAtLocation, FlowsAtLocation};
-use rustc::infer::region_constraints::RegionConstraintData;
+use rustc::infer::canonical::QueryRegionConstraint;
use rustc::mir::Local;
use rustc::mir::{BasicBlock, Location, Mir};
-use rustc::traits::ObligationCause;
-use rustc::ty::subst::Kind;
+use rustc::traits::query::dropck_outlives::DropckOutlivesResult;
+use rustc::traits::query::type_op::outlives::DropckOutlives;
+use rustc::traits::query::type_op::TypeOp;
use rustc::ty::{Ty, TypeFoldable};
use rustc_data_structures::fx::FxHashMap;
use std::rc::Rc;
}
struct DropData<'tcx> {
-    dropped_kinds: Vec<Kind<'tcx>>,
-    region_constraint_data: Option<Rc<RegionConstraintData<'tcx>>>,
+    // Result of the `dropck_outlives` query for the dropped type; its
+    // `kinds` must be live at each drop of that type (see the loop over
+    // `drop_data.dropck_result.kinds` below).
+    dropck_result: DropckOutlivesResult<'tcx>,
+    // Region constraints produced while computing `dropck_result`,
+    // replayed via `push_region_constraints` at each drop location.
+    region_constraint_data: Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>,
}
impl<'gen, 'typeck, 'flow, 'gcx, 'tcx> TypeLivenessGenerator<'gen, 'typeck, 'flow, 'gcx, 'tcx> {
);
cx.tcx().for_each_free_region(&value, |live_region| {
- cx
- .constraints
+ cx.constraints
.liveness_set
.push((live_region, location, cause.clone()));
});
});
if let Some(data) = &drop_data.region_constraint_data {
- self.cx
- .push_region_constraints(location.at_self(), data.clone());
+ self.cx.push_region_constraints(location.at_self(), data);
}
+ drop_data.dropck_result.report_overflows(
+ self.cx.infcx.tcx,
+ self.mir.source_info(location).span,
+ dropped_ty,
+ );
+
// All things in the `outlives` array may be touched by
// the destructor and must be live at this point.
let cause = Cause::DropVar(dropped_local, location);
- for &kind in &drop_data.dropped_kinds {
+ for &kind in &drop_data.dropck_result.kinds {
Self::push_type_live_constraint(&mut self.cx, kind, location, cause);
}
}
) -> DropData<'tcx> {
debug!("compute_drop_data(dropped_ty={:?})", dropped_ty,);
- let (dropped_kinds, region_constraint_data) =
- cx.fully_perform_op_and_get_region_constraint_data(
- || format!("compute_drop_data(dropped_ty={:?})", dropped_ty),
- |cx| {
- Ok(cx
- .infcx
- .at(&ObligationCause::dummy(), cx.param_env)
- .dropck_outlives(dropped_ty))
- },
- ).unwrap();
+ let param_env = cx.param_env;
+ let (dropck_result, region_constraint_data) = param_env
+ .and(DropckOutlives::new(dropped_ty))
+ .fully_perform(cx.infcx)
+ .unwrap();
DropData {
- dropped_kinds,
+ dropck_result,
region_constraint_data,
}
}
use dataflow::FlowAtLocation;
use dataflow::MaybeInitializedPlaces;
use rustc::hir::def_id::DefId;
-use rustc::infer::region_constraints::{GenericKind, RegionConstraintData};
-use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
+use rustc::infer::canonical::QueryRegionConstraint;
+use rustc::infer::region_constraints::GenericKind;
+use rustc::infer::{InferCtxt, LateBoundRegionConversionTime};
use rustc::mir::interpret::EvalErrorKind::BoundsCheck;
use rustc::mir::tcx::PlaceTy;
use rustc::mir::visit::{PlaceContext, Visitor};
use rustc::mir::*;
-use rustc::traits::query::NoSolution;
-use rustc::traits::{self, ObligationCause, Normalized, TraitEngine};
-use rustc::ty::error::TypeError;
+use rustc::traits::query::type_op;
+use rustc::traits::query::{Fallible, NoSolution};
use rustc::ty::fold::TypeFoldable;
use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, TypeVariants};
use std::fmt;
-use std::rc::Rc;
use syntax_pos::{Span, DUMMY_SP};
use transform::{MirPass, MirSource};
use util::liveness::LivenessResults;
let instantiated_predicates =
tcx.predicates_of(def_id).instantiate(tcx, substs);
- let predicates =
- type_checker.normalize(&instantiated_predicates.predicates, location);
- type_checker.prove_predicates(predicates, location);
+ type_checker.normalize_and_prove_instantiated_predicates(
+ instantiated_predicates,
+ location,
+ );
}
value.ty
Place::Static(box Static { def_id, ty: sty }) => {
let sty = self.sanitize_type(place, sty);
let ty = self.tcx().type_of(def_id);
- let ty = self.cx.normalize(&ty, location);
+ let ty = self.cx.normalize(ty, location);
if let Err(terr) = self.cx.eq_types(ty, sty, location.at_self()) {
span_mirbug!(
self,
fn fully_perform_op<R>(
&mut self,
locations: Locations,
- describe_op: impl Fn() -> String,
- op: impl FnOnce(&mut Self) -> InferResult<'tcx, R>,
- ) -> Result<R, TypeError<'tcx>> {
- let (r, opt_data) = self.fully_perform_op_and_get_region_constraint_data(
- || format!("{} at {:?}", describe_op(), locations),
- op,
- )?;
-
- if let Some(data) = opt_data {
+ op: impl type_op::TypeOp<'gcx, 'tcx, Output = R>,
+ ) -> Fallible<R> {
+ let (r, opt_data) = op.fully_perform(self.infcx)?;
+
+ if let Some(data) = &opt_data {
self.push_region_constraints(locations, data);
}
fn push_region_constraints(
&mut self,
locations: Locations,
- data: Rc<RegionConstraintData<'tcx>>,
+ data: &[QueryRegionConstraint<'tcx>],
) {
debug!(
"push_region_constraints: constraints generated at {:?} are {:#?}",
if let Some(borrowck_context) = &mut self.borrowck_context {
constraint_conversion::ConstraintConversion::new(
+ self.infcx.tcx,
self.mir,
borrowck_context.universal_regions,
borrowck_context.location_table,
+ self.region_bound_pairs,
+ self.implicit_region_bound,
+ self.param_env,
+ locations,
&mut self.constraints.outlives_constraints,
&mut self.constraints.type_tests,
&mut borrowck_context.all_facts,
- ).convert(locations, &data);
- }
- }
-
- /// Helper for `fully_perform_op`, but also used on its own
- /// sometimes to enable better caching: executes `op` fully (along
- /// with resulting obligations) and returns the full set of region
- /// obligations. If the same `op` were to be performed at some
- /// other location, then the same set of region obligations would
- /// be generated there, so this can be useful for caching.
- fn fully_perform_op_and_get_region_constraint_data<R>(
- &mut self,
- describe_op: impl Fn() -> String,
- op: impl FnOnce(&mut Self) -> InferResult<'tcx, R>,
- ) -> Result<(R, Option<Rc<RegionConstraintData<'tcx>>>), TypeError<'tcx>> {
- if cfg!(debug_assertions) {
- info!(
- "fully_perform_op_and_get_region_constraint_data({})",
- describe_op(),
- );
- }
-
- let mut fulfill_cx = TraitEngine::new(self.infcx.tcx);
- let dummy_body_id = ObligationCause::dummy().body_id;
- let InferOk { value, obligations } = self.infcx.commit_if_ok(|_| op(self))?;
- debug_assert!(obligations.iter().all(|o| o.cause.body_id == dummy_body_id));
- fulfill_cx.register_predicate_obligations(self.infcx, obligations);
- if let Err(e) = fulfill_cx.select_all_or_error(self.infcx) {
- span_mirbug!(self, "", "errors selecting obligation: {:?}", e);
- }
-
- self.infcx.process_registered_region_obligations(
- self.region_bound_pairs,
- self.implicit_region_bound,
- self.param_env,
- dummy_body_id,
- );
-
- let data = self.infcx.take_and_reset_region_constraints();
- if data.is_empty() {
- Ok((value, None))
- } else {
- Ok((value, Some(Rc::new(data))))
+ ).convert_all(&data);
}
}
sub: Ty<'tcx>,
sup: Ty<'tcx>,
locations: Locations,
- ) -> UnitResult<'tcx> {
- // Micro-optimization.
- if sub == sup {
- return Ok(());
- }
-
+ ) -> Fallible<()> {
+ let param_env = self.param_env;
self.fully_perform_op(
locations,
- || format!("sub_types({:?} <: {:?})", sub, sup),
- |this| {
- this.infcx
- .at(&ObligationCause::dummy(), this.param_env)
- .sup(sup, sub)
- },
+ param_env.and(type_op::subtype::Subtype::new(sub, sup)),
)
}
- fn eq_types(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, locations: Locations) -> UnitResult<'tcx> {
- // Micro-optimization.
- if a == b {
- return Ok(());
- }
-
- self.fully_perform_op(
- locations,
- || format!("eq_types({:?} = {:?})", a, b),
- |this| {
- this.infcx
- .at(&ObligationCause::dummy(), this.param_env)
- .eq(b, a)
- },
- )
+ fn eq_types(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, locations: Locations) -> Fallible<()> {
+ let param_env = self.param_env;
+ self.fully_perform_op(locations, param_env.and(type_op::eq::Eq::new(b, a)))
}
fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
);
}
self.check_rvalue(mir, rv, location);
+ let trait_ref = ty::TraitRef {
+ def_id: tcx.lang_items().sized_trait().unwrap(),
+ substs: tcx.mk_substs_trait(place_ty, &[]),
+ };
+ self.prove_trait_ref(trait_ref, location);
}
StatementKind::SetDiscriminant {
ref place,
LateBoundRegionConversionTime::FnCall,
&sig,
);
- let sig = self.normalize(&sig, term_location);
+ let sig = self.normalize(sig, term_location);
self.check_call_dest(mir, term, &sig, destination, term_location);
self.prove_predicates(
let variant = &def.variants[variant_index];
let adj_field_index = active_field_index.unwrap_or(field_index);
if let Some(field) = variant.fields.get(adj_field_index) {
- Ok(self.normalize(&field.ty(tcx, substs), location))
+ Ok(self.normalize(field.ty(tcx, substs), location))
} else {
Err(FieldAccessError::OutOfRange {
field_count: variant.fields.len(),
// function definition. When we extract the
// signature, it comes from the `fn_sig` query,
// and hence may contain unnormalized results.
- let fn_sig = self.normalize(&fn_sig, location);
+ let fn_sig = self.normalize(fn_sig, location);
let ty_fn_ptr_from = tcx.mk_fn_ptr(fn_sig);
// function definition. When we extract the
// signature, it comes from the `fn_sig` query,
// and hence may contain unnormalized results.
- let fn_sig = self.normalize(&fn_sig, location);
+ let fn_sig = self.normalize(fn_sig, location);
let ty_fn_ptr_from = tcx.safe_to_unsafe_fn_ty(fn_sig);
if let Some(closure_region_requirements) =
tcx.mir_borrowck(*def_id).closure_requirements
{
- let dummy_body_id = ObligationCause::dummy().body_id;
- closure_region_requirements.apply_requirements(
- self.infcx,
- dummy_body_id,
+ let closure_constraints = closure_region_requirements.apply_requirements(
+ self.infcx.tcx,
location,
*def_id,
*substs,
);
+
+ self.push_region_constraints(
+ location.at_self(),
+ &closure_constraints,
+ );
}
tcx.predicates_of(*def_id).instantiate(tcx, substs.substs)
AggregateKind::Array(_) | AggregateKind::Tuple => ty::InstantiatedPredicates::empty(),
};
- let predicates = self.normalize(&instantiated_predicates.predicates, location);
- debug!("prove_aggregate_predicates: predicates={:?}", predicates);
- self.prove_predicates(predicates, location);
+ self.normalize_and_prove_instantiated_predicates(instantiated_predicates, location);
}
fn prove_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>, location: Location) {
);
}
- fn prove_predicates<T>(&mut self, predicates: T, location: Location)
- where
- T: IntoIterator<Item = ty::Predicate<'tcx>> + Clone,
- {
- let cause = ObligationCause::dummy();
- let obligations: Vec<_> = predicates
- .into_iter()
- .map(|p| traits::Obligation::new(cause.clone(), self.param_env, p))
- .collect();
-
- // Micro-optimization
- if obligations.is_empty() {
- return;
+    /// Normalizes each predicate in `instantiated_predicates` and then
+    /// proves it at `location`.
+    fn normalize_and_prove_instantiated_predicates(
+        &mut self,
+        instantiated_predicates: ty::InstantiatedPredicates<'tcx>,
+        location: Location,
+    ) {
+        for predicate in instantiated_predicates.predicates {
+            let predicate = self.normalize(predicate, location);
+            self.prove_predicate(predicate, location);
        }
+    }
- // This intermediate vector is mildly unfortunate, in that we
- // sometimes create it even when logging is disabled, but only
- // if debug-info is enabled, and I doubt it is actually
- // expensive. -nmatsakis
- let predicates_vec: Vec<_> = if cfg!(debug_assertions) {
- obligations.iter().map(|o| o.predicate).collect()
- } else {
- Vec::new()
- };
+    /// Proves each predicate in turn at `location`; see `prove_predicate`.
+    fn prove_predicates(
+        &mut self,
+        predicates: impl IntoIterator<Item = ty::Predicate<'tcx>>,
+        location: Location,
+    ) {
+        for predicate in predicates {
+            debug!(
+                "prove_predicates(predicate={:?}, location={:?})",
+                predicate, location,
+            );
+            self.prove_predicate(predicate, location);
+        }
+    }
+
+ fn prove_predicate(&mut self, predicate: ty::Predicate<'tcx>, location: Location) {
debug!(
- "prove_predicates(predicates={:?}, location={:?})",
- predicates_vec, location,
+ "prove_predicate(predicate={:?}, location={:?})",
+ predicate, location,
);
+ let param_env = self.param_env;
self.fully_perform_op(
location.at_self(),
- || format!("prove_predicates({:?})", predicates_vec),
- |_this| {
- Ok(InferOk {
- value: (),
- obligations,
- })
- },
- ).unwrap()
+ param_env.and(type_op::prove_predicate::ProvePredicate::new(predicate)),
+ ).unwrap_or_else(|NoSolution| {
+ span_mirbug!(self, NoSolution, "could not prove {:?}", predicate);
+ })
}
fn typeck_mir(&mut self, mir: &Mir<'tcx>) {
}
}
- fn normalize<T>(&mut self, value: &T, location: impl ToLocations) -> T
+ fn normalize<T>(&mut self, value: T, location: impl ToLocations) -> T
where
- T: fmt::Debug + TypeFoldable<'tcx>,
+ T: type_op::normalize::Normalizable<'gcx, 'tcx> + Copy,
{
- // Micro-optimization: avoid work when we don't have to
- if !value.has_projections() {
- return value.clone();
- }
-
debug!("normalize(value={:?}, location={:?})", value, location);
+ let param_env = self.param_env;
self.fully_perform_op(
location.to_locations(),
- || format!("normalize(value={:?})", value),
- |this| {
- let Normalized { value, obligations } = this
- .infcx
- .at(&ObligationCause::dummy(), this.param_env)
- .normalize(value)
- .unwrap_or_else(|NoSolution| {
- span_bug!(
- this.last_span,
- "normalization of `{:?}` failed at {:?}",
- value,
- location,
- );
- });
- Ok(InferOk { value, obligations })
- },
- ).unwrap()
+ param_env.and(type_op::normalize::Normalize::new(value)),
+ ).unwrap_or_else(|NoSolution| {
+ span_mirbug!(self, NoSolution, "failed to normalize `{:?}`", value);
+ value
+ })
}
}
// broken MIR, so try not to report duplicate errors.
return;
}
+
+ if tcx.is_struct_constructor(def_id) {
+ // We just assume that the automatically generated struct constructors are
+ // correct. See the comment in the `mir_borrowck` implementation for an
+ // explanation why we need this.
+ return;
+ }
+
let param_env = tcx.param_env(def_id);
tcx.infer_ctxt().enter(|infcx| {
- let _ = type_check_internal(
- &infcx,
- def_id,
- param_env,
- mir,
- &[],
- None,
- None,
- &mut |_| (),
- );
+ let _ =
+ type_check_internal(&infcx, def_id, param_env, mir, &[], None, None, &mut |_| ());
// For verification purposes, we just ignore the resulting
// region constraint sets. Not our problem. =)
/// `'1: '2`, then the caller would impose the constraint that
/// `V[1]: V[2]`.
pub fn closure_mapping(
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'_, '_, 'tcx>,
closure_ty: Ty<'tcx>,
expected_num_vars: usize,
) -> IndexVec<RegionVid, ty::Region<'tcx>> {
let mut region_mapping = IndexVec::with_capacity(expected_num_vars);
- region_mapping.push(infcx.tcx.types.re_static);
- infcx.tcx.for_each_free_region(&closure_ty, |fr| {
+ region_mapping.push(tcx.types.re_static);
+ tcx.for_each_free_region(&closure_ty, |fr| {
region_mapping.push(fr);
});
mutability: Mutability::Not,
};
if let Some(hir::map::NodeBinding(pat)) = tcx.hir.find(var_id) {
- if let hir::PatKind::Binding(_, _, ref name, _) = pat.node {
- decl.debug_name = name.node;
+ if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
+ decl.debug_name = ident.name;
let bm = *hir.tables.pat_binding_modes()
.get(pat.hir_id)
// If this is a simple binding pattern, give the local a nice name for debuginfo.
let mut name = None;
if let Some(pat) = pattern {
- if let hir::PatKind::Binding(_, _, ref ident, _) = pat.node {
- name = Some(ident.node);
+ if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
+ name = Some(ident.name);
}
}
let method_name = Symbol::intern(method_name);
let substs = self.tcx.mk_substs_trait(self_ty, params);
for item in self.tcx.associated_items(trait_def_id) {
- if item.kind == ty::AssociatedKind::Method && item.name == method_name {
+ if item.kind == ty::AssociatedKind::Method && item.ident.name == method_name {
let method_ty = self.tcx.type_of(item.def_id);
let method_ty = method_ty.subst(self.tcx, substs);
return (method_ty,
use self::Usefulness::*;
use self::WitnessPreference::*;
-use rustc::middle::const_val::ConstVal;
-
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::indexed_vec::Idx;
for row in patterns {
match *row.kind {
- PatternKind::Constant {
- value: const_val @ &ty::Const {
- val: ConstVal::Value(..),
- ..
- }
- } => {
- if let Some(ptr) = const_val.to_ptr() {
- let is_array_ptr = const_val.ty
+ PatternKind::Constant { value } => {
+ if let Some(ptr) = value.to_ptr() {
+ let is_array_ptr = value.ty
.builtin_deref(true)
.and_then(|t| t.ty.builtin_index())
.map_or(false, |t| t == cx.tcx.types.u8);
suffix: &[Pattern<'tcx>]
) -> Result<bool, ErrorReported> {
let data: &[u8] = match *ctor {
- ConstantValue(&ty::Const { val: ConstVal::Value(const_val), ty }) => {
- let val = match const_val {
- ConstValue::ByRef(..) => bug!("unexpected ConstValue::ByRef"),
+ ConstantValue(const_val) => {
+ let val = match const_val.val {
+ ConstValue::Unevaluated(..) |
+ ConstValue::ByRef(..) => bug!("unexpected ConstValue: {:?}", const_val),
ConstValue::Scalar(val) | ConstValue::ScalarPair(val, _) => val,
};
if let Ok(ptr) = val.to_ptr() {
- let is_array_ptr = ty
+ let is_array_ptr = const_val.ty
.builtin_deref(true)
.and_then(|t| t.ty.builtin_index())
.map_or(false, |t| t == tcx.types.u8);
}
PatternError::FloatBug => {
// FIXME(#31407) this is only necessary because float parsing is buggy
- ::rustc::middle::const_val::struct_error(
+ ::rustc::mir::interpret::struct_error(
self.tcx.at(pat_span),
"could not evaluate float literal (see issue #31407)",
).emit();
}
PatternError::NonConstPath(span) => {
- ::rustc::middle::const_val::struct_error(
+ ::rustc::mir::interpret::struct_error(
self.tcx.at(span),
"runtime values cannot be referenced in patterns",
).emit();
fn check_for_bindings_named_the_same_as_variants(cx: &MatchVisitor, pat: &Pat) {
pat.walk(|p| {
- if let PatKind::Binding(_, _, name, None) = p.node {
+ if let PatKind::Binding(_, _, ident, None) = p.node {
let bm = *cx.tables
.pat_binding_modes()
.get(p.hir_id)
let pat_ty = cx.tables.pat_ty(p);
if let ty::TyAdt(edef, _) = pat_ty.sty {
if edef.is_enum() && edef.variants.iter().any(|variant| {
- variant.name == name.node && variant.ctor_kind == CtorKind::Const
+ variant.name == ident.name && variant.ctor_kind == CtorKind::Const
}) {
let ty_path = cx.tcx.item_path_str(edef.did);
let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170,
"pattern binding `{}` is named the same as one \
of the variants of the type `{}`",
- name.node, ty_path);
+ ident, ty_path);
err.span_suggestion_with_applicability(
p.span,
"to match on the variant, qualify the path",
- format!("{}::{}", ty_path, name.node),
+ format!("{}::{}", ty_path, ident),
Applicability::MachineApplicable
);
err.emit();
use interpret::{const_val_field, const_variant_index, self};
-use rustc::middle::const_val::ConstVal;
use rustc::mir::{fmt_const_val, Field, BorrowKind, Mutability};
use rustc::mir::interpret::{Scalar, GlobalId, ConstValue, Value};
use rustc::ty::{self, TyCtxt, AdtDef, Ty, Region};
},
}
-fn print_const_val(value: &ty::Const, f: &mut fmt::Formatter) -> fmt::Result {
- match value.val {
- ConstVal::Value(..) => fmt_const_val(f, value),
- ConstVal::Unevaluated(..) => bug!("{:?} not printable in a pattern", value)
- }
-}
-
impl<'tcx> fmt::Display for Pattern<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self.kind {
write!(f, "{}", subpattern)
}
PatternKind::Constant { value } => {
- print_const_val(value, f)
+ fmt_const_val(f, value)
}
PatternKind::Range { lo, hi, end } => {
- print_const_val(lo, f)?;
+ fmt_const_val(f, lo)?;
match end {
RangeEnd::Included => write!(f, "...")?,
RangeEnd::Excluded => write!(f, "..")?,
}
- print_const_val(hi, f)
+ fmt_const_val(f, hi)
}
PatternKind::Slice { ref prefix, ref slice, ref suffix } |
PatternKind::Array { ref prefix, ref slice, ref suffix } => {
}
}
- PatKind::Binding(_, id, ref name, ref sub) => {
+ PatKind::Binding(_, id, ident, ref sub) => {
let var_ty = self.tables.node_id_to_type(pat.hir_id);
let region = match var_ty.sty {
ty::TyRef(r, _, _) => Some(r),
if let ty::TyRef(_, rty, _) = ty.sty {
ty = rty;
} else {
- bug!("`ref {}` has wrong type {}", name.node, ty);
+ bug!("`ref {}` has wrong type {}", ident, ty);
}
}
PatternKind::Binding {
mutability,
mode,
- name: name.node,
+ name: ident.name,
var: id,
ty: var_ty,
subpattern: self.lower_opt_pattern(sub),
debug!("const_to_pat: cv={:#?}", cv);
let adt_subpattern = |i, variant_opt| {
let field = Field::new(i);
- let val = match cv.val {
- ConstVal::Value(miri) => const_val_field(
- self.tcx, self.param_env, instance,
- variant_opt, field, miri, cv.ty,
- ).expect("field access failed"),
- _ => bug!("{:#?} is not a valid adt", cv),
- };
+ let val = const_val_field(
+ self.tcx, self.param_env, instance,
+ variant_opt, field, cv,
+ ).expect("field access failed");
self.const_to_pat(instance, val, id, span)
};
let adt_subpatterns = |n, variant_opt| {
PatternKind::Wild
},
ty::TyAdt(adt_def, substs) if adt_def.is_enum() => {
- match cv.val {
- ConstVal::Value(val) => {
- let variant_index = const_variant_index(
- self.tcx, self.param_env, instance, val, cv.ty
- ).expect("const_variant_index failed");
- let subpatterns = adt_subpatterns(
- adt_def.variants[variant_index].fields.len(),
- Some(variant_index),
- );
- PatternKind::Variant {
- adt_def,
- substs,
- variant_index,
- subpatterns,
- }
- },
- ConstVal::Unevaluated(..) =>
- span_bug!(span, "{:#?} is not a valid enum constant", cv),
+ let variant_index = const_variant_index(
+ self.tcx, self.param_env, instance, cv
+ ).expect("const_variant_index failed");
+ let subpatterns = adt_subpatterns(
+ adt_def.variants[variant_index].fields.len(),
+ Some(variant_index),
+ );
+ PatternKind::Variant {
+ adt_def,
+ substs,
+ variant_index,
+ subpatterns,
}
},
ty::TyAdt(adt_def, _) => {
use rustc::hir;
-use rustc::middle::const_val::{ConstEvalErr, ErrKind};
-use rustc::middle::const_val::ErrKind::{TypeckError, CheckMatchError};
+use rustc::mir::interpret::ConstEvalErr;
use rustc::mir;
use rustc::ty::{self, TyCtxt, Ty, Instance};
use rustc::ty::layout::{self, LayoutOf, Primitive};
use std::fmt;
use std::error::Error;
-use rustc_data_structures::sync::Lrc;
pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
val: Value,
ty: Ty<'tcx>,
) -> &'tcx ty::Const<'tcx> {
- let layout = ecx.tcx.layout_of(ty::ParamEnv::reveal_all().and(ty)).unwrap();
+ let layout = ecx.layout_of(ty).unwrap();
match (val, &layout.abi) {
(Value::Scalar(Scalar::Bits { defined: 0, ..}), _) if layout.is_zst() => {},
(Value::ByRef(..), _) |
let (frames, span) = ecx.generate_stacktrace(None);
let err = ConstEvalErr {
span,
- kind: ErrKind::Miri(err, frames).into(),
+ error: err,
+ stacktrace: frames,
};
err.report_as_error(
ecx.tcx,
instance: ty::Instance<'tcx>,
variant: Option<usize>,
field: mir::Field,
- value: ConstValue<'tcx>,
- ty: Ty<'tcx>,
-) -> ::rustc::middle::const_val::EvalResult<'tcx> {
- trace!("const_val_field: {:?}, {:?}, {:?}, {:?}", instance, field, value, ty);
+ value: &'tcx ty::Const<'tcx>,
+) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
+ trace!("const_val_field: {:?}, {:?}, {:?}", instance, field, value);
let mut ecx = mk_eval_cx(tcx, instance, param_env).unwrap();
let result = (|| {
- let value = ecx.const_value_to_value(value, ty)?;
+ let ty = value.ty;
+ let value = ecx.const_to_value(value.val)?;
let layout = ecx.layout_of(ty)?;
let (ptr, align) = match value {
Value::ByRef(ptr, align) => (ptr, align),
})();
result.map_err(|err| {
let (trace, span) = ecx.generate_stacktrace(None);
- let err = ErrKind::Miri(err, trace);
ConstEvalErr {
- kind: err.into(),
+ error: err,
+ stacktrace: trace,
span,
- }
+ }.into()
})
}
tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
instance: ty::Instance<'tcx>,
- val: ConstValue<'tcx>,
- ty: Ty<'tcx>,
+ val: &'tcx ty::Const<'tcx>,
) -> EvalResult<'tcx, usize> {
- trace!("const_variant_index: {:?}, {:?}, {:?}", instance, val, ty);
+ trace!("const_variant_index: {:?}, {:?}", instance, val);
let mut ecx = mk_eval_cx(tcx, instance, param_env).unwrap();
- let value = ecx.const_value_to_value(val, ty)?;
+ let value = ecx.const_to_value(val.val)?;
let (ptr, align) = match value {
Value::ScalarPair(..) | Value::Scalar(_) => {
- let layout = ecx.layout_of(ty)?;
+ let layout = ecx.layout_of(val.ty)?;
let ptr = ecx.memory.allocate(layout.size, layout.align, Some(MemoryKind::Stack))?.into();
- ecx.write_value_to_ptr(value, ptr, layout.align, ty)?;
+ ecx.write_value_to_ptr(value, ptr, layout.align, val.ty)?;
(ptr, layout.align)
},
Value::ByRef(ptr, align) => (ptr, align),
};
let place = Place::from_scalar_ptr(ptr, align);
- ecx.read_discriminant_as_variant_index(place, ty)
+ ecx.read_discriminant_as_variant_index(place, val.ty)
}
pub fn const_value_to_allocation_provider<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- (val, ty): (ConstValue<'tcx>, Ty<'tcx>),
+ val: &'tcx ty::Const<'tcx>,
) -> &'tcx Allocation {
- match val {
+ match val.val {
ConstValue::ByRef(alloc, offset) => {
assert_eq!(offset.bytes(), 0);
return alloc;
ty::ParamEnv::reveal_all(),
CompileTimeEvaluator,
());
- let value = ecx.const_value_to_value(val, ty)?;
- let layout = ecx.layout_of(ty)?;
+ let value = ecx.const_to_value(val.val)?;
+ let layout = ecx.layout_of(val.ty)?;
let ptr = ecx.memory.allocate(layout.size, layout.align, Some(MemoryKind::Stack))?;
- ecx.write_value_to_ptr(value, ptr.into(), layout.align, ty)?;
+ ecx.write_value_to_ptr(value, ptr.into(), layout.align, val.ty)?;
let alloc = ecx.memory.get(ptr.alloc_id)?;
Ok(tcx.intern_const_alloc(alloc.clone()))
};
- result().expect("unable to convert ConstVal to Allocation")
+ result().expect("unable to convert ConstValue to Allocation")
}
pub fn const_eval_provider<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
-) -> ::rustc::middle::const_val::EvalResult<'tcx> {
+) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
trace!("const eval: {:?}", key);
let cid = key.value;
let def_id = cid.instance.def.def_id();
// Do match-check before building MIR
if tcx.check_match(def_id).is_err() {
return Err(ConstEvalErr {
- kind: Lrc::new(CheckMatchError),
+ error: EvalErrorKind::CheckMatchError.into(),
+ stacktrace: vec![],
span,
- });
+ }.into());
}
if let hir::BodyOwnerKind::Const = tcx.hir.body_owner_kind(id) {
// Do not continue into miri if typeck errors occurred; it will fail horribly
if tables.tainted_by_errors {
return Err(ConstEvalErr {
- kind: Lrc::new(TypeckError),
+ error: EvalErrorKind::TypeckError.into(),
+ stacktrace: vec![],
span,
- });
+ }.into());
}
};
Ok(value_to_const_value(&ecx, val, miri_ty))
}).map_err(|err| {
let (trace, span) = ecx.generate_stacktrace(None);
- let err = ErrKind::Miri(err, trace);
let err = ConstEvalErr {
- kind: err.into(),
+ error: err,
+ stacktrace: trace,
span,
};
if tcx.is_static(def_id).is_some() {
err.report_as_error(ecx.tcx, "could not evaluate static initializer");
}
- err
+ err.into()
})
}
use rustc::hir::def_id::DefId;
use rustc::hir::def::Def;
use rustc::hir::map::definitions::DefPathData;
-use rustc::middle::const_val::ConstVal;
use rustc::mir;
use rustc::ty::layout::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout};
use rustc::ty::subst::{Subst, Substs};
use rustc::ty::{self, Ty, TyCtxt, TypeAndMut};
use rustc::ty::query::TyCtxtAt;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use rustc::middle::const_val::FrameInfo;
+use rustc::mir::interpret::FrameInfo;
use syntax::codemap::{self, Span};
use syntax::ast::Mutability;
use rustc::mir::interpret::{
Ok(Scalar::Ptr(ptr).to_value_with_len(s.len() as u64, self.tcx.tcx))
}
- pub fn const_value_to_value(
+ pub fn const_to_value(
&mut self,
val: ConstValue<'tcx>,
- _ty: Ty<'tcx>,
) -> EvalResult<'tcx, Value> {
match val {
+ ConstValue::Unevaluated(def_id, substs) => {
+ let instance = self.resolve(def_id, substs)?;
+ self.read_global_as_value(GlobalId {
+ instance,
+ promoted: None,
+ })
+ }
ConstValue::ByRef(alloc, offset) => {
// FIXME: Allocate new AllocId for all constants inside
let id = self.memory.allocate_value(alloc.clone(), Some(MemoryKind::Stack))?;
}
}
- pub(super) fn const_to_value(
- &mut self,
- const_val: &ConstVal<'tcx>,
- ty: Ty<'tcx>
- ) -> EvalResult<'tcx, Value> {
- match *const_val {
- ConstVal::Unevaluated(def_id, substs) => {
- let instance = self.resolve(def_id, substs)?;
- self.read_global_as_value(GlobalId {
- instance,
- promoted: None,
- }, ty)
- }
- ConstVal::Value(val) => self.const_value_to_value(val, ty)
- }
- }
-
pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
trace!("resolve: {:?}, {:#?}", def_id, substs);
trace!("substs: {:#?}", self.substs());
self.param_env,
def_id,
substs,
- ).ok_or_else(|| EvalErrorKind::TypeckError.into()) // turn error prop into a panic to expose associated type in const issue
+ ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
}
pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
self.param_env,
def_id,
substs,
- ).ok_or_else(|| EvalErrorKind::TypeckError.into());
+ ).ok_or_else(|| EvalErrorKind::TooGeneric.into());
let fn_ptr = self.memory.create_fn_alloc(instance?);
let valty = ValTy {
value: Value::Scalar(fn_ptr.into()),
use rustc::mir::Literal;
let mir::Constant { ref literal, .. } = **constant;
let value = match *literal {
- Literal::Value { ref value } => self.const_to_value(&value.val, ty)?,
+ Literal::Value { ref value } => self.const_to_value(value.val)?,
Literal::Promoted { index } => {
let instance = self.frame().instance;
self.read_global_as_value(GlobalId {
instance,
promoted: Some(index),
- }, ty)?
+ })?
}
};
Ok(())
}
- pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
- if self.tcx.is_static(gid.instance.def_id()).is_some() {
- let alloc_id = self
- .tcx
- .alloc_map
- .lock()
- .intern_static(gid.instance.def_id());
- let layout = self.layout_of(ty)?;
- return Ok(Value::ByRef(Scalar::Ptr(alloc_id.into()), layout.align))
- }
+ pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, Value> {
let cv = self.const_eval(gid)?;
- self.const_to_value(&cv.val, ty)
+ self.const_to_value(cv.val)
}
pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
use rustc::ty::query::TyCtxtAt;
use rustc::ty::layout::{self, Align, TargetDataLayout, Size};
use syntax::ast::Mutability;
-use rustc::middle::const_val::ConstVal;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value,
instance,
promoted: None,
};
- self.tcx.const_eval(ParamEnv::reveal_all().and(gid)).map_err(|_| {
+ self.tcx.const_eval(ParamEnv::reveal_all().and(gid)).map_err(|err| {
// no need to report anything, the const_eval call takes care of that for statics
assert!(self.tcx.is_static(def_id).is_some());
- EvalErrorKind::TypeckError.into()
+ EvalErrorKind::ReferencedConstant(err).into()
}).map(|val| {
- let const_val = match val.val {
- ConstVal::Value(val) => val,
- ConstVal::Unevaluated(..) => bug!("should be evaluated"),
- };
- self.tcx.const_value_to_allocation((const_val, val.ty))
+ self.tcx.const_value_to_allocation(val)
})
}
use rustc::hir::map as hir_map;
use rustc::hir::def_id::DefId;
-use rustc::middle::const_val::ConstVal;
use rustc::mir::interpret::{AllocId, ConstValue};
use rustc::middle::lang_items::{ExchangeMallocFnLangItem, StartFnLangItem};
use rustc::ty::subst::Substs;
MonoItemCollectionMode::Lazy => {
self.entry_fn == Some(def_id) ||
self.tcx.is_reachable_non_generic(def_id) ||
+ self.tcx.is_weak_lang_item(def_id) ||
self.tcx.codegen_fn_attrs(def_id).flags.contains(
CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
}
if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) {
let overridden_methods: FxHashSet<_> =
impl_item_refs.iter()
- .map(|iiref| iiref.name)
+ .map(|iiref| iiref.ident.modern())
.collect();
for method in tcx.provided_trait_methods(trait_ref.def_id) {
- if overridden_methods.contains(&method.name) {
+ if overridden_methods.contains(&method.ident.modern()) {
continue;
}
match tcx.const_eval(param_env.and(cid)) {
Ok(val) => collect_const(tcx, val, instance.substs, output),
Err(err) => {
- use rustc::middle::const_val::ErrKind;
use rustc::mir::interpret::EvalErrorKind;
- if let ErrKind::Miri(ref miri, ..) = *err.kind {
- if let EvalErrorKind::ReferencedConstant(_) = miri.kind {
- err.report_as_error(
- tcx.at(mir.promoted[i].span),
- "erroneous constant used",
- );
- }
+ if let EvalErrorKind::ReferencedConstant(_) = err.error.kind {
+ err.report_as_error(
+ tcx.at(mir.promoted[i].span),
+ "erroneous constant used",
+ );
}
},
}
debug!("visiting const {:?}", *constant);
let val = match constant.val {
- ConstVal::Unevaluated(def_id, substs) => {
+ ConstValue::Unevaluated(def_id, substs) => {
let param_env = ty::ParamEnv::reveal_all();
let substs = tcx.subst_and_normalize_erasing_regions(
param_substs,
_ => constant.val,
};
match val {
- ConstVal::Unevaluated(..) => bug!("const eval yielded unevaluated const"),
- ConstVal::Value(ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b))) => {
+ ConstValue::Unevaluated(..) => bug!("const eval yielded unevaluated const"),
+ ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b)) => {
collect_miri(tcx, a.alloc_id, output);
collect_miri(tcx, b.alloc_id, output);
}
- ConstVal::Value(ConstValue::ScalarPair(_, Scalar::Ptr(ptr))) |
- ConstVal::Value(ConstValue::ScalarPair(Scalar::Ptr(ptr), _)) |
- ConstVal::Value(ConstValue::Scalar(Scalar::Ptr(ptr))) =>
+ ConstValue::ScalarPair(_, Scalar::Ptr(ptr)) |
+ ConstValue::ScalarPair(Scalar::Ptr(ptr), _) |
+ ConstValue::Scalar(Scalar::Ptr(ptr)) =>
collect_miri(tcx, ptr.alloc_id, output),
- ConstVal::Value(ConstValue::ByRef(alloc, _offset)) => {
+ ConstValue::ByRef(alloc, _offset) => {
for &id in alloc.relocations.values() {
collect_miri(tcx, id, output);
}
for projection in projections {
let projection = projection.skip_binder();
- let name = &self.tcx.associated_item(projection.item_def_id).name.as_str();
+ let name = &self.tcx.associated_item(projection.item_def_id).ident.as_str();
output.push_str(name);
output.push_str("=");
self.push_type_name(projection.ty, output);
use rustc::mir::{NullOp, StatementKind, Statement, BasicBlock, LocalKind};
use rustc::mir::{TerminatorKind, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem};
use rustc::mir::visit::{Visitor, PlaceContext};
-use rustc::middle::const_val::{ConstVal, ConstEvalErr, ErrKind};
+use rustc::mir::interpret::ConstEvalErr;
use rustc::ty::{TyCtxt, self, Instance};
use rustc::mir::interpret::{Value, Scalar, GlobalId, EvalResult};
use interpret::EvalContext;
return;
}
match tcx.describe_def(source.def_id) {
- // skip statics because they'll be evaluated by miri anyway
+ // skip statics/consts because they'll be evaluated by miri anyway
+ Some(Def::Const(..)) |
Some(Def::Static(..)) => return,
+ // we still run on associated constants, because they might not get evaluated
+ // within the current crate
_ => {},
}
trace!("ConstProp starting for {:?}", source.def_id);
let (frames, span) = self.ecx.generate_stacktrace(None);
let err = ConstEvalErr {
span,
- kind: ErrKind::Miri(err, frames).into(),
+ error: err,
+ stacktrace: frames,
};
err.report_as_lint(
self.ecx.tcx,
r
}
- fn const_eval(&mut self, cid: GlobalId<'tcx>, source_info: SourceInfo) -> Option<Const<'tcx>> {
- let value = match self.tcx.const_eval(self.param_env.and(cid)) {
- Ok(val) => val,
- Err(err) => {
- err.report_as_error(
- self.tcx.at(err.span),
- "constant evaluation error",
- );
- return None;
- },
- };
- let val = match value.val {
- ConstVal::Value(v) => {
- self.use_ecx(source_info, |this| this.ecx.const_value_to_value(v, value.ty))?
- },
- _ => bug!("eval produced: {:?}", value),
- };
- let val = (val, value.ty, source_info.span);
- trace!("evaluated {:?} to {:?}", cid, val);
- Some(val)
- }
-
fn eval_constant(
&mut self,
c: &Constant<'tcx>,
source_info: SourceInfo,
) -> Option<Const<'tcx>> {
match c.literal {
- Literal::Value { value } => match value.val {
- ConstVal::Value(v) => {
- let v = self.use_ecx(source_info, |this| {
- this.ecx.const_value_to_value(v, value.ty)
- })?;
- Some((v, value.ty, c.span))
- },
- ConstVal::Unevaluated(did, substs) => {
- let instance = Instance::resolve(
- self.tcx,
- self.param_env,
- did,
- substs,
- )?;
- let cid = GlobalId {
- instance,
- promoted: None,
- };
- self.const_eval(cid, source_info)
- },
+ Literal::Value { value } => {
+ self.ecx.tcx.span = source_info.span;
+ match self.ecx.const_to_value(value.val) {
+ Ok(val) => Some((val, value.ty, c.span)),
+ Err(error) => {
+ let (stacktrace, span) = self.ecx.generate_stacktrace(None);
+ let err = ConstEvalErr {
+ span,
+ error,
+ stacktrace,
+ };
+ err.report_as_error(
+ self.tcx.at(source_info.span),
+ "could not evaluate constant",
+ );
+ None
+ },
+ }
},
// evaluate the promoted and replace the constant with the evaluated result
Literal::Promoted { index } => {
use rustc_data_structures::fx::FxHashSet;
use rustc::hir;
use rustc::hir::def_id::DefId;
-use rustc::middle::const_val::ConstVal;
+use rustc::mir::interpret::ConstValue;
use rustc::traits::{self, TraitEngine};
use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
use rustc::ty::cast::CastTy;
}
Operand::Constant(ref constant) => {
if let Literal::Value {
- value: &ty::Const { val: ConstVal::Unevaluated(def_id, _), ty, .. }
+ value: &ty::Const { val: ConstValue::Unevaluated(def_id, _), ty, .. }
} = constant.literal {
// Don't peek inside trait associated constants.
if self.tcx.trait_of_item(def_id).is_some() {
def_info: _,
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition,
} => {
def_info: Some((nid, self.krate_span)),
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition,
}
def_info: None,
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
unstable_feature: None,
edition: hygiene::default_edition(),
});
let is_local_static = if let Def::Static(..) = def { def_id.is_local() } else { false };
if !self.item_is_accessible(def_id) && !is_local_static {
let name = match *qpath {
- hir::QPath::Resolved(_, ref path) => format!("{}", path),
- hir::QPath::TypeRelative(_, ref segment) => segment.name.to_string(),
+ hir::QPath::Resolved(_, ref path) => path.to_string(),
+ hir::QPath::TypeRelative(_, ref segment) => segment.ident.to_string(),
};
let msg = format!("{} `{}` is private", def.kind_name(), name);
self.tcx.sess.span_err(span, &msg);
args: Option<P<hir::GenericArgs>>,
is_value: bool
) -> hir::Path {
- let mut segments = iter::once(keywords::CrateRoot.name())
+ let mut segments = iter::once(keywords::CrateRoot.ident())
.chain(
crate_root.into_iter()
.chain(components.iter().cloned())
- .map(Symbol::intern)
- ).map(hir::PathSegment::from_name).collect::<Vec<_>>();
+ .map(Ident::from_str)
+ ).map(hir::PathSegment::from_ident).collect::<Vec<_>>();
if let Some(args) = args {
- let name = segments.last().unwrap().name;
+ let ident = segments.last().unwrap().ident;
*segments.last_mut().unwrap() = hir::PathSegment {
- name,
+ ident,
args: Some(args),
infer_types: true,
};
hir::Path {
span,
def: Def::Err,
- segments: iter::once(keywords::CrateRoot.name()).chain({
- path_str.split("::").skip(1).map(Symbol::intern)
- }).map(hir::PathSegment::from_name).collect(),
+ segments: iter::once(keywords::CrateRoot.ident()).chain({
+ path_str.split("::").skip(1).map(Ident::from_str)
+ }).map(hir::PathSegment::from_ident).collect(),
}
} else {
hir::Path {
span,
def: Def::Err,
- segments: path_str.split("::").map(Symbol::intern)
- .map(hir::PathSegment::from_name).collect(),
+ segments: path_str.split("::").map(Ident::from_str)
+ .map(hir::PathSegment::from_ident).collect(),
}
};
self.resolve_hir_path_cb(&mut path, is_value, |_, _, _| errored = true);
/// resolve_hir_path, but takes a callback in case there was an error
fn resolve_hir_path_cb<F>(&mut self, path: &mut hir::Path, is_value: bool, error_callback: F)
- where F: for<'c, 'b> FnOnce(&'c mut Resolver, Span, ResolutionError<'b>)
- {
+ where F: for<'c, 'b> FnOnce(&'c mut Resolver, Span, ResolutionError<'b>)
+ {
let namespace = if is_value { ValueNS } else { TypeNS };
let hir::Path { ref segments, span, ref mut def } = *path;
- let path: Vec<Ident> = segments.iter()
- .map(|seg| Ident::new(seg.name, span))
- .collect();
+ let path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
// FIXME (Manishearth): Intra doc links won't get warned of epoch changes
match self.resolve_path(&path, Some(namespace), true, span, CrateLint::No) {
PathResult::Module(module) => *def = module.def().unwrap(),
match path.get(1) {
// If this import looks like `crate::...` it's already good
- Some(name) if name.name == keywords::Crate.name() => return,
+ Some(ident) if ident.name == keywords::Crate.name() => return,
// Otherwise go below to see if it's an extern crate
Some(_) => {}
// If the path has length one (and it's `CrateRoot` most likely)
kind: MacroKind, force: bool)
-> Result<Def, Determinacy> {
let ast::Path { ref segments, span } = *path;
- let path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
+ let mut path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
let invocation = self.invocations[&scope];
let module = invocation.module.get();
self.current_module = if module.is_trait() { module.parent.unwrap() } else { module };
+ // Possibly apply the macro helper hack
+ if self.use_extern_macros && kind == MacroKind::Bang && path.len() == 1 &&
+ path[0].span.ctxt().outer().expn_info().map_or(false, |info| info.local_inner_macros) {
+ let root = Ident::new(keywords::DollarCrate.name(), path[0].span);
+ path.insert(0, root);
+ }
+
if path.len() > 1 {
if !self.use_extern_macros && self.gated_errors.insert(span) {
let msg = "non-ident macro paths are experimental";
sig: &'l ast::MethodSig,
body: Option<&'l ast::Block>,
id: ast::NodeId,
- name: ast::Ident,
+ ident: ast::Ident,
generics: &'l ast::Generics,
vis: ast::Visibility,
span: Span,
) {
- debug!("process_method: {}:{}", id, name);
+ debug!("process_method: {}:{}", id, ident);
- if let Some(mut method_data) = self.save_ctxt.get_method_data(id, name.name, span) {
+ if let Some(mut method_data) = self.save_ctxt.get_method_data(id, ident.name, span) {
let sig_str = ::make_signature(&sig.decl, &generics);
if body.is_some() {
self.nest_tables(
self.process_generic_params(&generics, span, &method_data.qualname, id);
method_data.value = sig_str;
- method_data.sig = sig::method_signature(id, name, generics, sig, &self.save_ctxt);
+ method_data.sig = sig::method_signature(id, ident, generics, sig, &self.save_ctxt);
self.dumper.dump_def(&access_from!(self.save_ctxt, vis, id), method_data);
}
qualname.push_str(&self.tcx.item_path_str(def_id));
self.tcx
.associated_items(def_id)
- .find(|item| item.name == name)
+ .find(|item| item.ident.name == name)
.map(|item| decl_id = Some(item.def_id));
}
qualname.push_str(">");
let ti = self.tcx.associated_item(decl_id);
self.tcx
.associated_items(ti.container.id())
- .find(|item| item.name == ti.name && item.defaultness.has_value())
+ .find(|item| item.ident.name == ti.ident.name &&
+ item.defaultness.has_value())
.map(|item| item.def_id)
} else {
None
use chalk_engine::fallible::Fallible as ChalkEngineFallible;
use chalk_engine::{context, hh::HhGoal, DelayedLiteral, ExClause};
-use rustc::infer::canonical::{
- Canonical, CanonicalVarValues, Canonicalize, QueryRegionConstraint, QueryResult,
-};
+use rustc::infer::canonical::{Canonical, CanonicalVarValues, QueryRegionConstraint, QueryResult};
use rustc::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime};
use rustc::traits::{
WellFormed,
subst, constraints
}
}
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ConstrainedSubst<'tcx> {
- type Canonicalized = Canonical<'gcx, ConstrainedSubst<'gcx>>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, ConstrainedSubst<'gcx>>,
- ) -> Self::Canonicalized {
- value
- }
-}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::infer::canonical::{Canonical, QueryResult};
use rustc::hir::def_id::DefId;
-use rustc::traits::{FulfillmentContext, Normalized, ObligationCause};
+use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::traits::query::dropck_outlives::{DropckOutlivesResult, DtorckConstraint};
use rustc::traits::query::{CanonicalTyGoal, NoSolution};
-use rustc::traits::query::dropck_outlives::{DtorckConstraint, DropckOutlivesResult};
-use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
+use rustc::traits::{FulfillmentContext, Normalized, ObligationCause, TraitEngineExt};
+use rustc::ty::query::Providers;
use rustc::ty::subst::{Subst, Substs};
+use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
use rustc::util::nodemap::FxHashSet;
use rustc_data_structures::sync::Lrc;
use syntax::codemap::{Span, DUMMY_SP};
-use util;
-crate fn dropck_outlives<'tcx>(
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ dropck_outlives,
+ adt_dtorck_constraint,
+ ..*p
+ };
+}
+
+fn dropck_outlives<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: CanonicalTyGoal<'tcx>,
) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, DropckOutlivesResult<'tcx>>>>, NoSolution> {
canonical_inference_vars,
) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);
- let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] };
+ let mut result = DropckOutlivesResult {
+ kinds: vec![],
+ overflows: vec![],
+ };
// A stack of types left to process. Each round, we pop
// something from the stack and invoke
debug!("dropck_outlives: result = {:#?}", result);
- util::make_query_response(infcx, canonical_inference_vars, result, fulfill_cx)
+ infcx.make_canonicalized_query_result(canonical_inference_vars, result, fulfill_cx)
})
}
dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety)
}
- ty::TyTuple(tys) => tys.iter()
+ ty::TyTuple(tys) => tys
+ .iter()
.map(|ty| dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty))
.collect(),
dtorck_types: vec![],
overflows: vec![],
};
- debug!("dtorck_constraint: generator {:?} => {:?}", def_id, constraint);
+ debug!(
+ "dtorck_constraint: generator {:?} => {:?}",
+ def_id, constraint
+ );
Ok(constraint)
}
return Ok(result);
}
- let mut result = def.all_fields()
+ let mut result = def
+ .all_fields()
.map(|field| tcx.type_of(field.did))
.map(|fty| dtorck_constraint_for_ty(tcx, span, fty, 0, fty))
.collect::<Result<DtorckConstraint, NoSolution>>()?;
use rustc::traits::{EvaluationResult, Obligation, ObligationCause,
OverflowError, SelectionContext, TraitQueryMode};
use rustc::traits::query::CanonicalPredicateGoal;
+use rustc::ty::query::Providers;
use rustc::ty::{ParamEnvAnd, TyCtxt};
use syntax::codemap::DUMMY_SP;
-crate fn evaluate_obligation<'tcx>(
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ evaluate_obligation,
+ ..*p
+ };
+}
+
+fn evaluate_obligation<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: CanonicalPredicateGoal<'tcx>,
) -> Result<EvaluationResult, OverflowError> {
mod evaluate_obligation;
mod normalize_projection_ty;
mod normalize_erasing_regions;
-mod util;
pub mod lowering;
+mod type_op;
use rustc::ty::query::Providers;
pub fn provide(p: &mut Providers) {
- *p = Providers {
- dropck_outlives: dropck_outlives::dropck_outlives,
- adt_dtorck_constraint: dropck_outlives::adt_dtorck_constraint,
- normalize_projection_ty: normalize_projection_ty::normalize_projection_ty,
- normalize_ty_after_erasing_regions:
- normalize_erasing_regions::normalize_ty_after_erasing_regions,
- program_clauses_for: lowering::program_clauses_for,
- program_clauses_for_env: lowering::program_clauses_for_env,
- evaluate_obligation: evaluate_obligation::evaluate_obligation,
- ..*p
- };
+ dropck_outlives::provide(p);
+ evaluate_obligation::provide(p);
+ lowering::provide(p);
+ normalize_projection_ty::provide(p);
+ normalize_erasing_regions::provide(p);
+ type_op::provide(p);
}
use rustc::hir::{self, ImplPolarity};
use rustc::traits::{Clause, Clauses, DomainGoal, Goal, PolyDomainGoal, ProgramClause,
WhereClause, FromEnv, WellFormed};
+use rustc::ty::query::Providers;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Slice, TyCtxt};
use rustc_data_structures::fx::FxHashSet;
use std::iter;
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ program_clauses_for,
+ program_clauses_for_env,
+ ..*p
+ };
+}
+
crate trait Lower<T> {
/// Lower a rustc construct (e.g. `ty::TraitPredicate`) to a chalk-like type.
fn lower(&self) -> T;
let hypotheses = vec![trait_implemented];
// `<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm>`
- let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.name);
+ let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.ident);
// `Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T)`
let normalize_goal = DomainGoal::Normalize(ty::ProjectionPredicate { projection_ty, ty });
use rustc::traits::{Normalized, ObligationCause};
use rustc::traits::query::NoSolution;
+use rustc::ty::query::Providers;
use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
use std::sync::atomic::Ordering;
-crate fn normalize_ty_after_erasing_regions<'tcx>(
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ normalize_ty_after_erasing_regions,
+ ..*p
+ };
+}
+
+fn normalize_ty_after_erasing_regions<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> Ty<'tcx> {
// except according to those terms.
use rustc::infer::canonical::{Canonical, QueryResult};
-use rustc::traits::{self, FulfillmentContext, ObligationCause, SelectionContext};
-use rustc::traits::query::{CanonicalProjectionGoal, NoSolution, normalize::NormalizationResult};
+use rustc::traits::query::{normalize::NormalizationResult, CanonicalProjectionGoal, NoSolution};
+use rustc::traits::{self, ObligationCause, SelectionContext, TraitEngineExt};
+use rustc::ty::query::Providers;
use rustc::ty::{ParamEnvAnd, TyCtxt};
use rustc_data_structures::sync::Lrc;
+use std::sync::atomic::Ordering;
use syntax::ast::DUMMY_NODE_ID;
use syntax_pos::DUMMY_SP;
-use util;
-use std::sync::atomic::Ordering;
-crate fn normalize_projection_ty<'tcx>(
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ normalize_projection_ty,
+ ..*p
+ };
+}
+
+fn normalize_projection_ty<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: CanonicalProjectionGoal<'tcx>,
) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, NormalizationResult<'tcx>>>>, NoSolution> {
debug!("normalize_provider(goal={:#?})", goal);
- tcx.sess.perf_stats.normalize_projection_ty.fetch_add(1, Ordering::Relaxed);
- tcx.infer_ctxt().enter(|ref infcx| {
- let (
- ParamEnvAnd {
+ tcx.sess
+ .perf_stats
+ .normalize_projection_ty
+ .fetch_add(1, Ordering::Relaxed);
+ tcx.infer_ctxt().enter_canonical_trait_query(
+ &goal,
+ |infcx,
+ fulfill_cx,
+ ParamEnvAnd {
+ param_env,
+ value: goal,
+ }| {
+ let selcx = &mut SelectionContext::new(infcx);
+ let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);
+ let mut obligations = vec![];
+ let answer = traits::normalize_projection_type(
+ selcx,
param_env,
- value: goal,
- },
- canonical_inference_vars,
- ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);
- let fulfill_cx = &mut FulfillmentContext::new();
- let selcx = &mut SelectionContext::new(infcx);
- let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);
- let mut obligations = vec![];
- let answer =
- traits::normalize_projection_type(selcx, param_env, goal, cause, 0, &mut obligations);
- fulfill_cx.register_predicate_obligations(infcx, obligations);
-
- // Now that we have fulfilled as much as we can, create a solution
- // from what we've learned.
- util::make_query_response(
- infcx,
- canonical_inference_vars,
- NormalizationResult { normalized_ty: answer },
- fulfill_cx,
- )
- })
+ goal,
+ cause,
+ 0,
+ &mut obligations,
+ );
+ fulfill_cx.register_predicate_obligations(infcx, obligations);
+ Ok(NormalizationResult {
+ normalized_ty: answer,
+ })
+ },
+ )
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::infer::InferCtxt;
+use rustc::traits::query::type_op::eq::Eq;
+use rustc::traits::query::type_op::normalize::Normalize;
+use rustc::traits::query::type_op::prove_predicate::ProvePredicate;
+use rustc::traits::query::type_op::subtype::Subtype;
+use rustc::traits::query::{Fallible, NoSolution};
+use rustc::traits::{FulfillmentContext, Normalized, Obligation, ObligationCause, TraitEngine,
+ TraitEngineExt};
+use rustc::ty::query::Providers;
+use rustc::ty::{FnSig, Lift, ParamEnvAnd, PolyFnSig, Predicate, Ty, TyCtxt, TypeFoldable};
+use rustc_data_structures::sync::Lrc;
+use std::fmt;
+
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ type_op_eq,
+ type_op_prove_predicate,
+ type_op_subtype,
+ type_op_normalize_ty,
+ type_op_normalize_predicate,
+ type_op_normalize_fn_sig,
+ type_op_normalize_poly_fn_sig,
+ ..*p
+ };
+}
+
+fn type_op_eq<'tcx>(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+ let (param_env, Eq { a, b }) = key.into_parts();
+ Ok(infcx
+ .at(&ObligationCause::dummy(), param_env)
+ .eq(a, b)?
+ .into_value_registering_obligations(infcx, fulfill_cx))
+ })
+}
+
+fn type_op_normalize<T>(
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ fulfill_cx: &mut FulfillmentContext<'tcx>,
+ key: ParamEnvAnd<'tcx, Normalize<T>>,
+) -> Fallible<T>
+where
+ T: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+{
+ let (param_env, Normalize { value }) = key.into_parts();
+ let Normalized { value, obligations } = infcx
+ .at(&ObligationCause::dummy(), param_env)
+ .normalize(&value)?;
+ fulfill_cx.register_predicate_obligations(infcx, obligations);
+ Ok(value)
+}
+
+fn type_op_normalize_ty(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Ty<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, Ty<'tcx>>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_predicate(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Predicate<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, Predicate<'tcx>>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_fn_sig(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<FnSig<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, FnSig<'tcx>>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_poly_fn_sig(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<PolyFnSig<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, PolyFnSig<'tcx>>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_subtype<'tcx>(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+ let (param_env, Subtype { sub, sup }) = key.into_parts();
+ Ok(infcx
+ .at(&ObligationCause::dummy(), param_env)
+ .sup(sup, sub)?
+ .into_value_registering_obligations(infcx, fulfill_cx))
+ })
+}
+
+fn type_op_prove_predicate<'tcx>(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, ProvePredicate<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+ let (param_env, ProvePredicate { predicate }) = key.into_parts();
+ fulfill_cx.register_predicate_obligation(
+ infcx,
+ Obligation::new(ObligationCause::dummy(), param_env, predicate),
+ );
+ Ok(())
+ })
+}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::infer::InferCtxt;
-use rustc::infer::canonical::{CanonicalVarValues, Canonicalize, Certainty, QueryResult};
-use rustc::infer::region_constraints::{Constraint, RegionConstraintData};
-use rustc::traits::{FulfillmentContext, TraitEngine};
-use rustc::traits::query::NoSolution;
-use rustc::ty;
-use std::fmt::Debug;
-
-/// The canonicalization form of `QueryResult<'tcx, T>`.
-type CanonicalizedQueryResult<'gcx, 'tcx, T> =
- <QueryResult<'tcx, T> as Canonicalize<'gcx, 'tcx>>::Canonicalized;
-
-crate fn make_query_response<'gcx, 'tcx, T>(
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- inference_vars: CanonicalVarValues<'tcx>,
- answer: T,
- fulfill_cx: &mut FulfillmentContext<'tcx>,
-) -> Result<CanonicalizedQueryResult<'gcx, 'tcx, T>, NoSolution>
-where
- T: Debug,
- QueryResult<'tcx, T>: Canonicalize<'gcx, 'tcx>,
-{
- let tcx = infcx.tcx;
-
- debug!(
- "make_query_response(\
- inference_vars={:?}, \
- answer={:?})",
- inference_vars, answer,
- );
-
- // Select everything, returning errors.
- let true_errors = match fulfill_cx.select_where_possible(infcx) {
- Ok(()) => vec![],
- Err(errors) => errors,
- };
- debug!("true_errors = {:#?}", true_errors);
-
- if !true_errors.is_empty() {
- // FIXME -- we don't indicate *why* we failed to solve
- debug!("make_query_response: true_errors={:#?}", true_errors);
- return Err(NoSolution);
- }
-
- // Anything left unselected *now* must be an ambiguity.
- let ambig_errors = match fulfill_cx.select_all_or_error(infcx) {
- Ok(()) => vec![],
- Err(errors) => errors,
- };
- debug!("ambig_errors = {:#?}", ambig_errors);
-
- let region_obligations = infcx.take_registered_region_obligations();
-
- let region_constraints = infcx.with_region_constraints(|region_constraints| {
- let RegionConstraintData {
- constraints,
- verifys,
- givens,
- } = region_constraints;
-
- assert!(verifys.is_empty());
- assert!(givens.is_empty());
-
- let mut outlives: Vec<_> = constraints
- .into_iter()
- .map(|(k, _)| match *k {
- // Swap regions because we are going from sub (<=) to outlives
- // (>=).
- Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
- tcx.mk_region(ty::ReVar(v2)).into(),
- tcx.mk_region(ty::ReVar(v1)),
- ),
- Constraint::VarSubReg(v1, r2) => {
- ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
- }
- Constraint::RegSubVar(r1, v2) => {
- ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
- }
- Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
- })
- .map(ty::Binder::dummy) // no bound regions in the code above
- .collect();
-
- outlives.extend(
- region_obligations
- .into_iter()
- .map(|(_, r_o)| ty::OutlivesPredicate(r_o.sup_type.into(), r_o.sub_region))
- .map(ty::Binder::dummy) // no bound regions in the code above
- );
-
- outlives
- });
-
- let certainty = if ambig_errors.is_empty() {
- Certainty::Proven
- } else {
- Certainty::Ambiguous
- };
-
- let (canonical_result, _) = infcx.canonicalize_response(&QueryResult {
- var_values: inference_vars,
- region_constraints,
- certainty,
- value: answer,
- });
-
- debug!(
- "make_query_response: canonical_result = {:#?}",
- canonical_result
- );
-
- Ok(canonical_result)
-}
}
struct ConvertedBinding<'tcx> {
- item_name: ast::Name,
+ item_name: ast::Ident,
ty: Ty<'tcx>,
span: Span,
}
let assoc_bindings = generic_args.bindings.iter().map(|binding| {
ConvertedBinding {
- item_name: binding.name,
+ item_name: binding.ident,
ty: self.ast_ty_to_ty(&binding.ty),
span: binding.span,
}
fn trait_defines_associated_type_named(&self,
trait_def_id: DefId,
- assoc_name: ast::Name)
+ assoc_name: ast::Ident)
-> bool
{
self.tcx().associated_items(trait_def_id).any(|item| {
item.kind == ty::AssociatedKind::Type &&
- self.tcx().hygienic_eq(assoc_name, item.name, trait_def_id)
+ self.tcx().hygienic_eq(assoc_name, item.ident, trait_def_id)
})
}
}?;
let (assoc_ident, def_scope) =
- tcx.adjust_ident(binding.item_name.to_ident(), candidate.def_id(), ref_id);
+ tcx.adjust_ident(binding.item_name, candidate.def_id(), ref_id);
let assoc_ty = tcx.associated_items(candidate.def_id()).find(|i| {
- i.kind == ty::AssociatedKind::Type && i.name.to_ident() == assoc_ident
+ i.kind == ty::AssociatedKind::Type && i.ident.modern() == assoc_ident
}).expect("missing associated type");
if !assoc_ty.vis.is_accessible_from(def_scope, tcx) {
let trait_def_id = assoc_item.container.id();
struct_span_err!(tcx.sess, span, E0191,
"the value of the associated type `{}` (from the trait `{}`) must be specified",
- assoc_item.name,
+ assoc_item.ident,
tcx.item_path_str(trait_def_id))
.span_label(span, format!(
- "missing associated type `{}` value", assoc_item.name))
+ "missing associated type `{}` value", assoc_item.ident))
.emit();
}
// any ambiguity.
fn find_bound_for_assoc_item(&self,
ty_param_def_id: DefId,
- assoc_name: ast::Name,
+ assoc_name: ast::Ident,
span: Span)
-> Result<ty::PolyTraitRef<'tcx>, ErrorReported>
{
fn one_bound_for_assoc_type<I>(&self,
mut bounds: I,
ty_param_name: &str,
- assoc_name: ast::Name,
+ assoc_name: ast::Ident,
span: Span)
-> Result<ty::PolyTraitRef<'tcx>, ErrorReported>
where I: Iterator<Item=ty::PolyTraitRef<'tcx>>
for bound in bounds {
let bound_span = self.tcx().associated_items(bound.def_id()).find(|item| {
item.kind == ty::AssociatedKind::Type &&
- self.tcx().hygienic_eq(assoc_name, item.name, bound.def_id())
+ self.tcx().hygienic_eq(assoc_name, item.ident, bound.def_id())
})
.and_then(|item| self.tcx().hir.span_if_local(item.def_id));
-> (Ty<'tcx>, Def)
{
let tcx = self.tcx();
- let assoc_name = item_segment.name;
+ let assoc_name = item_segment.ident;
debug!("associated_path_def_to_ty: {:?}::{}", ty, assoc_name);
let candidates =
traits::supertraits(tcx, ty::Binder::bind(trait_ref))
- .filter(|r| self.trait_defines_associated_type_named(r.def_id(),
- assoc_name));
+ .filter(|r| self.trait_defines_associated_type_named(r.def_id(), assoc_name));
match self.one_bound_for_assoc_type(candidates, "Self", assoc_name, span) {
Ok(bound) => bound,
};
let trait_did = bound.def_id();
- let (assoc_ident, def_scope) = tcx.adjust_ident(assoc_name.to_ident(), trait_did, ref_id);
+ let (assoc_ident, def_scope) = tcx.adjust_ident(assoc_name, trait_did, ref_id);
let item = tcx.associated_items(trait_did).find(|i| {
Namespace::from(i.kind) == Namespace::Type &&
- i.name.to_ident() == assoc_ident
+ i.ident.modern() == assoc_ident
})
.expect("missing associated type");
self.report_ambiguous_associated_type(span,
"Type",
&path_str,
- &item_segment.name.as_str());
+ &item_segment.ident.as_str());
return tcx.types.err;
};
use rustc::ty::adjustment::{Adjustment, Adjust, OverloadedDeref};
use syntax_pos::Span;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
use std::iter;
ty::ProjectionTy::from_ref_and_name(
tcx,
trait_ref,
- Symbol::intern("Target"),
+ Ident::from_str("Target"),
),
cause,
0,
use rustc::ty::{self, TyCtxt, TypeFoldable, Ty};
use rustc::ty::adjustment::{Adjustment, Adjust, AllowTwoPhase, AutoBorrow, AutoBorrowMutability};
use rustc_target::spec::abi;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
use syntax_pos::Span;
use rustc::hir;
MethodCallee<'tcx>)> {
// Try the options that are least restrictive on the caller first.
for &(opt_trait_def_id, method_name, borrow) in
- &[(self.tcx.lang_items().fn_trait(), Symbol::intern("call"), true),
- (self.tcx.lang_items().fn_mut_trait(), Symbol::intern("call_mut"), true),
- (self.tcx.lang_items().fn_once_trait(), Symbol::intern("call_once"), false)] {
+ &[(self.tcx.lang_items().fn_trait(), Ident::from_str("call"), true),
+ (self.tcx.lang_items().fn_mut_trait(), Ident::from_str("call_mut"), true),
+ (self.tcx.lang_items().fn_once_trait(), Ident::from_str("call_once"), false)] {
let trait_def_id = match opt_trait_def_id {
Some(def_id) => def_id,
None => continue,
"supposed to be part of a block tail expression, but the \
expression is empty");
});
- fcx.suggest_mismatched_types_on_tail(&mut db, expr,
- expected, found,
- cause.span, blk_id);
+ fcx.suggest_mismatched_types_on_tail(
+ &mut db,
+ expr,
+ expected,
+ found,
+ cause.span,
+ blk_id,
+ );
}
_ => {
db = fcx.report_mismatched_types(cause, expected, found, err);
span: impl_m_span,
body_id: impl_m_node_id,
code: ObligationCauseCode::CompareImplMethodObligation {
- item_name: impl_m.name,
+ item_name: impl_m.ident.name,
impl_item_def_id: impl_m.def_id,
trait_item_def_id: trait_m.def_id,
},
cause.span(&tcx),
E0053,
"method `{}` has an incompatible type for trait",
- trait_m.name);
+ trait_m.ident);
infcx.note_type_err(&mut diag,
&cause,
E0195,
"lifetime parameters or bounds on method `{}` do not match \
the trait declaration",
- impl_m.name);
+ impl_m.ident);
err.span_label(span, "lifetimes do not match method in trait");
if let Some(sp) = tcx.hir.span_if_local(trait_m.def_id) {
err.span_label(tcx.sess.codemap().def_span(sp),
E0185,
"method `{}` has a `{}` declaration in the impl, but \
not in the trait",
- trait_m.name,
+ trait_m.ident,
self_descr);
err.span_label(impl_m_span, format!("`{}` used in impl", self_descr));
if let Some(span) = tcx.hir.span_if_local(trait_m.def_id) {
err.span_label(span, format!("trait method declared without `{}`", self_descr));
} else {
- err.note_trait_signature(trait_m.name.to_string(),
+ err.note_trait_signature(trait_m.ident.to_string(),
trait_m.signature(&tcx));
}
err.emit();
E0186,
"method `{}` has a `{}` declaration in the trait, but \
not in the impl",
- trait_m.name,
+ trait_m.ident,
self_descr);
err.span_label(impl_m_span, format!("expected `{}` in impl", self_descr));
if let Some(span) = tcx.hir.span_if_local(trait_m.def_id) {
err.span_label(span, format!("`{}` used in trait", self_descr));
} else {
- err.note_trait_signature(trait_m.name.to_string(),
+ err.note_trait_signature(trait_m.ident.to_string(),
trait_m.signature(&tcx));
}
err.emit();
E0049,
"method `{}` has {} type parameter{} but its trait \
declaration has {} type parameter{}",
- trait_m.name,
+ trait_m.ident,
num_impl_m_type_params,
if num_impl_m_type_params == 1 { "" } else { "s" },
num_trait_m_type_params,
E0050,
"method `{}` has {} parameter{} but the declaration in \
trait `{}` has {}",
- trait_m.name,
+ trait_m.ident,
impl_number_args,
if impl_number_args == 1 { "" } else { "s" },
tcx.item_path_str(trait_m.def_id),
format!("{} parameter", trait_number_args)
}));
} else {
- err.note_trait_signature(trait_m.name.to_string(),
+ err.note_trait_signature(trait_m.ident.to_string(),
trait_m.signature(&tcx));
}
err.span_label(impl_span,
impl_span,
E0643,
"method `{}` has incompatible signature for trait",
- trait_m.name);
+ trait_m.ident);
err.span_label(trait_span, "declaration in trait here");
match (impl_synthetic, trait_synthetic) {
// The case where the impl method uses `impl Trait` but the trait method uses
E0326,
"implemented const `{}` has an incompatible type for \
trait",
- trait_c.name);
+ trait_c.ident);
let trait_c_node_id = tcx.hir.as_local_node_id(trait_c.def_id);
let trait_c_span = trait_c_node_id.map(|trait_c_node_id| {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::iter;
-
use check::FnCtxt;
use rustc::infer::InferOk;
use rustc::traits::ObligationCause;
}
}
- if let Some((sp, msg, suggestion)) = self.check_ref(expr, checked_ty, expected) {
- err.span_suggestion(sp, msg, suggestion);
- } else if !self.check_for_cast(&mut err, expr, expr_ty, expected) {
- let methods = self.get_conversion_methods(expr.span, expected, checked_ty);
- if let Ok(expr_text) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
- let suggestions = iter::repeat(expr_text).zip(methods.iter())
- .map(|(receiver, method)| format!("{}.{}()", receiver, method.name))
- .collect::<Vec<_>>();
- if !suggestions.is_empty() {
- err.span_suggestions(expr.span,
- "try using a conversion method",
- suggestions);
- }
- }
- }
+ self.suggest_ref_or_into(&mut err, expr, expected, expr_ty);
+
(expected, Some(err))
}
- fn get_conversion_methods(&self, span: Span, expected: Ty<'tcx>, checked_ty: Ty<'tcx>)
+ pub fn get_conversion_methods(&self, span: Span, expected: Ty<'tcx>, checked_ty: Ty<'tcx>)
-> Vec<AssociatedItem> {
let mut methods = self.probe_for_return_type(span,
probe::Mode::MethodCall,
})), 1) = (self.tcx.hir.find(parent), decl.inputs.len()) {
let self_ty = self.tables.borrow().node_id_to_type(expr[0].hir_id);
let self_ty = format!("{:?}", self_ty);
- let name = path.name.as_str();
+ let name = path.ident.as_str();
let is_as_ref_able = (
self_ty.starts_with("&std::option::Option") ||
self_ty.starts_with("&std::result::Result") ||
/// In addition of this check, it also checks between references mutability state. If the
/// expected is mutable but the provided isn't, maybe we could just say "Hey, try with
/// `&mut`!".
- fn check_ref(&self,
+ pub fn check_ref(&self,
expr: &hir::Expr,
checked_ty: Ty<'tcx>,
expected: Ty<'tcx>)
-> Option<(Span, &'static str, String)> {
- let sp = expr.span;
+ let cm = self.sess().codemap();
+ // Use the callsite's span if this is a macro call. #41858
+ let sp = cm.call_span_if_macro(expr.span);
+ if !cm.span_to_filename(sp).is_real() {
+ return None;
+ }
+
match (&expected.sty, &checked_ty.sty) {
(&ty::TyRef(_, exp, _), &ty::TyRef(_, check, _)) => match (&exp.sty, &check.sty) {
(&ty::TyStr, &ty::TyArray(arr, _)) |
(&ty::TyStr, &ty::TySlice(arr)) if arr == self.tcx.types.u8 => {
if let hir::ExprLit(_) = expr.node {
- let sp = self.sess().codemap().call_span_if_macro(expr.span);
- if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(sp) {
+ if let Ok(src) = cm.span_to_snippet(sp) {
return Some((sp,
"consider removing the leading `b`",
src[1..].to_string()));
(&ty::TyArray(arr, _), &ty::TyStr) |
(&ty::TySlice(arr), &ty::TyStr) if arr == self.tcx.types.u8 => {
if let hir::ExprLit(_) = expr.node {
- let sp = self.sess().codemap().call_span_if_macro(expr.span);
- if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(sp) {
+ if let Ok(src) = cm.span_to_snippet(sp) {
return Some((sp,
"consider adding a leading `b`",
format!("b{}", src)));
checked_ty),
};
if self.can_coerce(ref_ty, expected) {
- // Use the callsite's span if this is a macro call. #41858
- let sp = self.sess().codemap().call_span_if_macro(expr.span);
- if let Ok(src) = self.tcx.sess.codemap().span_to_snippet(sp) {
+ if let Ok(src) = cm.span_to_snippet(sp) {
let sugg_expr = match expr.node { // parenthesize if needed (Issue #46756)
hir::ExprCast(_, _) | hir::ExprBinary(_, _, _) => format!("({})", src),
_ => src,
// a macro; if so, it's hard to extract the text and make a good
// suggestion, so don't bother.)
if self.infcx.can_sub(self.param_env, checked, &expected).is_ok() &&
- expr.span.ctxt().outer().expn_info().is_none() {
+ sp.ctxt().outer().expn_info().is_none() {
match expr.node {
// Maybe remove `&`?
hir::ExprAddrOf(_, ref expr) => {
- if let Ok(code) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
+ if !cm.span_to_filename(expr.span).is_real() {
+ return None;
+ }
+ if let Ok(code) = cm.span_to_snippet(expr.span) {
return Some((sp, "consider removing the borrow", code));
}
}
_ => {
if !self.infcx.type_moves_by_default(self.param_env,
checked,
- expr.span) {
- let sp = self.sess().codemap().call_span_if_macro(expr.span);
- if let Ok(code) = self.tcx.sess.codemap().span_to_snippet(sp) {
+ sp) {
+ let sp = cm.call_span_if_macro(sp);
+ if let Ok(code) = cm.span_to_snippet(sp) {
return Some((sp,
"consider dereferencing the borrow",
format!("*{}", code)));
None
}
- fn check_for_cast(&self,
+ pub fn check_for_cast(&self,
err: &mut DiagnosticBuilder<'tcx>,
expr: &hir::Expr,
checked_ty: Ty<'tcx>,
use rustc::middle::region;
use rustc::ty::subst::{Subst, Substs, UnpackedKind};
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::traits::{ObligationCause, TraitEngine};
+use rustc::traits::{ObligationCause, TraitEngine, TraitEngineExt};
use util::common::ErrorReported;
use syntax::ast;
allow_private: bool)
-> bool {
let mode = probe::Mode::MethodCall;
- match self.probe_for_name(method_name.span, mode, method_name.name,
+ match self.probe_for_name(method_name.span, mode, method_name,
IsSuggestion(false), self_ty, call_expr_id,
ProbeScope::TraitsInScope) {
Ok(..) => true,
self_expr: &'gcx hir::Expr)
-> Result<MethodCallee<'tcx>, MethodError<'tcx>> {
debug!("lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})",
- segment.name,
+ segment.ident,
self_ty,
call_expr,
self_expr);
let pick = self.lookup_probe(
span,
- segment.name,
+ segment.ident,
self_ty,
call_expr,
ProbeScope::TraitsInScope
// We probe again, taking all traits into account (not only those in scope).
let candidates =
match self.lookup_probe(span,
- segment.name,
+ segment.ident,
self_ty,
call_expr,
ProbeScope::AllTraits) {
fn lookup_probe(&self,
span: Span,
- method_name: ast::Name,
+ method_name: ast::Ident,
self_ty: Ty<'tcx>,
call_expr: &'gcx hir::Expr,
scope: ProbeScope)
/// of this method is basically the same as confirmation.
pub fn lookup_method_in_trait(&self,
span: Span,
- m_name: ast::Name,
+ m_name: ast::Ident,
trait_def_id: DefId,
self_ty: Ty<'tcx>,
opt_input_types: Option<&[Ty<'tcx>]>)
// Trait must have a method named `m_name` and it should not have
// type parameters or early-bound regions.
let tcx = self.tcx;
- let method_item = self.associated_item(trait_def_id, m_name, Namespace::Value).unwrap();
+ let method_item =
+ self.associated_item(trait_def_id, m_name, Namespace::Value).unwrap();
let def_id = method_item.def_id;
let generics = tcx.generics_of(def_id);
assert_eq!(generics.params.len(), 0);
pub fn resolve_ufcs(&self,
span: Span,
- method_name: ast::Name,
+ method_name: ast::Ident,
self_ty: Ty<'tcx>,
expr_id: ast::NodeId)
-> Result<Def, MethodError<'tcx>> {
/// Find item with name `item_name` defined in impl/trait `def_id`
/// and return it, or `None`, if no such item was defined there.
- pub fn associated_item(&self, def_id: DefId, item_name: ast::Name, ns: Namespace)
+ pub fn associated_item(&self, def_id: DefId, item_name: ast::Ident, ns: Namespace)
-> Option<ty::AssociatedItem> {
- self.tcx.associated_items(def_id)
- .find(|item| Namespace::from(item.kind) == ns &&
- self.tcx.hygienic_eq(item_name, item.name, def_id))
+ self.tcx.associated_items(def_id).find(|item| {
+ Namespace::from(item.kind) == ns &&
+ self.tcx.hygienic_eq(item_name, item.ident, def_id)
+ })
}
}
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
span: Span,
mode: Mode,
- method_name: Option<ast::Name>,
+ method_name: Option<ast::Ident>,
return_type: Option<Ty<'tcx>>,
steps: Rc<Vec<CandidateStep<'tcx>>>,
inherent_candidates: Vec<Candidate<'tcx>>,
pub fn probe_for_name(&self,
span: Span,
mode: Mode,
- item_name: ast::Name,
+ item_name: ast::Ident,
is_suggestion: IsSuggestion,
self_ty: Ty<'tcx>,
scope_expr_id: ast::NodeId,
fn probe_op<OP,R>(&'a self,
span: Span,
mode: Mode,
- method_name: Option<ast::Name>,
+ method_name: Option<ast::Ident>,
return_type: Option<Ty<'tcx>>,
is_suggestion: IsSuggestion,
self_ty: Ty<'tcx>,
fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
span: Span,
mode: Mode,
- method_name: Option<ast::Name>,
+ method_name: Option<ast::Ident>,
return_type: Option<Ty<'tcx>>,
steps: Rc<Vec<CandidateStep<'tcx>>>,
is_suggestion: IsSuggestion)
{
let is_accessible = if let Some(name) = self.method_name {
let item = candidate.item;
- let def_scope =
- self.tcx.adjust_ident(name.to_ident(), item.container.id(), self.body_id).1;
+ let def_scope = self.tcx.adjust_ident(name, item.container.id(), self.body_id).1;
item.vis.is_accessible_from(def_scope, self.tcx)
} else {
true
Ok(())
}
- fn candidate_method_names(&self) -> Vec<ast::Name> {
+ fn candidate_method_names(&self) -> Vec<ast::Ident> {
let mut set = FxHashSet();
let mut names: Vec<_> = self.inherent_candidates
.iter()
true
}
})
- .map(|candidate| candidate.item.name)
+ .map(|candidate| candidate.item.ident)
.filter(|&name| set.insert(name))
.collect();
Ok(None)
} else {
let best_name = {
- let names = applicable_close_candidates.iter().map(|cand| &cand.name);
+ let names = applicable_close_candidates.iter().map(|cand| &cand.ident.name);
find_best_match_for_name(names,
&self.method_name.unwrap().as_str(),
None)
}.unwrap();
Ok(applicable_close_candidates
.into_iter()
- .find(|method| method.name == best_name))
+ .find(|method| method.ident.name == best_name))
}
})
}
let max_dist = max(name.as_str().len(), 3) / 3;
self.tcx.associated_items(def_id)
.filter(|x| {
- let dist = lev_distance(&*name.as_str(), &x.name.as_str());
+ let dist = lev_distance(&*name.as_str(), &x.ident.as_str());
Namespace::from(x.kind) == Namespace::Value && dist > 0
&& dist <= max_dist
})
pub fn report_method_error(&self,
span: Span,
rcvr_ty: Ty<'tcx>,
- item_name: ast::Name,
+ item_name: ast::Ident,
rcvr_expr: Option<&hir::Expr>,
error: MethodError<'tcx>,
args: Option<&'gcx [hir::Expr]>) {
match ty.sty {
ty::TyAdt(def, substs) if !def.is_enum() => {
let variant = &def.non_enum_variant();
- if let Some(index) =
- self.tcx.find_field_index(item_name.to_ident(), variant) {
+ if let Some(index) = self.tcx.find_field_index(item_name, variant) {
let field = &variant.fields[index];
let snippet = tcx.sess.codemap().span_to_snippet(expr.span);
let expr_string = match snippet {
report_function!(expr.span, expr_string);
} else if let hir::ExprPath(hir::QPath::Resolved(_, ref path)) = expr.node {
if let Some(segment) = path.segments.last() {
- report_function!(expr.span, segment.name);
+ report_function!(expr.span, segment.ident);
}
}
}
}
if let Some(lev_candidate) = lev_candidate {
- err.help(&format!("did you mean `{}`?", lev_candidate.name));
+ err.help(&format!("did you mean `{}`?", lev_candidate.ident));
}
err.emit();
}
err: &mut DiagnosticBuilder,
span: Span,
rcvr_ty: Ty<'tcx>,
- item_name: ast::Name,
+ item_name: ast::Ident,
rcvr_expr: Option<&hir::Expr>,
valid_out_of_scope_traits: Vec<DefId>) {
if self.suggest_valid_traits(err, valid_out_of_scope_traits) {
use std::collections::hash_map::Entry;
use std::cmp;
use std::fmt::Display;
+use std::iter;
use std::mem::replace;
use std::ops::{self, Deref};
use rustc_target::spec::abi::Abi;
// Add pattern bindings.
fn visit_pat(&mut self, p: &'gcx hir::Pat) {
- if let PatKind::Binding(_, _, ref path1, _) = p.node {
+ if let PatKind::Binding(_, _, ident, _) = p.node {
let var_ty = self.assign(p.span, p.id, None);
self.fcx.require_type_is_sized(var_ty, p.span,
traits::VariableType(p.id));
debug!("Pattern binding {} is assigned to {} with type {:?}",
- path1.node,
+ ident,
self.fcx.ty_to_string(
self.fcx.locals.borrow().get(&p.id).unwrap().clone()),
var_ty);
// The check for a non-trivial pattern is a hack to avoid duplicate warnings
// for simple cases like `fn foo(x: Trait)`,
// where we would error once on the parameter as a whole, and once on the binding `x`.
- if arg.pat.simple_name().is_none() {
+ if arg.pat.simple_ident().is_none() {
fcx.require_type_is_sized(arg_ty, decl.output.span(), traits::MiscObligation);
}
tcx.sess, impl_item.span, E0520,
"`{}` specializes an item from a parent `impl`, but \
that item is not marked `default`",
- impl_item.name);
+ impl_item.ident);
err.span_label(impl_item.span, format!("cannot specialize default item `{}`",
- impl_item.name));
+ impl_item.ident));
match tcx.span_of_impl(parent_impl) {
Ok(span) => {
err.span_label(span, "parent `impl` is here");
err.note(&format!("to specialize, `{}` in the parent `impl` must be marked `default`",
- impl_item.name));
+ impl_item.ident));
}
Err(cname) => {
err.note(&format!("parent implementation is in crate `{}`", cname));
hir::ImplItemKind::Type(_) => ty::AssociatedKind::Type
};
- let parent = ancestors.defs(tcx, trait_item.name, kind, trait_def.def_id).skip(1).next()
+ let parent = ancestors.defs(tcx, trait_item.ident, kind, trait_def.def_id).skip(1).next()
.map(|node_item| node_item.map(|parent| parent.defaultness));
if let Some(parent) = parent {
let ty_impl_item = tcx.associated_item(tcx.hir.local_def_id(impl_item.id));
let ty_trait_item = tcx.associated_items(impl_trait_ref.def_id)
.find(|ac| Namespace::from(&impl_item.node) == Namespace::from(ac.kind) &&
- tcx.hygienic_eq(ty_impl_item.name, ac.name, impl_trait_ref.def_id))
+ tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id))
.or_else(|| {
// Not compatible, but needed for the error message
tcx.associated_items(impl_trait_ref.def_id)
- .find(|ac| tcx.hygienic_eq(ty_impl_item.name, ac.name, impl_trait_ref.def_id))
+ .find(|ac| tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id))
});
// Check that impl definition matches trait definition
let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323,
"item `{}` is an associated const, \
which doesn't match its trait `{}`",
- ty_impl_item.name,
+ ty_impl_item.ident,
impl_trait_ref);
err.span_label(impl_item.span, "does not match trait");
// We can only get the spans from local trait definition
let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324,
"item `{}` is an associated method, \
which doesn't match its trait `{}`",
- ty_impl_item.name,
+ ty_impl_item.ident,
impl_trait_ref);
err.span_label(impl_item.span, "does not match trait");
if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325,
"item `{}` is an associated type, \
which doesn't match its trait `{}`",
- ty_impl_item.name,
+ ty_impl_item.ident,
impl_trait_ref);
err.span_label(impl_item.span, "does not match trait");
if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
let associated_type_overridden = overridden_associated_type.is_some();
for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
let is_implemented = trait_def.ancestors(tcx, impl_id)
- .defs(tcx, trait_item.name, trait_item.kind, impl_trait_ref.def_id)
+ .defs(tcx, trait_item.ident, trait_item.kind, impl_trait_ref.def_id)
.next()
.map(|node_item| !node_item.node.is_from_trait())
.unwrap_or(false);
if !trait_item.defaultness.has_value() {
missing_items.push(trait_item);
} else if associated_type_overridden {
- invalidated_items.push(trait_item.name);
+ invalidated_items.push(trait_item.ident);
}
}
}
let mut err = struct_span_err!(tcx.sess, impl_span, E0046,
"not all trait items implemented, missing: `{}`",
missing_items.iter()
- .map(|trait_item| trait_item.name.to_string())
+ .map(|trait_item| trait_item.ident.to_string())
.collect::<Vec<_>>().join("`, `"));
err.span_label(impl_span, format!("missing `{}` in implementation",
missing_items.iter()
- .map(|trait_item| trait_item.name.to_string())
+ .map(|trait_item| trait_item.ident.to_string())
.collect::<Vec<_>>().join("`, `")));
for trait_item in missing_items {
if let Some(span) = tcx.hir.span_if_local(trait_item.def_id) {
- err.span_label(span, format!("`{}` from trait", trait_item.name));
+ err.span_label(span, format!("`{}` from trait", trait_item.ident));
} else {
- err.note_trait_signature(trait_item.name.to_string(),
+ err.note_trait_signature(trait_item.ident.to_string(),
trait_item.signature(&tcx));
}
}
span_err!(tcx.sess, invalidator.span, E0399,
"the following trait items need to be reimplemented \
as `{}` was overridden: `{}`",
- invalidator.name,
+ invalidator.ident,
invalidated_items.iter()
.map(|name| name.to_string())
.collect::<Vec<_>>().join("`, `"))
None
}
- fn resolve_place_op(&self, op: PlaceOp, is_mut: bool) -> (Option<DefId>, Symbol) {
+ fn resolve_place_op(&self, op: PlaceOp, is_mut: bool) -> (Option<DefId>, ast::Ident) {
let (tr, name) = match (op, is_mut) {
(PlaceOp::Deref, false) =>
(self.tcx.lang_items().deref_trait(), "deref"),
(PlaceOp::Index, true) =>
(self.tcx.lang_items().index_mut_trait(), "index_mut"),
};
- (tr, Symbol::intern(name))
+ (tr, ast::Ident::from_str(name))
}
fn try_overloaded_place_op(&self,
Ok(method)
}
Err(error) => {
- if segment.name != keywords::Invalid.name() {
+ if segment.ident.name != keywords::Invalid.name() {
self.report_method_error(span,
rcvr_t,
- segment.name,
+ segment.ident,
Some(rcvr),
error,
Some(args));
// ... except when we try to 'break rust;'.
// ICE this expression in particular (see #43162).
if let hir::ExprPath(hir::QPath::Resolved(_, ref path)) = e.node {
- if path.segments.len() == 1 && path.segments[0].name == "rust" {
+ if path.segments.len() == 1 && path.segments[0].ident.name == "rust" {
fatally_break_rust(self.tcx.sess);
}
}
// errors with default match binding modes. See #44614.
return (*cached_def, Some(ty), slice::from_ref(&**item_segment))
}
- let item_name = item_segment.name;
+ let item_name = item_segment.ident;
let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
Ok(def) => def,
Err(error) => {
method::MethodError::PrivateMatch(def, _) => def,
_ => Def::Err,
};
- if item_name != keywords::Invalid.name() {
+ if item_name.name != keywords::Invalid.name() {
self.report_method_error(span, ty, item_name, None, error, None);
}
def
cause_span: Span,
blk_id: ast::NodeId) {
self.suggest_missing_semicolon(err, expression, expected, cause_span);
-
if let Some((fn_decl, can_suggest)) = self.get_fn_decl(blk_id) {
self.suggest_missing_return_type(err, &fn_decl, expected, found, can_suggest);
}
+ self.suggest_ref_or_into(err, expression, expected, found);
+ }
+
+ pub fn suggest_ref_or_into(
+ &self,
+ err: &mut DiagnosticBuilder<'tcx>,
+ expr: &hir::Expr,
+ expected: Ty<'tcx>,
+ found: Ty<'tcx>,
+ ) {
+ if let Some((sp, msg, suggestion)) = self.check_ref(expr, found, expected) {
+ err.span_suggestion(sp, msg, suggestion);
+ } else if !self.check_for_cast(err, expr, found, expected) {
+ let methods = self.get_conversion_methods(expr.span, expected, found);
+ if let Ok(expr_text) = self.sess().codemap().span_to_snippet(expr.span) {
+ let suggestions = iter::repeat(expr_text).zip(methods.iter())
+ .map(|(receiver, method)| format!("{}.{}()", receiver, method.ident))
+ .collect::<Vec<_>>();
+ if !suggestions.is_empty() {
+ err.span_suggestions(expr.span, "try using a conversion method", suggestions);
+ }
+ }
+ }
}
/// A common error is to forget to add a semicolon at the end of a block:
use rustc::infer::type_variable::TypeVariableOrigin;
use errors;
use syntax_pos::Span;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
use rustc::hir;
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
trait_did);
let method = trait_did.and_then(|trait_did| {
- let opname = Symbol::intern(opname);
+ let opname = Ident::from_str(opname);
self.lookup_method_in_trait(span, opname, trait_did, lhs_ty, Some(other_tys))
});
}
let param = &hir_generics.params[index];
- report_bivariance(tcx, param.span, param.name.name());
+ report_bivariance(tcx, param.span, param.name.ident().name);
}
}
let name_and_namespace = |def_id| {
let item = self.tcx.associated_item(def_id);
- (item.name, Namespace::from(item.kind))
+ (item.ident, Namespace::from(item.kind))
};
let impl_items1 = self.tcx.associated_item_def_ids(impl1);
let early_lifetimes = early_bound_lifetimes_from_generics(tcx, ast_generics);
params.extend(early_lifetimes.enumerate().map(|(i, param)| {
ty::GenericParamDef {
- name: param.name.name().as_interned_str(),
+ name: param.name.ident().as_interned_str(),
index: own_start + i as u32,
def_id: tcx.hir.local_def_id(param.id),
pure_wrt_drop: param.pure_wrt_drop,
let mut i = 0;
params.extend(ast_generics.params.iter().filter_map(|param| match param.kind {
GenericParamKind::Type { ref default, synthetic, .. } => {
- if param.name.name() == keywords::SelfType.name() {
+ if param.name.ident().name == keywords::SelfType.name() {
span_bug!(param.span, "`Self` should not be the name of a regular parameter");
}
let ty_param = ty::GenericParamDef {
index: type_start + i as u32,
- name: param.name.name().as_interned_str(),
+ name: param.name.ident().as_interned_str(),
def_id: tcx.hir.local_def_id(param.id),
pure_wrt_drop: param.pure_wrt_drop,
kind: ty::GenericParamDefKind::Type {
let region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
def_id: tcx.hir.local_def_id(param.id),
index,
- name: param.name.name().as_interned_str(),
+ name: param.name.ident().as_interned_str(),
}));
index += 1;
for param in &ast_generics.params {
match param.kind {
GenericParamKind::Type { .. } => {
- let name = param.name.name().as_interned_str();
+ let name = param.name.ident().as_interned_str();
let param_ty = ty::ParamTy::new(index, name).to_ty(tcx);
index += 1;
hir::ImplItemKind::Type(_) => &mut seen_type_items,
_ => &mut seen_value_items,
};
- match seen_items.entry(impl_item.name) {
+ match seen_items.entry(impl_item.ident.modern()) {
Occupied(entry) => {
let mut err = struct_span_err!(tcx.sess, impl_item.span, E0201,
"duplicate definitions with name `{}`:",
- impl_item.name);
+ impl_item.ident);
err.span_label(*entry.get(),
format!("previous definition of `{}` here",
- impl_item.name));
+ impl_item.ident));
err.span_label(impl_item.span, "duplicate definition");
err.emit();
}
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::query::Providers;
-use rustc::traits::{ObligationCause, ObligationCauseCode, TraitEngine};
+use rustc::traits::{ObligationCause, ObligationCauseCode, TraitEngine, TraitEngineExt};
use session::{CompileIncomplete, config};
use util::common::time;
let mut segments = path.segments.into_vec();
let last = segments.pop().unwrap();
- let real_name = name.map(|name| Symbol::intern(&name));
+ let real_name = name.map(|name| Ident::from_str(&name));
segments.push(hir::PathSegment::new(
- real_name.unwrap_or(last.name),
+ real_name.unwrap_or(last.ident),
self.generics_to_path_params(generics.clone()),
false,
));
match param.kind {
ty::GenericParamDefKind::Lifetime => {
let name = if param.name == "" {
- hir::ParamName::Plain(keywords::StaticLifetime.name())
+ hir::ParamName::Plain(keywords::StaticLifetime.ident())
} else {
- hir::ParamName::Plain(param.name.as_symbol())
+ hir::ParamName::Plain(ast::Ident::from_interned_str(param.name))
};
args.push(hir::GenericArg::Lifetime(hir::Lifetime {
}));
}
ty::GenericParamDefKind::Type {..} => {
- args.push(hir::GenericArg::Type(P(self.ty_param_to_ty(param.clone()))));
+ args.push(hir::GenericArg::Type(self.ty_param_to_ty(param.clone())));
}
}
}
span: DUMMY_SP,
def: Def::TyParam(param.def_id),
segments: HirVec::from_vec(vec![
- hir::PathSegment::from_name(param.name.as_symbol())
+ hir::PathSegment::from_ident(Ident::from_interned_str(param.name))
]),
}),
)),
let provided = trait_.def_id().map(|did| {
tcx.provided_trait_methods(did)
.into_iter()
- .map(|meth| meth.name.to_string())
+ .map(|meth| meth.ident.to_string())
.collect()
}).unwrap_or(FxHashSet());
use syntax::symbol::{Symbol, InternedString};
use syntax_pos::{self, DUMMY_SP, Pos, FileName};
-use rustc::middle::const_val::ConstVal;
+use rustc::mir::interpret::ConstValue;
use rustc::middle::privacy::AccessLevels;
use rustc::middle::resolve_lifetime as rl;
use rustc::ty::fold::TypeFolder;
Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
let item = cx.tcx.inherent_impls(did).iter()
.flat_map(|imp| cx.tcx.associated_items(*imp))
- .find(|item| item.name == item_name);
+ .find(|item| item.ident.name == item_name);
if let Some(item) = item {
let out = match item.kind {
ty::AssociatedKind::Method if is_val => "method",
Def::Trait(did) => {
let item = cx.tcx.associated_item_def_ids(did).iter()
.map(|item| cx.tcx.associated_item(*item))
- .find(|item| item.name == item_name);
+ .find(|item| item.ident.name == item_name);
if let Some(item) = item {
let kind = match item.kind {
ty::AssociatedKind::Const if is_val => "associatedconstant",
_ => {}
}
}
- Lifetime(self.name.name().to_string())
+ Lifetime(self.name.ident().to_string())
}
}
hir::GenericBound::Outlives(lt) => lt,
_ => panic!(),
});
- let name = bounds.next().unwrap().name.name();
- let mut s = format!("{}: {}", self.name.name(), name);
+ let name = bounds.next().unwrap().name.ident();
+ let mut s = format!("{}: {}", self.name.ident(), name);
for bound in bounds {
- s.push_str(&format!(" + {}", bound.name.name()));
+ s.push_str(&format!(" + {}", bound.name.ident()));
}
Lifetime(s)
} else {
- Lifetime(self.name.name().to_string())
+ Lifetime(self.name.ident().to_string())
}
}
_ => panic!(),
GenericBound::Outlives(_) => panic!("cleaning a trait got a lifetime"),
};
Type::QPath {
- name: cx.tcx.associated_item(self.item_def_id).name.clean(cx),
+ name: cx.tcx.associated_item(self.item_def_id).ident.name.clean(cx),
self_type: box self.self_ty().clean(cx),
trait_: box trait_
}
hir::GenericBound::Outlives(lt) => lt,
_ => panic!(),
});
- let name = bounds.next().unwrap().name.name();
- let mut s = format!("{}: {}", self.name.name(), name);
+ let name = bounds.next().unwrap().name.ident();
+ let mut s = format!("{}: {}", self.name.ident(), name);
for bound in bounds {
- s.push_str(&format!(" + {}", bound.name.name()));
+ s.push_str(&format!(" + {}", bound.name.ident()));
}
s
} else {
- self.name.name().to_string()
+ self.name.ident().to_string()
};
(name, GenericParamDefKind::Lifetime)
}
hir::GenericParamKind::Type { ref default, synthetic, .. } => {
- (self.name.name().clean(cx), GenericParamDefKind::Type {
+ (self.name.ident().name.clean(cx), GenericParamDefKind::Type {
did: cx.tcx.hir.local_def_id(self.id),
bounds: self.bounds.clean(cx),
default: default.clean(cx),
pub values: Vec<Argument>,
}
-impl<'a> Clean<Arguments> for (&'a [P<hir::Ty>], &'a [Spanned<ast::Name>]) {
+impl<'a> Clean<Arguments> for (&'a [hir::Ty], &'a [ast::Ident]) {
fn clean(&self, cx: &DocContext) -> Arguments {
Arguments {
values: self.0.iter().enumerate().map(|(i, ty)| {
- let mut name = self.1.get(i).map(|n| n.node.to_string())
+ let mut name = self.1.get(i).map(|ident| ident.to_string())
.unwrap_or(String::new());
if name.is_empty() {
name = "_".to_string();
}
}
-impl<'a> Clean<Arguments> for (&'a [P<hir::Ty>], hir::BodyId) {
+impl<'a> Clean<Arguments> for (&'a [hir::Ty], hir::BodyId) {
fn clean(&self, cx: &DocContext) -> Arguments {
let body = cx.tcx.hir.body(self.1);
}
impl<'a, A: Copy> Clean<FnDecl> for (&'a hir::FnDecl, A)
- where (&'a [P<hir::Ty>], A): Clean<Arguments>
+ where (&'a [hir::Ty], A): Clean<Arguments>
{
fn clean(&self, cx: &DocContext) -> FnDecl {
FnDecl {
}
};
Item {
- name: Some(self.name.clean(cx)),
+ name: Some(self.ident.name.clean(cx)),
attrs: self.attrs.clean(cx),
source: self.span.clean(cx),
def_id: cx.tcx.hir.local_def_id(self.id),
}, true),
};
Item {
- name: Some(self.name.clean(cx)),
+ name: Some(self.ident.name.clean(cx)),
source: self.span.clean(cx),
attrs: self.attrs.clean(cx),
def_id: cx.tcx.hir.local_def_id(self.id),
}
}
ty::AssociatedKind::Type => {
- let my_name = self.name.clean(cx);
+ let my_name = self.ident.name.clean(cx);
if let ty::TraitContainer(did) = self.container {
// When loading a cross-crate associated type, the bounds for this type
};
Item {
- name: Some(self.name.clean(cx)),
+ name: Some(self.ident.name.clean(cx)),
visibility,
stability: get_stability(cx, self.def_id),
deprecation: get_deprecation(cx, self.def_id),
}
});
if let Some(ty) = type_.cloned() {
- ty_substs.insert(ty_param_def, ty.into_inner().clean(cx));
+ ty_substs.insert(ty_param_def, ty.clean(cx));
} else if let Some(default) = default.clone() {
ty_substs.insert(ty_param_def,
default.into_inner().clean(cx));
segments: segments.into(),
};
Type::QPath {
- name: p.segments.last().unwrap().name.clean(cx),
+ name: p.segments.last().unwrap().ident.name.clean(cx),
self_type: box qself.clean(cx),
trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
}
segments: vec![].into(),
};
Type::QPath {
- name: segment.name.clean(cx),
+ name: segment.ident.name.clean(cx),
self_type: box qself.clean(cx),
trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
}
ty::TySlice(ty) => Slice(box ty.clean(cx)),
ty::TyArray(ty, n) => {
let mut n = cx.tcx.lift(&n).unwrap();
- if let ConstVal::Unevaluated(def_id, substs) = n.val {
+ if let ConstValue::Unevaluated(def_id, substs) = n.val {
let param_env = cx.tcx.param_env(def_id);
let cid = GlobalId {
instance: ty::Instance::new(def_id, substs),
let mut bindings = vec![];
for pb in obj.projection_bounds() {
bindings.push(TypeBinding {
- name: cx.tcx.associated_item(pb.item_def_id()).name.clean(cx),
+ name: cx.tcx.associated_item(pb.item_def_id()).ident.name.clean(cx),
ty: pb.skip_binder().ty.clean(cx)
});
}
if proj.projection_ty.trait_ref(cx.tcx) == *trait_ref.skip_binder() {
Some(TypeBinding {
name: cx.tcx.associated_item(proj.projection_ty.item_def_id)
- .name.clean(cx),
+ .ident.name.clean(cx),
ty: proj.ty.clean(cx),
})
} else {
impl Clean<PathSegment> for hir::PathSegment {
fn clean(&self, cx: &DocContext) -> PathSegment {
PathSegment {
- name: self.name.clean(cx),
+ name: self.ident.name.clean(cx),
args: self.with_generic_args(|generic_args| generic_args.clean(cx))
}
}
fn qpath_to_string(p: &hir::QPath) -> String {
let segments = match *p {
hir::QPath::Resolved(_, ref path) => &path.segments,
- hir::QPath::TypeRelative(_, ref segment) => return segment.name.to_string(),
+ hir::QPath::TypeRelative(_, ref segment) => return segment.ident.to_string(),
};
let mut s = String::new();
if i > 0 {
s.push_str("::");
}
- if seg.name != keywords::CrateRoot.name() {
- s.push_str(&*seg.name.as_str());
+ if seg.ident.name != keywords::CrateRoot.name() {
+ s.push_str(&*seg.ident.as_str());
}
}
s
let provided = trait_.def_id().map(|did| {
cx.tcx.provided_trait_methods(did)
.into_iter()
- .map(|meth| meth.name.to_string())
+ .map(|meth| meth.ident.to_string())
.collect()
}).unwrap_or(FxHashSet());
match p.node {
PatKind::Wild => "_".to_string(),
- PatKind::Binding(_, _, ref p, _) => p.node.to_string(),
+ PatKind::Binding(_, _, ident, _) => ident.to_string(),
PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
PatKind::Struct(ref name, ref fields, etc) => {
format!("{} {{ {}{} }}", qpath_to_string(name),
fn print_const(cx: &DocContext, n: &ty::Const) -> String {
match n.val {
- ConstVal::Unevaluated(def_id, _) => {
+ ConstValue::Unevaluated(def_id, _) => {
if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
print_const_expr(cx, cx.tcx.hir.body_owned_by(node_id))
} else {
inline::print_inlined_const(cx, def_id)
}
},
- ConstVal::Value(..) => {
+ _ => {
let mut s = String::new();
::rustc::mir::fmt_const_val(&mut s, n).unwrap();
// array lengths are obviously usize
impl Clean<TypeBinding> for hir::TypeBinding {
fn clean(&self, cx: &DocContext) -> TypeBinding {
TypeBinding {
- name: self.name.clean(cx),
+ name: self.ident.name.clean(cx),
ty: self.ty.clean(cx)
}
}
span: DUMMY_SP,
def: def_ctor(def_id),
segments: hir::HirVec::from_vec(apb.names.iter().map(|s| hir::PathSegment {
- name: ast::Name::intern(&s),
+ ident: ast::Ident::from_str(&s),
args: None,
infer_types: false,
}).collect())
}
fn visit_trait_item(&mut self, item: &'hir hir::TraitItem) {
- self.visit_testable(item.name.to_string(), &item.attrs, |this| {
+ self.visit_testable(item.ident.to_string(), &item.attrs, |this| {
intravisit::walk_trait_item(this, item);
});
}
fn visit_impl_item(&mut self, item: &'hir hir::ImplItem) {
- self.visit_testable(item.name.to_string(), &item.attrs, |this| {
+ self.visit_testable(item.ident.to_string(), &item.attrs, |this| {
intravisit::walk_impl_item(this, item);
});
}
// except according to those terms.
#[inline]
-pub fn write_to_vec(vec: &mut Vec<u8>, position: usize, byte: u8) {
- if position == vec.len() {
- vec.push(byte);
- } else {
- vec[position] = byte;
- }
+pub fn write_to_vec(vec: &mut Vec<u8>, byte: u8) {
+ vec.push(byte);
}
#[cfg(target_pointer_width = "32")]
macro_rules! impl_write_unsigned_leb128 {
($fn_name:ident, $int_ty:ident) => (
#[inline]
- pub fn $fn_name(out: &mut Vec<u8>, start_position: usize, mut value: $int_ty) -> usize {
- let mut position = start_position;
+ pub fn $fn_name(out: &mut Vec<u8>, mut value: $int_ty) {
for _ in 0 .. leb128_size!($int_ty) {
let mut byte = (value & 0x7F) as u8;
value >>= 7;
byte |= 0x80;
}
- write_to_vec(out, position, byte);
- position += 1;
+ write_to_vec(out, byte);
if value == 0 {
break;
}
}
-
- position - start_position
}
)
}
-/// The callback `write` is called once for each position
-/// that is to be written to with the byte to be encoded
-/// at that position.
+/// The callback `write` is called once for each byte to be
+/// encoded, in the order in which the bytes should be written.
-pub fn write_signed_leb128_to<W>(mut value: i128, mut write: W) -> usize
- where W: FnMut(usize, u8)
+pub fn write_signed_leb128_to<W>(mut value: i128, mut write: W)
+ where W: FnMut(u8)
{
- let mut position = 0;
-
loop {
let mut byte = (value as u8) & 0x7f;
value >>= 7;
byte |= 0x80; // Mark this byte to show that more bytes will follow.
}
- write(position, byte);
- position += 1;
+ write(byte);
if !more {
break;
}
}
- position
}
-pub fn write_signed_leb128(out: &mut Vec<u8>, start_position: usize, value: i128) -> usize {
- write_signed_leb128_to(value, |i, v| write_to_vec(out, start_position+i, v))
+pub fn write_signed_leb128(out: &mut Vec<u8>, value: i128) {
+ write_signed_leb128_to(value, |v| write_to_vec(out, v))
}
#[inline]
let mut stream = Vec::new();
for x in 0..62 {
- let pos = stream.len();
- let bytes_written = $write_fn_name(&mut stream, pos, (3u64 << x) as $int_ty);
- assert_eq!(stream.len(), pos + bytes_written);
+ $write_fn_name(&mut stream, (3u64 << x) as $int_ty);
}
let mut position = 0;
let values: Vec<_> = (-500..500).map(|i| i * 0x12345789ABCDEF).collect();
let mut stream = Vec::new();
for &x in &values {
- let pos = stream.len();
- let bytes_written = write_signed_leb128(&mut stream, pos, x);
- assert_eq!(stream.len(), pos + bytes_written);
+ write_signed_leb128(&mut stream, x);
}
let mut pos = 0;
for &x in &values {
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(specialization)]
+#![feature(never_type)]
#![cfg_attr(test, feature(test))]
pub use self::serialize::{Decoder, Encoder, Decodable, Encodable};
use leb128::{self, read_signed_leb128, write_signed_leb128};
use std::borrow::Cow;
-use std::io::{self, Write};
use serialize;
// -----------------------------------------------------------------------------
// Encoder
// -----------------------------------------------------------------------------
-pub type EncodeResult = io::Result<()>;
+pub type EncodeResult = Result<(), !>;
-pub struct Encoder<'a> {
- pub cursor: &'a mut io::Cursor<Vec<u8>>,
+pub struct Encoder {
+ pub data: Vec<u8>,
}
-impl<'a> Encoder<'a> {
- pub fn new(cursor: &'a mut io::Cursor<Vec<u8>>) -> Encoder<'a> {
- Encoder { cursor: cursor }
+impl Encoder {
+ pub fn new(data: Vec<u8>) -> Encoder {
+ Encoder { data }
}
- pub fn emit_raw_bytes(&mut self, s: &[u8]) -> EncodeResult {
- self.cursor.write_all(s)
+ pub fn into_inner(self) -> Vec<u8> {
+ self.data
}
-}
+ pub fn emit_raw_bytes(&mut self, s: &[u8]) {
+ self.data.extend_from_slice(s);
+ }
+}
macro_rules! write_uleb128 {
($enc:expr, $value:expr, $fun:ident) => {{
- let pos = $enc.cursor.position() as usize;
- let bytes_written = leb128::$fun($enc.cursor.get_mut(), pos, $value);
- $enc.cursor.set_position((pos + bytes_written) as u64);
+ leb128::$fun(&mut $enc.data, $value);
Ok(())
}}
}
macro_rules! write_sleb128 {
($enc:expr, $value:expr) => {{
- let pos = $enc.cursor.position() as usize;
- let bytes_written = write_signed_leb128($enc.cursor.get_mut(), pos, $value as i128);
- $enc.cursor.set_position((pos + bytes_written) as u64);
+ write_signed_leb128(&mut $enc.data, $value as i128);
Ok(())
}}
}
-impl<'a> serialize::Encoder for Encoder<'a> {
- type Error = io::Error;
+impl serialize::Encoder for Encoder {
+ type Error = !;
#[inline]
fn emit_nil(&mut self) -> EncodeResult {
#[inline]
fn emit_u8(&mut self, v: u8) -> EncodeResult {
- let pos = self.cursor.position() as usize;
- leb128::write_to_vec(self.cursor.get_mut(), pos, v);
- self.cursor.set_position((pos + 1) as u64);
+ self.data.push(v);
Ok(())
}
#[inline]
fn emit_str(&mut self, v: &str) -> EncodeResult {
self.emit_usize(v.len())?;
- let _ = self.cursor.write_all(v.as_bytes());
+ self.emit_raw_bytes(v.as_bytes());
Ok(())
}
}
-impl<'a> Encoder<'a> {
+impl Encoder {
#[inline]
pub fn position(&self) -> usize {
- self.cursor.position() as usize
+ self.data.len()
}
}
#[cfg(test)]
mod tests {
use serialize::{Encodable, Decodable};
- use std::io::Cursor;
use std::fmt::Debug;
use super::{Encoder, Decoder};
fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
- let mut cursor = Cursor::new(Vec::new());
+ let mut encoder = Encoder::new(Vec::new());
for value in &values {
- let mut encoder = Encoder::new(&mut cursor);
Encodable::encode(&value, &mut encoder).unwrap();
}
- let data = cursor.into_inner();
+ let data = encoder.into_inner();
let mut decoder = Decoder::new(&data[..], 0);
for value in values {
/// Calculates the Euclidean modulo (self mod rhs), which is never negative.
///
- /// In particular, the result `n` satisfies `0 <= n < rhs.abs()`.
+ /// In particular, the return value `r` satisfies `0.0 <= r < rhs.abs()` in
+ /// most cases. However, due to a floating point round-off error it can
+ /// result in `r == rhs.abs()`, violating the mathematical definition, if
+ /// `self` is much smaller than `rhs.abs()` in magnitude and `self < 0.0`.
+ /// This result is not an element of the function's codomain, but it is the
+ /// closest floating point number in the real numbers and thus fulfills the
+ /// property `self == self.div_euc(rhs) * rhs + self.mod_euc(rhs)`
+ /// approximately.
///
/// # Examples
///
/// assert_eq!((-a).mod_euc(b), 1.0);
/// assert_eq!(a.mod_euc(-b), 3.0);
/// assert_eq!((-a).mod_euc(-b), 1.0);
+ /// // limitation due to round-off error
+ /// assert!((-std::f32::EPSILON).mod_euc(3.0) != 0.0);
/// ```
#[inline]
#[unstable(feature = "euclidean_division", issue = "49048")]
/// Calculates the Euclidean modulo (self mod rhs), which is never negative.
///
- /// In particular, the result `n` satisfies `0 <= n < rhs.abs()`.
+ /// In particular, the return value `r` satisfies `0.0 <= r < rhs.abs()` in
+ /// most cases. However, due to a floating point round-off error it can
+ /// result in `r == rhs.abs()`, violating the mathematical definition, if
+ /// `self` is much smaller than `rhs.abs()` in magnitude and `self < 0.0`.
+ /// This result is not an element of the function's codomain, but it is the
+ /// closest floating point number in the real numbers and thus fulfills the
+ /// property `self == self.div_euc(rhs) * rhs + self.mod_euc(rhs)`
+ /// approximately.
///
/// # Examples
///
/// assert_eq!((-a).mod_euc(b), 1.0);
/// assert_eq!(a.mod_euc(-b), 3.0);
/// assert_eq!((-a).mod_euc(-b), 1.0);
+ /// // limitation due to round-off error
+ /// assert!((-std::f64::EPSILON).mod_euc(3.0) != 0.0);
/// ```
#[inline]
#[unstable(feature = "euclidean_division", issue = "49048")]
mod imp {
#[link(name = "zircon")]
extern {
- fn zx_cprng_draw_new(buffer: *mut u8, len: usize) -> i32;
- }
-
- fn getrandom(buf: &mut [u8]) -> Result<usize, i32> {
- unsafe {
- let status = zx_cprng_draw_new(buf.as_mut_ptr(), buf.len());
- if status == 0 {
- Ok(buf.len())
- } else {
- Err(status)
- }
- }
+ fn zx_cprng_draw(buffer: *mut u8, len: usize);
}
pub fn fill_bytes(v: &mut [u8]) {
- let mut buf = v;
- while !buf.is_empty() {
- let ret = getrandom(buf);
- match ret {
- Err(err) => {
- panic!("kernel zx_cprng_draw call failed! (returned {}, buf.len() {})",
- err, buf.len())
- }
- Ok(actual) => {
- let move_buf = buf;
- buf = &mut move_buf[(actual as usize)..];
- }
- }
- }
+ unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) }
}
}
///
/// This will lazily initialize the value if this thread has not referenced
/// this key yet. If the key has been destroyed (which may happen if this is called
- /// in a destructor), this function will return a `ThreadLocalError`.
+ /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
///
/// # Panics
///
}
}
- /// Creates a new filemap without setting its line information. If you don't
- /// intend to set the line information yourself, you should use new_filemap_and_lines.
+ /// Creates a new filemap.
/// This does not ensure that only one FileMap exists per file name.
pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc<FileMap> {
let start_pos = self.next_start_pos();
filemap
}
- /// Creates a new filemap and sets its line information.
- /// This does not ensure that only one FileMap exists per file name.
- pub fn new_filemap_and_lines(&self, filename: &Path, src: &str) -> Lrc<FileMap> {
- let fm = self.new_filemap(filename.to_owned().into(), src.to_owned());
- let mut byte_pos: u32 = fm.start_pos.0;
- for line in src.lines() {
- // register the start of this line
- fm.next_line(BytePos(byte_pos));
-
- // update byte_pos to include this line and the \n at the end
- byte_pos += line.len() as u32 + 1;
- }
- fm
- }
-
-
/// Allocates a new FileMap representing a source file from an external
/// crate. The source code of such an "imported filemap" is not available,
/// but we still know enough to generate accurate debuginfo location
external_src: Lock::new(ExternalSource::AbsentOk),
start_pos,
end_pos,
- lines: Lock::new(file_local_lines),
- multibyte_chars: Lock::new(file_local_multibyte_chars),
- non_narrow_chars: Lock::new(file_local_non_narrow_chars),
+ lines: file_local_lines,
+ multibyte_chars: file_local_multibyte_chars,
+ non_narrow_chars: file_local_non_narrow_chars,
name_hash,
});
match self.lookup_line(pos) {
Ok(FileMapAndLine { fm: f, line: a }) => {
let line = a + 1; // Line numbers start at 1
- let linebpos = (*f.lines.borrow())[a];
+ let linebpos = f.lines[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
let col = chpos - linechpos;
let col_display = {
- let non_narrow_chars = f.non_narrow_chars.borrow();
- let start_width_idx = non_narrow_chars
+ let start_width_idx = f
+ .non_narrow_chars
.binary_search_by_key(&linebpos, |x| x.pos())
.unwrap_or_else(|x| x);
- let end_width_idx = non_narrow_chars
+ let end_width_idx = f
+ .non_narrow_chars
.binary_search_by_key(&pos, |x| x.pos())
.unwrap_or_else(|x| x);
let special_chars = end_width_idx - start_width_idx;
- let non_narrow: usize =
- non_narrow_chars[start_width_idx..end_width_idx]
+ let non_narrow: usize = f
+ .non_narrow_chars[start_width_idx..end_width_idx]
.into_iter()
.map(|x| x.width())
.sum();
}
Err(f) => {
let col_display = {
- let non_narrow_chars = f.non_narrow_chars.borrow();
- let end_width_idx = non_narrow_chars
+ let end_width_idx = f
+ .non_narrow_chars
.binary_search_by_key(&pos, |x| x.pos())
.unwrap_or_else(|x| x);
- let non_narrow: usize =
- non_narrow_chars[0..end_width_idx]
+ let non_narrow: usize = f
+ .non_narrow_chars[0..end_width_idx]
.into_iter()
.map(|x| x.width())
.sum();
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
- for mbc in map.multibyte_chars.borrow().iter() {
+ for mbc in map.multibyte_chars.iter() {
debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
// count the actual extra bytes.
- total_extra_bytes += mbc.bytes - 1;
+ total_extra_bytes += mbc.bytes as u32 - 1;
// We should never see a byte position in the middle of a
// character
- assert!(bpos.to_usize() >= mbc.pos.to_usize() + mbc.bytes);
+ assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
} else {
break;
}
}
- assert!(map.start_pos.to_usize() + total_extra_bytes <= bpos.to_usize());
- CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes)
+ assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
+ CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize)
}
// Return the index of the filemap (in self.files) which contains pos.
#[cfg(test)]
mod tests {
use super::*;
- use std::borrow::Cow;
use rustc_data_structures::sync::Lrc;
- #[test]
- fn t1 () {
- let cm = CodeMap::new(FilePathMapping::empty());
- let fm = cm.new_filemap(PathBuf::from("blork.rs").into(),
- "first line.\nsecond line".to_string());
- fm.next_line(BytePos(0));
- // Test we can get lines with partial line info.
- assert_eq!(fm.get_line(0), Some(Cow::from("first line.")));
- // TESTING BROKEN BEHAVIOR: line break declared before actual line break.
- fm.next_line(BytePos(10));
- assert_eq!(fm.get_line(1), Some(Cow::from(".")));
- fm.next_line(BytePos(12));
- assert_eq!(fm.get_line(2), Some(Cow::from("second line")));
- }
-
- #[test]
- #[should_panic]
- fn t2 () {
- let cm = CodeMap::new(FilePathMapping::empty());
- let fm = cm.new_filemap(PathBuf::from("blork.rs").into(),
- "first line.\nsecond line".to_string());
- // TESTING *REALLY* BROKEN BEHAVIOR:
- fm.next_line(BytePos(0));
- fm.next_line(BytePos(10));
- fm.next_line(BytePos(2));
- }
-
fn init_code_map() -> CodeMap {
let cm = CodeMap::new(FilePathMapping::empty());
- let fm1 = cm.new_filemap(PathBuf::from("blork.rs").into(),
- "first line.\nsecond line".to_string());
- let fm2 = cm.new_filemap(PathBuf::from("empty.rs").into(),
- "".to_string());
- let fm3 = cm.new_filemap(PathBuf::from("blork2.rs").into(),
- "first line.\nsecond line".to_string());
-
- fm1.next_line(BytePos(0));
- fm1.next_line(BytePos(12));
- fm2.next_line(fm2.start_pos);
- fm3.next_line(fm3.start_pos);
- fm3.next_line(fm3.start_pos + BytePos(12));
-
+ cm.new_filemap(PathBuf::from("blork.rs").into(),
+ "first line.\nsecond line".to_string());
+ cm.new_filemap(PathBuf::from("empty.rs").into(),
+ "".to_string());
+ cm.new_filemap(PathBuf::from("blork2.rs").into(),
+ "first line.\nsecond line".to_string());
cm
}
fn init_code_map_mbc() -> CodeMap {
let cm = CodeMap::new(FilePathMapping::empty());
// € is a three byte utf8 char.
- let fm1 =
- cm.new_filemap(PathBuf::from("blork.rs").into(),
- "fir€st €€€€ line.\nsecond line".to_string());
- let fm2 = cm.new_filemap(PathBuf::from("blork2.rs").into(),
- "first line€€.\n€ second line".to_string());
-
- fm1.next_line(BytePos(0));
- fm1.next_line(BytePos(28));
- fm2.next_line(fm2.start_pos);
- fm2.next_line(fm2.start_pos + BytePos(20));
-
- fm1.record_multibyte_char(BytePos(3), 3);
- fm1.record_multibyte_char(BytePos(9), 3);
- fm1.record_multibyte_char(BytePos(12), 3);
- fm1.record_multibyte_char(BytePos(15), 3);
- fm1.record_multibyte_char(BytePos(18), 3);
- fm2.record_multibyte_char(fm2.start_pos + BytePos(10), 3);
- fm2.record_multibyte_char(fm2.start_pos + BytePos(13), 3);
- fm2.record_multibyte_char(fm2.start_pos + BytePos(18), 3);
-
+ cm.new_filemap(PathBuf::from("blork.rs").into(),
+ "fir€st €€€€ line.\nsecond line".to_string());
+ cm.new_filemap(PathBuf::from("blork2.rs").into(),
+ "first line€€.\n€ second line".to_string());
cm
}
let cm = CodeMap::new(FilePathMapping::empty());
let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
let selection = " \n ~~\n~~~\n~~~~~ \n \n";
- cm.new_filemap_and_lines(Path::new("blork.rs"), inputtext);
+ cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
let span = span_from_selection(inputtext, selection);
// check that we are extracting the text we thought we were extracting
let inputtext = "bbbb BB\ncc CCC\n";
let selection1 = " ~~\n \n";
let selection2 = " \n ~~~\n";
- cm.new_filemap_and_lines(Path::new("blork.rs"), inputtext);
+ cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
let span1 = span_from_selection(inputtext, selection1);
let span2 = span_from_selection(inputtext, selection2);
/// Whether the contents of the macro can use `unsafe`
/// without triggering the `unsafe_code` lint.
allow_internal_unsafe: bool,
+ /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
+ /// for a given macro.
+ local_inner_macros: bool,
/// The macro's feature name if it is unstable, and the stability feature
unstable_feature: Option<(Symbol, u32)>,
/// Edition of the crate in which the macro is defined
format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))),
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: ext.edition(),
});
def_site_span: Option<Span>,
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
// can't infer this type
unstable_feature: Option<(Symbol, u32)>,
edition| {
format: macro_bang_format(path),
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
edition,
});
Ok(())
let opt_expanded = match *ext {
DeclMacro(ref expand, def_span, edition) => {
if let Err(dummy_span) = validate_and_set_expn_info(self, def_span.map(|(_, s)| s),
- false, false, None,
+ false, false, false, None,
edition) {
dummy_span
} else {
def_info,
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition,
} => {
if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition) {
dummy_span
format: macro_bang_format(path),
allow_internal_unstable,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
// FIXME probably want to follow macro_rules macros here.
allow_internal_unstable,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition,
});
format: MacroAttribute(pretty_name),
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: ext.edition(),
};
match String::from_utf8(buf) {
Ok(src) => {
+ let src_interned = Symbol::intern(&src);
+
// Add this input file to the code map to make it available as
// dependency information
- self.cx.codemap().new_filemap_and_lines(&filename, &src);
+ self.cx.codemap().new_filemap(filename.into(), src);
let include_info = vec![
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
dummy_spanned(file)))),
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
attr::mk_name_value_item_str(Ident::from_str("contents"),
- dummy_spanned(Symbol::intern(&src))))),
+ dummy_spanned(src_interned)))),
];
let include_ident = Ident::from_str("include");
};
match String::from_utf8(bytes) {
Ok(src) => {
+ let interned_src = Symbol::intern(&src);
+
// Add this input file to the code map to make it available as
// dependency information
- cx.codemap().new_filemap_and_lines(&file, &src);
+ cx.codemap().new_filemap(file.into(), src);
- base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
+ base::MacEager::expr(cx.expr_str(sp, interned_src))
}
Err(_) => {
cx.span_err(sp,
Ok(..) => {
// Add this input file to the code map to make it available as
// dependency information, but don't enter it's contents
- cx.codemap().new_filemap_and_lines(&file, "");
+ cx.codemap().new_filemap(file.into(), "".to_string());
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
}
if body.legacy {
let allow_internal_unstable = attr::contains_name(&def.attrs, "allow_internal_unstable");
let allow_internal_unsafe = attr::contains_name(&def.attrs, "allow_internal_unsafe");
+ let mut local_inner_macros = false;
+ if let Some(macro_export) = attr::find_by_name(&def.attrs, "macro_export") {
+ if let Some(l) = macro_export.meta_item_list() {
+ local_inner_macros = attr::list_contains_name(&l, "local_inner_macros");
+ }
+ }
let unstable_feature = attr::find_stability(&sess.span_diagnostic,
&def.attrs, def.span).and_then(|stability| {
def_info: Some((def.id, def.span)),
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition,
}
":literal fragment specifier is experimental and subject to change";
pub const EXPLAIN_UNSIZED_TUPLE_COERCION: &'static str =
- "Unsized tuple coercion is not stable enough for use and is subject to change";
+ "unsized tuple coercion is not stable enough for use and is subject to change";
pub const EXPLAIN_MACRO_AT_MOST_ONCE_REP: &'static str =
- "Using the `?` macro Kleene operator for \"at most one\" repetition is unstable";
+ "using the `?` macro Kleene operator for \"at most one\" repetition is unstable";
pub const EXPLAIN_MACROS_IN_EXTERN: &'static str =
- "Macro invocations in `extern {}` blocks are experimental.";
+ "macro invocations in `extern {}` blocks are experimental.";
// mention proc-macros when enabled
pub const EXPLAIN_PROC_MACROS_IN_EXTERN: &'static str =
- "Macro and proc-macro invocations in `extern {}` blocks are experimental.";
+ "macro and proc-macro invocations in `extern {}` blocks are experimental.";
struct PostExpansionVisitor<'a> {
context: &'a Context<'a>,
let mut lines: Vec<String> = Vec::new();
// Count the number of chars since the start of the line by rescanning.
- let mut src_index = rdr.src_index(rdr.filemap.line_begin_pos());
+ let mut src_index = rdr.src_index(rdr.filemap.line_begin_pos(rdr.pos));
let end_src_index = rdr.src_index(rdr.pos);
- assert!(src_index <= end_src_index);
+ assert!(src_index <= end_src_index,
+ "src_index={}, end_src_index={}, line_begin_pos={}",
+ src_index, end_src_index, rdr.filemap.line_begin_pos(rdr.pos).to_u32());
let mut n = 0;
while src_index < end_src_index {
let c = char_at(&rdr.src, src_index);
pub ch: Option<char>,
pub filemap: Lrc<syntax_pos::FileMap>,
/// Stop reading src at this index.
- end_src_index: usize,
- /// Whether to record new-lines and multibyte chars in filemap.
- /// This is only necessary the first time a filemap is lexed.
- /// If part of a filemap is being re-lexed, this should be set to false.
- save_new_lines_and_multibyte: bool,
+ pub end_src_index: usize,
// cached:
peek_tok: token::Token,
peek_span: Span,
ch: Some('\n'),
filemap,
end_src_index: src.len(),
- save_new_lines_and_multibyte: true,
// dummy values; not read
peek_tok: token::Eof,
peek_span: syntax_pos::DUMMY_SP,
let mut sr = StringReader::new_raw_internal(sess, begin.fm, None);
// Seek the lexer to the right byte range.
- sr.save_new_lines_and_multibyte = false;
sr.next_pos = span.lo();
sr.end_src_index = sr.src_index(span.hi());
let next_ch = char_at(&self.src, next_src_index);
let next_ch_len = next_ch.len_utf8();
- if self.ch.unwrap() == '\n' {
- if self.save_new_lines_and_multibyte {
- self.filemap.next_line(self.next_pos);
- }
- }
- if next_ch_len > 1 {
- if self.save_new_lines_and_multibyte {
- self.filemap.record_multibyte_char(self.next_pos, next_ch_len);
- }
- }
- self.filemap.record_width(self.next_pos, next_ch);
-
self.ch = Some(next_ch);
self.pos = self.next_pos;
self.next_pos = self.next_pos + Pos::from_usize(next_ch_len);
match self.token {
token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
self.bump(); // `_`
- Ok(Some(Ident::new(ident.name.gensymed(), ident.span)))
+ Ok(Some(ident.gensym()))
}
_ => self.parse_ident().map(Some),
}
format: MacroAttribute(Symbol::intern("std_inject")),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
format: MacroAttribute(Symbol::intern("test")),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
let output = Arc::new(Mutex::new(Vec::new()));
let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty()));
- code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);
+ code_map.new_filemap(Path::new("test.rs").to_owned().into(), file_text.to_owned());
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span);
def_info: None,
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
unstable_feature: None,
edition: hygiene::default_edition(),
});
def_info: None,
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
unstable_feature: None,
edition: hygiene::default_edition(),
});
format: MacroAttribute(Symbol::intern("proc_macro")),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
let span = DUMMY_SP.apply_mark(mark);
arena = { path = "../libarena" }
scoped-tls = { version = "0.1.1", features = ["nightly"] }
unicode-width = "0.1.4"
+cfg-if = "0.1.2"
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use unicode_width::UnicodeWidthChar;
+use super::*;
+
+/// Find all newlines, multi-byte characters, and non-narrow characters in a
+/// FileMap.
+///
+/// Returns the positions of line beginnings, the multi-byte characters, and
+/// the non-narrow (zero-width or wide) characters found in `src`. All
+/// returned positions are absolute, i.e. they already have
+/// `filemap_start_pos` added to them.
+///
+/// This function will use an SSE2 enhanced implementation if hardware support
+/// is detected at runtime.
+pub fn analyze_filemap(
+    src: &str,
+    filemap_start_pos: BytePos)
+    -> (Vec<BytePos>, Vec<MultiByteChar>, Vec<NonNarrowChar>)
+{
+    // Every FileMap starts with one line beginning at its first byte.
+    let mut lines = vec![filemap_start_pos];
+    let mut multi_byte_chars = vec![];
+    let mut non_narrow_chars = vec![];
+
+    // Calls the right implementation, depending on hardware support available.
+    analyze_filemap_dispatch(src,
+                             filemap_start_pos,
+                             &mut lines,
+                             &mut multi_byte_chars,
+                             &mut non_narrow_chars);
+
+    // The code above optimistically registers a new line *after* each \n
+    // it encounters. If that point is already outside the filemap, remove
+    // it again.
+    if let Some(&last_line_start) = lines.last() {
+        let file_map_end = filemap_start_pos + BytePos::from_usize(src.len());
+        assert!(file_map_end >= last_line_start);
+        if last_line_start == file_map_end {
+            // `src` ends with '\n': drop the phantom line start past the end.
+            lines.pop();
+        }
+    }
+
+    (lines, multi_byte_chars, non_narrow_chars)
+}
+
+cfg_if! {
+    if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"),
+                 not(stage0)))] {
+        // Dispatch to the SSE2 implementation when the CPU supports it,
+        // otherwise scan the whole source with the generic fallback.
+        fn analyze_filemap_dispatch(src: &str,
+                                    filemap_start_pos: BytePos,
+                                    lines: &mut Vec<BytePos>,
+                                    multi_byte_chars: &mut Vec<MultiByteChar>,
+                                    non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            if is_x86_feature_detected!("sse2") {
+                unsafe {
+                    // SAFETY: we just detected SSE2 support at runtime, so
+                    // calling the `#[target_feature(enable = "sse2")]`
+                    // function is safe on this CPU.
+                    analyze_filemap_sse2(src,
+                                         filemap_start_pos,
+                                         lines,
+                                         multi_byte_chars,
+                                         non_narrow_chars);
+                }
+            } else {
+                analyze_filemap_generic(src,
+                                        src.len(),
+                                        filemap_start_pos,
+                                        lines,
+                                        multi_byte_chars,
+                                        non_narrow_chars);
+
+            }
+        }
+
+        /// Check 16 byte chunks of text at a time. If the chunk contains
+        /// something other than printable ASCII characters and newlines, the
+        /// function falls back to the generic implementation. Otherwise it uses
+        /// SSE2 intrinsics to quickly find all newlines.
+        #[target_feature(enable = "sse2")]
+        unsafe fn analyze_filemap_sse2(src: &str,
+                                       output_offset: BytePos,
+                                       lines: &mut Vec<BytePos>,
+                                       multi_byte_chars: &mut Vec<MultiByteChar>,
+                                       non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            #[cfg(target_arch = "x86")]
+            use std::arch::x86::*;
+            #[cfg(target_arch = "x86_64")]
+            use std::arch::x86_64::*;
+
+            const CHUNK_SIZE: usize = 16;
+
+            let src_bytes = src.as_bytes();
+
+            let chunk_count = src.len() / CHUNK_SIZE;
+
+            // This variable keeps track of where we should start decoding a
+            // chunk. If a multi-byte character spans across chunk boundaries,
+            // we need to skip that part in the next chunk because we already
+            // handled it.
+            let mut intra_chunk_offset = 0;
+
+            for chunk_index in 0 .. chunk_count {
+                let ptr = src_bytes.as_ptr() as *const __m128i;
+                // We don't know if the pointer is aligned to 16 bytes, so we
+                // use `loadu`, which supports unaligned loading.
+                let chunk = _mm_loadu_si128(ptr.offset(chunk_index as isize));
+
+                // For each character in the chunk, see if its byte value is
+                // negative (as i8), which indicates that it's part of a
+                // multi-byte UTF-8 char.
+                let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0));
+                // Create a bit mask from the comparison results.
+                let multibyte_mask = _mm_movemask_epi8(multibyte_test);
+
+                // If the bit mask is all zero, we only have ASCII chars here:
+                if multibyte_mask == 0 {
+                    // A multi-byte char spilling into this chunk would have
+                    // set `multibyte_mask`, so the offset must be clear here.
+                    assert!(intra_chunk_offset == 0);
+
+                    // Check if there are any control characters in the chunk. All
+                    // control characters that we can encounter at this point have a
+                    // byte value less than 32 or ...
+                    let control_char_test0 = _mm_cmplt_epi8(chunk, _mm_set1_epi8(32));
+                    let control_char_mask0 = _mm_movemask_epi8(control_char_test0);
+
+                    // ... it's the ASCII 'DEL' character with a value of 127.
+                    let control_char_test1 = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127));
+                    let control_char_mask1 = _mm_movemask_epi8(control_char_test1);
+
+                    let control_char_mask = control_char_mask0 | control_char_mask1;
+
+                    if control_char_mask != 0 {
+                        // Check for newlines in the chunk
+                        let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8));
+                        let newlines_mask = _mm_movemask_epi8(newlines_test);
+
+                        if control_char_mask == newlines_mask {
+                            // All control characters are newlines, record them.
+                            // Set the upper 16 bits so that `trailing_zeros`
+                            // below returns >= 16 once all real newline bits
+                            // have been consumed, terminating the loop.
+                            let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32;
+                            // The `+ 1` makes each recorded position point
+                            // *past* the newline, i.e. at the start of the
+                            // following line (trimmed by the caller if it
+                            // falls outside the filemap).
+                            let output_offset = output_offset +
+                                BytePos::from_usize(chunk_index * CHUNK_SIZE + 1);
+
+                            loop {
+                                let index = newlines_mask.trailing_zeros();
+
+                                if index >= CHUNK_SIZE as u32 {
+                                    // We have arrived at the end of the chunk.
+                                    break
+                                }
+
+                                lines.push(BytePos(index) + output_offset);
+
+                                // Clear the bit, so we can find the next one.
+                                newlines_mask &= (!1) << index;
+                            }
+
+                            // We are done for this chunk. All control characters were
+                            // newlines and we took care of those.
+                            continue
+                        } else {
+                            // Some of the control characters are not newlines,
+                            // fall through to the slow path below.
+                        }
+                    } else {
+                        // No control characters, nothing to record for this chunk
+                        continue
+                    }
+                }
+
+                // The slow path.
+                // There are control chars in here, fallback to generic decoding.
+                let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset;
+                // The generic scan returns how far a trailing multi-byte char
+                // reached into the next chunk; remember it so we skip those
+                // already-decoded bytes next iteration.
+                intra_chunk_offset = analyze_filemap_generic(
+                    &src[scan_start .. ],
+                    CHUNK_SIZE - intra_chunk_offset,
+                    BytePos::from_usize(scan_start) + output_offset,
+                    lines,
+                    multi_byte_chars,
+                    non_narrow_chars
+                );
+            }
+
+            // There might still be a tail left to analyze
+            let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset;
+            if tail_start < src.len() {
+                analyze_filemap_generic(&src[tail_start as usize ..],
+                                        src.len() - tail_start,
+                                        output_offset + BytePos::from_usize(tail_start),
+                                        lines,
+                                        multi_byte_chars,
+                                        non_narrow_chars);
+            }
+        }
+    } else {
+
+        // The target (or compiler version) does not support SSE2 ...
+        fn analyze_filemap_dispatch(src: &str,
+                                    filemap_start_pos: BytePos,
+                                    lines: &mut Vec<BytePos>,
+                                    multi_byte_chars: &mut Vec<MultiByteChar>,
+                                    non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            analyze_filemap_generic(src,
+                                    src.len(),
+                                    filemap_start_pos,
+                                    lines,
+                                    multi_byte_chars,
+                                    non_narrow_chars);
+        }
+    }
+}
+
+// `scan_len` determines the number of bytes in `src` to scan. Note that the
+// function can read past `scan_len` if a multi-byte character starts within
+// the range but extends past it. The overflow (number of bytes consumed past
+// `scan_len`; zero if the scan ended exactly on a char boundary) is returned
+// by the function.
+fn analyze_filemap_generic(src: &str,
+                           scan_len: usize,
+                           output_offset: BytePos,
+                           lines: &mut Vec<BytePos>,
+                           multi_byte_chars: &mut Vec<MultiByteChar>,
+                           non_narrow_chars: &mut Vec<NonNarrowChar>)
+                           -> usize
+{
+    assert!(src.len() >= scan_len);
+    let mut i = 0;
+    let src_bytes = src.as_bytes();
+
+    while i < scan_len {
+        let byte = unsafe {
+            // SAFETY: We verified that i < scan_len <= src.len()
+            *src_bytes.get_unchecked(i as usize)
+        };
+
+        // How much to advance in order to get to the next UTF-8 char in the
+        // string.
+        let mut char_len = 1;
+
+        if byte < 32 {
+            // This is an ASCII control character, it could be one of the cases
+            // that are interesting to us.
+
+            let pos = BytePos::from_usize(i) + output_offset;
+
+            match byte {
+                b'\n' => {
+                    // Optimistically record the position *after* the newline
+                    // as the start of the next line; analyze_filemap() trims
+                    // it again if it falls outside the filemap.
+                    lines.push(pos + BytePos(1));
+                }
+                b'\t' => {
+                    non_narrow_chars.push(NonNarrowChar::Tab(pos));
+                }
+                _ => {
+                    non_narrow_chars.push(NonNarrowChar::ZeroWidth(pos));
+                }
+            }
+        } else if byte >= 127 {
+            // The slow path:
+            // This is either ASCII control character "DEL" or the beginning of
+            // a multibyte char. Just decode to `char`.
+            let c = (&src[i..]).chars().next().unwrap();
+            char_len = c.len_utf8();
+
+            let pos = BytePos::from_usize(i) + output_offset;
+
+            if char_len > 1 {
+                assert!(char_len >=2 && char_len <= 4);
+                let mbc = MultiByteChar {
+                    pos,
+                    bytes: char_len as u8,
+                };
+                multi_byte_chars.push(mbc);
+            }
+
+            // Assume control characters are zero width.
+            // FIXME: How can we decide between `width` and `width_cjk`?
+            let char_width = UnicodeWidthChar::width(c).unwrap_or(0);
+
+            // Only non-narrow (width != 1) characters need to be recorded.
+            if char_width != 1 {
+                non_narrow_chars.push(NonNarrowChar::new(pos, char_width));
+            }
+        }
+
+        i += char_len;
+    }
+
+    // `i` can only exceed `scan_len` if the last decoded char straddled the
+    // boundary; report that overflow to the caller.
+    i - scan_len
+}
+
+
+
+// Generates one `#[test]` function per invocation: runs `analyze_filemap`
+// on `text` (offset by `filemap_start_pos`) and asserts that the returned
+// line starts, multi-byte chars `(pos, bytes)`, and non-narrow chars
+// `(pos, width)` match the expected vectors exactly.
+macro_rules! test {
+    (case: $test_name:ident,
+     text: $text:expr,
+     filemap_start_pos: $filemap_start_pos:expr,
+     lines: $lines:expr,
+     multi_byte_chars: $multi_byte_chars:expr,
+     non_narrow_chars: $non_narrow_chars:expr,) => (
+
+    #[test]
+    fn $test_name() {
+
+        let (lines, multi_byte_chars, non_narrow_chars) =
+            analyze_filemap($text, BytePos($filemap_start_pos));
+
+        let expected_lines: Vec<BytePos> = $lines
+            .into_iter()
+            .map(|pos| BytePos(pos))
+            .collect();
+
+        assert_eq!(lines, expected_lines);
+
+        let expected_mbcs: Vec<MultiByteChar> = $multi_byte_chars
+            .into_iter()
+            .map(|(pos, bytes)| MultiByteChar {
+                pos: BytePos(pos),
+                bytes,
+            })
+            .collect();
+
+        assert_eq!(multi_byte_chars, expected_mbcs);
+
+        let expected_nncs: Vec<NonNarrowChar> = $non_narrow_chars
+            .into_iter()
+            .map(|(pos, width)| {
+                NonNarrowChar::new(BytePos(pos), width)
+            })
+            .collect();
+
+        assert_eq!(non_narrow_chars, expected_nncs);
+    })
+}
+
+test!(
+ case: empty_text,
+ text: "",
+ filemap_start_pos: 0,
+ lines: vec![],
+ multi_byte_chars: vec![],
+ non_narrow_chars: vec![],
+);
+
+test!(
+ case: newlines_short,
+ text: "a\nc",
+ filemap_start_pos: 0,
+ lines: vec![0, 2],
+ multi_byte_chars: vec![],
+ non_narrow_chars: vec![],
+);
+
+test!(
+ case: newlines_long,
+ text: "012345678\nabcdef012345678\na",
+ filemap_start_pos: 0,
+ lines: vec![0, 10, 26],
+ multi_byte_chars: vec![],
+ non_narrow_chars: vec![],
+);
+
+test!(
+ case: newline_and_multi_byte_char_in_same_chunk,
+ text: "01234β789\nbcdef0123456789abcdef",
+ filemap_start_pos: 0,
+ lines: vec![0, 11],
+ multi_byte_chars: vec![(5, 2)],
+ non_narrow_chars: vec![],
+);
+
+test!(
+ case: newline_and_control_char_in_same_chunk,
+ text: "01234\u{07}6789\nbcdef0123456789abcdef",
+ filemap_start_pos: 0,
+ lines: vec![0, 11],
+ multi_byte_chars: vec![],
+ non_narrow_chars: vec![(5, 0)],
+);
+
+test!(
+ case: multi_byte_char_short,
+ text: "aβc",
+ filemap_start_pos: 0,
+ lines: vec![0],
+ multi_byte_chars: vec![(1, 2)],
+ non_narrow_chars: vec![],
+);
+
+test!(
+ case: multi_byte_char_long,
+ text: "0123456789abcΔf012345β",
+ filemap_start_pos: 0,
+ lines: vec![0],
+ multi_byte_chars: vec![(13, 2), (22, 2)],
+ non_narrow_chars: vec![],
+);
+
+test!(
+ case: multi_byte_char_across_chunk_boundary,
+ text: "0123456789abcdeΔ123456789abcdef01234",
+ filemap_start_pos: 0,
+ lines: vec![0],
+ multi_byte_chars: vec![(15, 2)],
+ non_narrow_chars: vec![],
+);
+
+test!(
+ case: multi_byte_char_across_chunk_boundary_tail,
+ text: "0123456789abcdeΔ....",
+ filemap_start_pos: 0,
+ lines: vec![0],
+ multi_byte_chars: vec![(15, 2)],
+ non_narrow_chars: vec![],
+);
+
+test!(
+ case: non_narrow_short,
+ text: "0\t2",
+ filemap_start_pos: 0,
+ lines: vec![0],
+ multi_byte_chars: vec![],
+ non_narrow_chars: vec![(1, 4)],
+);
+
+test!(
+ case: non_narrow_long,
+ text: "01\t3456789abcdef01234567\u{07}9",
+ filemap_start_pos: 0,
+ lines: vec![0],
+ multi_byte_chars: vec![],
+ non_narrow_chars: vec![(2, 4), (24, 0)],
+);
+
+test!(
+ case: output_offset_all,
+ text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
+ filemap_start_pos: 1000,
+ lines: vec![0 + 1000, 7 + 1000, 27 + 1000],
+ multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)],
+ non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)],
+);
use GLOBALS;
use Span;
use edition::Edition;
-use symbol::{Ident, Symbol};
+use symbol::Symbol;
use serialize::{Encodable, Decodable, Encoder, Decoder};
use std::collections::HashMap;
marks: Vec<MarkData>,
syntax_contexts: Vec<SyntaxContextData>,
markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
- gensym_to_ctxt: HashMap<Symbol, Span>,
default_edition: Edition,
}
modern: SyntaxContext(0),
}],
markings: HashMap::new(),
- gensym_to_ctxt: HashMap::new(),
default_edition: Edition::Edition2015,
}
}
/// Whether the macro is allowed to use `unsafe` internally
/// even if the user crate has `#![forbid(unsafe_code)]`.
pub allow_internal_unsafe: bool,
+ /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
+ /// for a given macro.
+ pub local_inner_macros: bool,
/// Edition of the crate in which the macro is defined.
pub edition: Edition,
}
Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene
}
}
-
-impl Symbol {
- pub fn from_ident(ident: Ident) -> Symbol {
- HygieneData::with(|data| {
- let gensym = ident.name.gensymed();
- data.gensym_to_ctxt.insert(gensym, ident.span);
- gensym
- })
- }
-
- pub fn to_ident(self) -> Ident {
- HygieneData::with(|data| {
- match data.gensym_to_ctxt.get(&self) {
- Some(&span) => Ident::new(self.interned(), span),
- None => Ident::with_empty_ctxt(self),
- }
- })
- }
-}
#![feature(optin_builtin_traits)]
#![allow(unused_attributes)]
#![feature(specialization)]
+#![feature(stdsimd)]
use std::borrow::Cow;
use std::cell::Cell;
extern crate serialize;
extern crate serialize as rustc_serialize; // used by deriving
+#[macro_use]
+extern crate cfg_if;
+
extern crate unicode_width;
pub mod edition;
pub mod symbol;
+mod analyze_filemap;
+
pub struct Globals {
symbol_interner: Lock<symbol::Interner>,
span_interner: Lock<span_encoding::SpanInterner>,
pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty();
/// Identifies an offset of a multi-byte character in a FileMap
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub struct MultiByteChar {
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// The number of bytes, >=2
- pub bytes: usize,
+ pub bytes: u8,
}
/// Identifies an offset of a non-narrow character in a FileMap
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub enum NonNarrowChar {
/// Represents a zero-width character
ZeroWidth(BytePos),
/// The end position of this source in the CodeMap
pub end_pos: BytePos,
/// Locations of lines beginnings in the source code
- pub lines: Lock<Vec<BytePos>>,
+ pub lines: Vec<BytePos>,
/// Locations of multi-byte characters in the source code
- pub multibyte_chars: Lock<Vec<MultiByteChar>>,
+ pub multibyte_chars: Vec<MultiByteChar>,
/// Width of characters that are not narrow in the source code
- pub non_narrow_chars: Lock<Vec<NonNarrowChar>>,
+ pub non_narrow_chars: Vec<NonNarrowChar>,
/// A hash of the filename, used for speeding up the incr. comp. hashing.
pub name_hash: u128,
}
s.emit_struct_field("start_pos", 4, |s| self.start_pos.encode(s))?;
s.emit_struct_field("end_pos", 5, |s| self.end_pos.encode(s))?;
s.emit_struct_field("lines", 6, |s| {
- let lines = self.lines.borrow();
+ let lines = &self.lines[..];
// store the length
s.emit_u32(lines.len() as u32)?;
Ok(())
})?;
s.emit_struct_field("multibyte_chars", 7, |s| {
- (*self.multibyte_chars.borrow()).encode(s)
+ self.multibyte_chars.encode(s)
})?;
s.emit_struct_field("non_narrow_chars", 8, |s| {
- (*self.non_narrow_chars.borrow()).encode(s)
+ self.non_narrow_chars.encode(s)
})?;
s.emit_struct_field("name_hash", 9, |s| {
self.name_hash.encode(s)
src: None,
src_hash,
external_src: Lock::new(ExternalSource::AbsentOk),
- lines: Lock::new(lines),
- multibyte_chars: Lock::new(multibyte_chars),
- non_narrow_chars: Lock::new(non_narrow_chars),
+ lines,
+ multibyte_chars,
+ non_narrow_chars,
name_hash,
})
})
};
let end_pos = start_pos.to_usize() + src.len();
+ let (lines, multibyte_chars, non_narrow_chars) =
+ analyze_filemap::analyze_filemap(&src[..], start_pos);
+
FileMap {
name,
name_was_remapped,
external_src: Lock::new(ExternalSource::Unneeded),
start_pos,
end_pos: Pos::from_usize(end_pos),
- lines: Lock::new(Vec::new()),
- multibyte_chars: Lock::new(Vec::new()),
- non_narrow_chars: Lock::new(Vec::new()),
+ lines,
+ multibyte_chars,
+ non_narrow_chars,
name_hash,
}
}
- /// EFFECT: register a start-of-line offset in the
- /// table of line-beginnings.
- /// UNCHECKED INVARIANT: these offsets must be added in the right
- /// order and must be in the right places; there is shared knowledge
- /// about what ends a line between this file and parse.rs
- /// WARNING: pos param here is the offset relative to start of CodeMap,
- /// and CodeMap will append a newline when adding a filemap without a newline at the end,
- /// so the safe way to call this is with value calculated as
- /// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
- pub fn next_line(&self, pos: BytePos) {
- // the new charpos must be > the last one (or it's the first one).
- let mut lines = self.lines.borrow_mut();
- let line_len = lines.len();
- assert!(line_len == 0 || ((*lines)[line_len - 1] < pos));
- lines.push(pos);
- }
-
/// Return the BytePos of the beginning of the current line.
- pub fn line_begin_pos(&self) -> BytePos {
- let lines = self.lines.borrow();
- match lines.last() {
- Some(&line_pos) => line_pos,
- None => self.start_pos,
- }
+ pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
+ let line_index = self.lookup_line(pos).unwrap();
+ self.lines[line_index]
}
/// Add externally loaded source.
}
let begin = {
- let lines = self.lines.borrow();
- let line = if let Some(line) = lines.get(line_number) {
+ let line = if let Some(line) = self.lines.get(line_number) {
line
} else {
return None;
}
}
- pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
- assert!(bytes >=2 && bytes <= 4);
- let mbc = MultiByteChar {
- pos,
- bytes,
- };
- self.multibyte_chars.borrow_mut().push(mbc);
- }
-
- #[inline]
- pub fn record_width(&self, pos: BytePos, ch: char) {
- let width = match ch {
- '\t' =>
- // Tabs will consume 4 columns.
- 4,
- '\n' =>
- // Make newlines take one column so that displayed spans can point them.
- 1,
- ch =>
- // Assume control characters are zero width.
- // FIXME: How can we decide between `width` and `width_cjk`?
- unicode_width::UnicodeWidthChar::width(ch).unwrap_or(0),
- };
- // Only record non-narrow characters.
- if width != 1 {
- self.non_narrow_chars.borrow_mut().push(NonNarrowChar::new(pos, width));
- }
- }
-
pub fn is_real_file(&self) -> bool {
self.name.is_real()
}
self.end_pos.0 - self.start_pos.0
}
pub fn count_lines(&self) -> usize {
- self.lines.borrow().len()
+ self.lines.len()
}
/// Find the line containing the given position. The return value is the
/// number. If the filemap is empty or the position is located before the
/// first line, None is returned.
pub fn lookup_line(&self, pos: BytePos) -> Option<usize> {
- let lines = self.lines.borrow();
- if lines.len() == 0 {
+ if self.lines.len() == 0 {
return None;
}
- let line_index = lookup_line(&lines[..], pos);
- assert!(line_index < lines.len() as isize);
+ let line_index = lookup_line(&self.lines[..], pos);
+ assert!(line_index < self.lines.len() as isize);
if line_index >= 0 {
Some(line_index as usize)
} else {
return (self.start_pos, self.end_pos);
}
- let lines = self.lines.borrow();
- assert!(line_index < lines.len());
- if line_index == (lines.len() - 1) {
- (lines[line_index], self.end_pos)
+ assert!(line_index < self.lines.len());
+ if line_index == (self.lines.len() - 1) {
+ (self.lines[line_index], self.end_pos)
} else {
- (lines[line_index], lines[line_index + 1])
+ (self.lines[line_index], self.lines[line_index + 1])
}
}
pub trait Pos {
fn from_usize(n: usize) -> Self;
fn to_usize(&self) -> usize;
+ fn from_u32(n: u32) -> Self;
+ fn to_u32(&self) -> u32;
}
/// A byte offset. Keep this small (currently 32-bits), as AST contains
fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
#[inline(always)]
- fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize }
+ fn to_usize(&self) -> usize { self.0 as usize }
+
+ #[inline(always)]
+ fn from_u32(n: u32) -> BytePos { BytePos(n) }
+
+ #[inline(always)]
+ fn to_u32(&self) -> u32 { self.0 }
}
impl Add for BytePos {
fn from_usize(n: usize) -> CharPos { CharPos(n) }
#[inline(always)]
- fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
+ fn to_usize(&self) -> usize { self.0 }
+
+ #[inline(always)]
+ fn from_u32(n: u32) -> CharPos { CharPos(n as usize) }
+
+ #[inline(always)]
+ fn to_u32(&self) -> u32 { self.0 as u32}
}
impl Add for CharPos {
Ident::new(Symbol::intern(self.as_str().trim_left_matches('\'')), self.span)
}
+ /// "Normalize" ident for use in comparisons using "item hygiene".
+ /// Identifiers with same string value become same if they came from the same "modern" macro
+ /// (e.g. `macro` item, but not `macro_rules` item) and stay different if they came from
+ /// different "modern" macros.
+ /// Technically, this operation strips all non-opaque marks from ident's syntactic context.
pub fn modern(self) -> Ident {
Ident::new(self.name, self.span.modern())
}
pub fn as_str(self) -> LocalInternedString {
self.name.as_str()
}
+
+ pub fn as_interned_str(self) -> InternedString {
+ self.name.as_interned_str()
+ }
}
impl PartialEq for Ident {
#[link(name = "rust_test_helpers", kind = "static")]
extern {
#[no_output]
- //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
fn some_definitely_unknown_symbol_which_should_be_removed();
#[nop_attr]
- //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
fn rust_get_test_int() -> isize;
emit_input!(fn rust_dbg_extern_identity_u32(arg: u32) -> u32;);
- //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
}
#![deny(const_err)]
pub const A: i8 = -std::i8::MIN; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
-//~| ERROR this expression will panic at runtime
pub const B: u8 = 200u8 + 200u8; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
pub const C: u8 = 200u8 * 4; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
pub const D: u8 = 42u8 - (42u8 + 1); //~ ERROR const_err
-//~^ ERROR this constant cannot be used
pub const E: u8 = [5u8][1]; //~ ERROR const_err
-//~| ERROR this constant cannot be used
fn main() {
let _a = A;
#![deny(const_err)]
pub const A: i8 = -std::i8::MIN;
-//~^ ERROR E0080
-//~| ERROR attempt to negate with overflow
-//~| ERROR this expression will panic at runtime
-//~| ERROR this constant cannot be used
+//~^ ERROR this constant cannot be used
pub const B: i8 = A;
//~^ ERROR const_err
//~| ERROR const_err
// Make sure that the two uses get two errors.
const FOO: u8 = [5u8][1];
//~^ ERROR constant evaluation error
-//~| ERROR constant evaluation error
//~| index out of bounds: the len is 1 but the index is 1
fn main() {
black_box((FOO, FOO));
+ //~^ ERROR referenced constant has errors
+ //~| ERROR could not evaluate constant
}
//~^ ERROR this constant cannot be used
(
i8::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_I16: (i16,) =
//~^ ERROR this constant cannot be used
(
i16::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_I32: (i32,) =
//~^ ERROR this constant cannot be used
(
i32::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_I64: (i64,) =
//~^ ERROR this constant cannot be used
(
i64::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_U8: (u8,) =
//~^ ERROR this constant cannot be used
(
u8::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_U16: (u16,) = (
//~^ ERROR this constant cannot be used
u16::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_U32: (u32,) = (
//~^ ERROR this constant cannot be used
u32::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_U64: (u64,) =
//~^ ERROR this constant cannot be used
(
u64::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
fn main() {
//~^ ERROR this constant cannot be used
(
i8::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_I16: (i16,) =
//~^ ERROR this constant cannot be used
(
i16::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_I32: (i32,) =
//~^ ERROR this constant cannot be used
(
i32::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_I64: (i64,) =
//~^ ERROR this constant cannot be used
(
i64::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_U8: (u8,) =
//~^ ERROR this constant cannot be used
(
u8::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_U16: (u16,) = (
//~^ ERROR this constant cannot be used
u16::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_U32: (u32,) = (
//~^ ERROR this constant cannot be used
u32::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_U64: (u64,) =
//~^ ERROR this constant cannot be used
(
u64::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
fn main() {
//~^ ERROR this constant cannot be used
(
i8::MIN * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_I16: (i16,) =
//~^ ERROR this constant cannot be used
(
i16::MIN * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_I32: (i32,) =
//~^ ERROR this constant cannot be used
(
i32::MIN * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_I64: (i64,) =
//~^ ERROR this constant cannot be used
(
i64::MIN * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_U8: (u8,) =
//~^ ERROR this constant cannot be used
(
u8::MAX * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_U16: (u16,) = (
//~^ ERROR this constant cannot be used
u16::MAX * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_U32: (u32,) = (
//~^ ERROR this constant cannot be used
u32::MAX * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_U64: (u64,) =
//~^ ERROR this constant cannot be used
(
u64::MAX * 2,
- //~^ ERROR attempt to multiply with overflow
);
fn main() {
// except according to those terms.
// ignore-tidy-linelength
-// compile-flags: --edition=2015 -Zunstable-options
+// edition:2015
// tests that editions work with the tyvar warning-turned-error
// except according to those terms.
// ignore-tidy-linelength
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
// tests that editions work with the tyvar warning-turned-error
#[link(name = "rust_test_helpers", kind = "static")]
extern {
returns_isize!(rust_get_test_int);
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
emits_nothing!();
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
}
fn main() {
assert::<Rc<RefCell<i32>>>();
//~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
- //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+ //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
}
fn main() {
assert::<Arc<RefCell<i32>>>();
//~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
- //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+ //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
}
fn main() {
assert::<&RefCell<i32>>();
//~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
- //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+ //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
}
fn main() {
assert::<*mut RefCell<i32>>();
//~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
- //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+ //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(extern_absolute_paths)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(extern_absolute_paths)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(extern_absolute_paths)]
// except according to those terms.
// aux-build:xcrate.rs
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(crate_in_paths)]
#![feature(extern_absolute_paths)]
--- /dev/null
+-include ../tools.mk
+
+ifdef IS_WINDOWS
+# Do nothing on MSVC.
+all:
+ exit 0
+else
+all:
+ $(RUSTC) --emit=obj app.rs
+ nm $(TMPDIR)/app.o | $(CGREP) rust_begin_unwind
+ nm $(TMPDIR)/app.o | $(CGREP) rust_eh_personality
+ nm $(TMPDIR)/app.o | $(CGREP) rust_oom
+endif
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "bin"]
+#![feature(lang_items)]
+#![feature(panic_implementation)]
+#![no_main]
+#![no_std]
+
+use core::panic::PanicInfo;
+
+#[panic_implementation]
+fn panic(_: &PanicInfo) -> ! {
+ loop {}
+}
+
+#[lang = "eh_personality"]
+fn eh() {}
+
+#[lang = "oom"]
+fn oom() {}
def_info: None,
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
unstable_feature: None,
edition: hygiene::default_edition(),
});
fn encode_json<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
write!(wr, "{}", json::as_json(val));
}
-fn encode_opaque<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
+fn encode_opaque<T: Encodable>(val: &T, wr: Vec<u8>) {
let mut encoder = opaque::Encoder::new(wr);
val.encode(&mut encoder);
}
pub fn main() {
let target = Foo{baz: false,};
- let mut wr = Cursor::new(Vec::new());
let proto = WireProtocol::JSON;
match proto {
- WireProtocol::JSON => encode_json(&target, &mut wr),
- WireProtocol::Opaque => encode_opaque(&target, &mut wr)
+ WireProtocol::JSON => encode_json(&target, &mut Cursor::new(Vec::new())),
+ WireProtocol::Opaque => encode_opaque(&target, Vec::new())
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2015.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2015.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// except according to those terms.
// aux-build:xcrate.rs
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(extern_absolute_paths)]
//
// Regression test for #47075.
-// compile-flags: --test --edition=2018 -Zunstable-options
+// edition:2018
+// compile-flags: --test
#![feature(extern_absolute_paths)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(raw_identifiers)]
-warning: attempt to subtract with overflow
- --> $DIR/conditional_array_execution.rs:15:19
+warning: this constant cannot be used
+ --> $DIR/conditional_array_execution.rs:15:1
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
- | ^^^^^
+ | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | attempt to subtract with overflow
|
note: lint level defined here
--> $DIR/conditional_array_execution.rs:11:9
LL | #![warn(const_err)]
| ^^^^^^^^^
-warning: this constant cannot be used
- --> $DIR/conditional_array_execution.rs:15:1
- |
-LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
- | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
- | |
- | attempt to subtract with overflow
-
-warning: referenced constant
- --> $DIR/conditional_array_execution.rs:20:20
+warning: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
| ^^^
warning: this expression will panic at runtime
- --> $DIR/conditional_array_execution.rs:20:20
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | println!("{}", FOO);
| ^^^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/conditional_array_execution.rs:20:5
+error[E0080]: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:5
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
error[E0080]: erroneous constant used
- --> $DIR/conditional_array_execution.rs:20:5
+ --> $DIR/conditional_array_execution.rs:19:5
|
LL | println!("{}", FOO);
| ^^^^^^^^^^^^^^^---^^
|
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-error[E0080]: referenced constant
- --> $DIR/conditional_array_execution.rs:20:20
+error[E0080]: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
| ^^^
error[E0080]: erroneous constant used
- --> $DIR/conditional_array_execution.rs:20:20
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | println!("{}", FOO);
| ^^^ referenced constant has errors
const X: u32 = 5;
const Y: u32 = 6;
const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
fn main() {
println!("{}", FOO);
-warning: attempt to subtract with overflow
- --> $DIR/conditional_array_execution.rs:15:19
+warning: this constant cannot be used
+ --> $DIR/conditional_array_execution.rs:15:1
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
- | ^^^^^
+ | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | attempt to subtract with overflow
|
note: lint level defined here
--> $DIR/conditional_array_execution.rs:11:9
LL | #![warn(const_err)]
| ^^^^^^^^^
-warning: this constant cannot be used
- --> $DIR/conditional_array_execution.rs:15:1
- |
-LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
- | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
- | |
- | attempt to subtract with overflow
-
-warning: referenced constant
- --> $DIR/conditional_array_execution.rs:20:20
+warning: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
| ^^^
warning: this expression will panic at runtime
- --> $DIR/conditional_array_execution.rs:20:20
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | println!("{}", FOO);
| ^^^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/conditional_array_execution.rs:20:20
+error[E0080]: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
| ^^^
error[E0080]: erroneous constant used
- --> $DIR/conditional_array_execution.rs:20:20
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | println!("{}", FOO);
| ^^^ referenced constant has errors
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+pub trait Nullable {
+ const NULL: Self;
+
+ fn is_null(&self) -> bool;
+}
+
+impl<T> Nullable for *const T {
+ const NULL: Self = 0 as *const T;
+
+ fn is_null(&self) -> bool {
+ *self == Self::NULL
+ }
+}
+
+fn main() {
+}
-warning: attempt to subtract with overflow
- --> $DIR/issue-43197.rs:20:20
- |
-LL | const X: u32 = 0-1;
- | ^^^
- |
-note: lint level defined here
- --> $DIR/issue-43197.rs:11:9
- |
-LL | #![warn(const_err)]
- | ^^^^^^^^^
-
warning: this constant cannot be used
--> $DIR/issue-43197.rs:20:5
|
| ^^^^^^^^^^^^^^^---^
| |
| attempt to subtract with overflow
-
-warning: attempt to subtract with overflow
- --> $DIR/issue-43197.rs:23:24
|
-LL | const Y: u32 = foo(0-1);
- | ^^^
+note: lint level defined here
+ --> $DIR/issue-43197.rs:11:9
+ |
+LL | #![warn(const_err)]
+ | ^^^^^^^^^
warning: this constant cannot be used
- --> $DIR/issue-43197.rs:23:5
+ --> $DIR/issue-43197.rs:22:5
|
LL | const Y: u32 = foo(0-1);
| ^^^^^^^^^^^^^^^^^^^---^^
| |
| attempt to subtract with overflow
-warning: referenced constant
- --> $DIR/issue-43197.rs:26:23
+warning: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:23
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
| ^
warning: this expression will panic at runtime
- --> $DIR/issue-43197.rs:26:23
+ --> $DIR/issue-43197.rs:24:23
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-warning: referenced constant
- --> $DIR/issue-43197.rs:26:26
+warning: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:26
|
LL | const Y: u32 = foo(0-1);
| --- attempt to subtract with overflow
-...
+LL | //~^ WARN this constant cannot be used
LL | println!("{} {}", X, Y);
| ^
warning: this expression will panic at runtime
- --> $DIR/issue-43197.rs:26:26
+ --> $DIR/issue-43197.rs:24:26
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:5
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:5
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:5
+ --> $DIR/issue-43197.rs:24:5
|
LL | println!("{} {}", X, Y);
| ^^^^^^^^^^^^^^^^^^-^^^^^
|
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:26
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:26
|
LL | const Y: u32 = foo(0-1);
| --- attempt to subtract with overflow
-...
+LL | //~^ WARN this constant cannot be used
LL | println!("{} {}", X, Y);
| ^
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:26
+ --> $DIR/issue-43197.rs:24:26
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:23
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:23
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
| ^
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:23
+ --> $DIR/issue-43197.rs:24:23
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
fn main() {
const X: u32 = 0-1;
- //~^ WARN attempt to subtract with overflow
- //~| WARN this constant cannot be used
+ //~^ WARN this constant cannot be used
const Y: u32 = foo(0-1);
- //~^ WARN attempt to subtract with overflow
- //~| WARN this constant cannot be used
+ //~^ WARN this constant cannot be used
println!("{} {}", X, Y);
//~^ WARN this expression will panic at runtime
//~| WARN this expression will panic at runtime
-warning: attempt to subtract with overflow
- --> $DIR/issue-43197.rs:20:20
- |
-LL | const X: u32 = 0-1;
- | ^^^
- |
-note: lint level defined here
- --> $DIR/issue-43197.rs:11:9
- |
-LL | #![warn(const_err)]
- | ^^^^^^^^^
-
warning: this constant cannot be used
--> $DIR/issue-43197.rs:20:5
|
| ^^^^^^^^^^^^^^^---^
| |
| attempt to subtract with overflow
-
-warning: attempt to subtract with overflow
- --> $DIR/issue-43197.rs:23:24
|
-LL | const Y: u32 = foo(0-1);
- | ^^^
+note: lint level defined here
+ --> $DIR/issue-43197.rs:11:9
+ |
+LL | #![warn(const_err)]
+ | ^^^^^^^^^
warning: this constant cannot be used
- --> $DIR/issue-43197.rs:23:5
+ --> $DIR/issue-43197.rs:22:5
|
LL | const Y: u32 = foo(0-1);
| ^^^^^^^^^^^^^^^^^^^---^^
| |
| attempt to subtract with overflow
-warning: referenced constant
- --> $DIR/issue-43197.rs:26:23
+warning: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:23
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
| ^
warning: this expression will panic at runtime
- --> $DIR/issue-43197.rs:26:23
+ --> $DIR/issue-43197.rs:24:23
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-warning: referenced constant
- --> $DIR/issue-43197.rs:26:26
+warning: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:26
|
LL | const Y: u32 = foo(0-1);
| --- attempt to subtract with overflow
-...
+LL | //~^ WARN this constant cannot be used
LL | println!("{} {}", X, Y);
| ^
warning: this expression will panic at runtime
- --> $DIR/issue-43197.rs:26:26
+ --> $DIR/issue-43197.rs:24:26
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:26
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:26
|
LL | const Y: u32 = foo(0-1);
| --- attempt to subtract with overflow
-...
+LL | //~^ WARN this constant cannot be used
LL | println!("{} {}", X, Y);
| ^
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:26
+ --> $DIR/issue-43197.rs:24:26
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:23
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:23
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
| ^
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:23
+ --> $DIR/issue-43197.rs:24:23
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-44578.rs:35:5
|
LL | const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
|
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-44578.rs:35:20
|
LL | const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-44578.rs:35:20
|
LL | const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-50814-2.rs:26:5
|
LL | const BAR: usize = [5, 6, 7][T::BOO];
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-50814.rs:27:5
|
LL | const MAX: u8 = A::MAX + B::MAX;
#![crate_type = "lib"]
pub const Z: u32 = 0 - 1;
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
pub type Foo = [i32; 0 - 1];
//~^ WARN attempt to subtract with overflow
-warning: attempt to subtract with overflow
- --> $DIR/pub_const_err.rs:16:20
+warning: this constant cannot be used
+ --> $DIR/pub_const_err.rs:16:1
|
LL | pub const Z: u32 = 0 - 1;
- | ^^^^^
+ | ^^^^^^^^^^^^^^^^^^^-----^
+ | |
+ | attempt to subtract with overflow
|
note: lint level defined here
--> $DIR/pub_const_err.rs:12:9
LL | #![warn(const_err)]
| ^^^^^^^^^
-warning: this constant cannot be used
- --> $DIR/pub_const_err.rs:16:1
- |
-LL | pub const Z: u32 = 0 - 1;
- | ^^^^^^^^^^^^^^^^^^^-----^
- | |
- | attempt to subtract with overflow
-
warning: attempt to subtract with overflow
- --> $DIR/pub_const_err.rs:20:22
+ --> $DIR/pub_const_err.rs:19:22
|
LL | pub type Foo = [i32; 0 - 1];
| ^^^^^
warning: this array length cannot be used
- --> $DIR/pub_const_err.rs:20:22
+ --> $DIR/pub_const_err.rs:19:22
|
LL | pub type Foo = [i32; 0 - 1];
| ^^^^^ attempt to subtract with overflow
#![warn(const_err)]
pub const Z: u32 = 0 - 1;
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
pub type Foo = [i32; 0 - 1];
//~^ WARN attempt to subtract with overflow
-warning: attempt to subtract with overflow
- --> $DIR/pub_const_err_bin.rs:14:20
+warning: this constant cannot be used
+ --> $DIR/pub_const_err_bin.rs:14:1
|
LL | pub const Z: u32 = 0 - 1;
- | ^^^^^
+ | ^^^^^^^^^^^^^^^^^^^-----^
+ | |
+ | attempt to subtract with overflow
|
note: lint level defined here
--> $DIR/pub_const_err_bin.rs:12:9
LL | #![warn(const_err)]
| ^^^^^^^^^
-warning: this constant cannot be used
- --> $DIR/pub_const_err_bin.rs:14:1
- |
-LL | pub const Z: u32 = 0 - 1;
- | ^^^^^^^^^^^^^^^^^^^-----^
- | |
- | attempt to subtract with overflow
-
warning: attempt to subtract with overflow
- --> $DIR/pub_const_err_bin.rs:18:22
+ --> $DIR/pub_const_err_bin.rs:17:22
|
LL | pub type Foo = [i32; 0 - 1];
| ^^^^^
warning: this array length cannot be used
- --> $DIR/pub_const_err_bin.rs:18:22
+ --> $DIR/pub_const_err_bin.rs:17:22
|
LL | pub type Foo = [i32; 0 - 1];
| ^^^^^ attempt to subtract with overflow
const ONE: usize = 1;
const TWO: usize = 2;
const LEN: usize = ONE - TWO;
-//~^ ERROR E0080
-//~| ERROR attempt to subtract with overflow
fn main() {
let a: [i8; LEN] = unimplemented!();
//~^ ERROR E0080
//~| ERROR E0080
+//~| ERROR E0080
+//~| ERROR E0080
}
-error: attempt to subtract with overflow
- --> $DIR/const-len-underflow-separate-spans.rs:17:20
+error[E0080]: referenced constant has errors
+ --> $DIR/const-len-underflow-separate-spans.rs:20:17
|
LL | const LEN: usize = ONE - TWO;
- | ^^^^^^^^^
- |
- = note: #[deny(const_err)] on by default
+ | --------- attempt to subtract with overflow
+...
+LL | let a: [i8; LEN] = unimplemented!();
+ | ^^^
-error[E0080]: constant evaluation error
- --> $DIR/const-len-underflow-separate-spans.rs:17:20
+error[E0080]: could not evaluate constant
+ --> $DIR/const-len-underflow-separate-spans.rs:20:17
|
-LL | const LEN: usize = ONE - TWO;
- | ^^^^^^^^^ attempt to subtract with overflow
+LL | let a: [i8; LEN] = unimplemented!();
+ | ^^^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/const-len-underflow-separate-spans.rs:22:12
+error[E0080]: referenced constant has errors
+ --> $DIR/const-len-underflow-separate-spans.rs:20:12
|
LL | const LEN: usize = ONE - TWO;
| --------- attempt to subtract with overflow
| ^^^^^^^^^
error[E0080]: could not evaluate constant expression
- --> $DIR/const-len-underflow-separate-spans.rs:22:12
+ --> $DIR/const-len-underflow-separate-spans.rs:20:12
|
LL | let a: [i8; LEN] = unimplemented!();
| ^^^^^---^
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2015.rs
// compile-pass
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2015.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2015.rs
// compile-pass
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2015.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
#![feature(futures_api)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
+
#![feature(futures_api)]
async fn foo() {} //~ ERROR async fn is unstable
error[E0658]: async fn is unstable (see issue #50547)
- --> $DIR/feature-gate-async-await.rs:14:1
+ --> $DIR/feature-gate-async-await.rs:15:1
|
LL | async fn foo() {} //~ ERROR async fn is unstable
| ^^^^^^^^^^^^^^^^^
= help: add #![feature(async_await)] to the crate attributes to enable
error[E0658]: async blocks are unstable (see issue #50547)
- --> $DIR/feature-gate-async-await.rs:17:13
+ --> $DIR/feature-gate-async-await.rs:18:13
|
LL | let _ = async {}; //~ ERROR async blocks are unstable
| ^^^^^^^^
= help: add #![feature(async_await)] to the crate attributes to enable
error[E0658]: async closures are unstable (see issue #50547)
- --> $DIR/feature-gate-async-await.rs:18:13
+ --> $DIR/feature-gate-async-await.rs:19:13
|
LL | let _ = async || {}; //~ ERROR async closures are unstable
| ^^^^^^^^^^^
// gate is not used.
macro_rules! m { ($(a)?) => {} }
-//~^ ERROR Using the `?` macro Kleene operator for "at most one" repetition is unstable
+//~^ ERROR using the `?` macro Kleene operator for "at most one" repetition is unstable
fn main() {
m!();
-error[E0658]: Using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075)
+error[E0658]: using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075)
--> $DIR/feature-gate-macro_at_most_once_rep.rs:14:20
|
LL | macro_rules! m { ($(a)?) => {} }
#[link(name = "rust_test_helpers", kind = "static")]
extern {
returns_isize!(rust_get_test_int);
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
emits_nothing!();
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
}
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
--> $DIR/feature-gate-macros_in_extern.rs:29:5
|
LL | returns_isize!(rust_get_test_int);
|
= help: add #![feature(macros_in_extern)] to the crate attributes to enable
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
--> $DIR/feature-gate-macros_in_extern.rs:31:5
|
LL | takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
|
= help: add #![feature(macros_in_extern)] to the crate attributes to enable
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
--> $DIR/feature-gate-macros_in_extern.rs:33:5
|
LL | emits_nothing!();
fn main() {
let _ : &(Send,) = &((),);
- //~^ ERROR Unsized tuple coercion is not stable enough
+ //~^ ERROR unsized tuple coercion is not stable enough
}
-error[E0658]: Unsized tuple coercion is not stable enough for use and is subject to change (see issue #42877)
+error[E0658]: unsized tuple coercion is not stable enough for use and is subject to change (see issue #42877)
--> $DIR/feature-gate-unsized_tuple_coercion.rs:12:24
|
LL | let _ : &(Send,) = &((),);
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[macro_export]
+macro_rules! helper1 {
+ () => ( struct S; )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! helper2 {
+ () => ( helper1!(); )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! public_macro {
+ () => ( helper2!(); )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! public_macro_dynamic {
+ ($helper: ident) => ( $helper!(); )
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+// aux-build:local_inner_macros.rs
+
+#![feature(use_extern_macros)]
+
+extern crate local_inner_macros;
+
+use local_inner_macros::{public_macro, public_macro_dynamic};
+
+public_macro!();
+
+macro_rules! local_helper {
+ () => ( struct Z; )
+}
+
+public_macro_dynamic!(local_helper);
+
+fn main() {
+ let s = S;
+ let z = Z;
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// `local_inner_macros` has no effect if `feature(use_extern_macros)` is not enabled
+
+// aux-build:local_inner_macros.rs
+// error-pattern: cannot find macro `helper2!` in this scope
+
+#[macro_use(public_macro)]
+extern crate local_inner_macros;
+
+public_macro!();
+
+fn main() {}
--- /dev/null
+error: cannot find macro `helper2!` in this scope
+ --> $DIR/local_inner_macros_disabled.rs:19:1
+ |
+LL | public_macro!();
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
+
+error: aborting due to previous error
+
--- /dev/null
+warning: not reporting region error due to nll
+ --> $DIR/static-return-lifetime-infered.rs:17:16
+ |
+LL | self.x.iter().map(|a| a.0)
+ | ^^^^
+
+warning: not reporting region error due to nll
+ --> $DIR/static-return-lifetime-infered.rs:21:16
+ |
+LL | self.x.iter().map(|a| a.0)
+ | ^^^^
+
+error: free region `` does not outlive free region `'static`
+ --> $DIR/static-return-lifetime-infered.rs:17:9
+ |
+LL | self.x.iter().map(|a| a.0)
+ | ^^^^^^^^^^^^^
+
+error: free region `'a` does not outlive free region `'static`
+ --> $DIR/static-return-lifetime-infered.rs:21:9
+ |
+LL | self.x.iter().map(|a| a.0)
+ | ^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct A {
+ x: [(u32, u32); 10]
+}
+
+impl A {
+ fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+ self.x.iter().map(|a| a.0)
+ }
+ //~^^ ERROR cannot infer an appropriate lifetime
+ fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+ self.x.iter().map(|a| a.0)
+ }
+ //~^^ ERROR cannot infer an appropriate lifetime
+}
+
+fn main() {}
--- /dev/null
+error: cannot infer an appropriate lifetime
+ --> $DIR/static-return-lifetime-infered.rs:17:16
+ |
+LL | fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+ | ----------------------- this return type evaluates to the `'static` lifetime...
+LL | self.x.iter().map(|a| a.0)
+ | ------ ^^^^
+ | |
+ | ...but this borrow...
+ |
+note: ...can't outlive the anonymous lifetime #1 defined on the method body at 16:5
+ --> $DIR/static-return-lifetime-infered.rs:16:5
+ |
+LL | / fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+LL | | self.x.iter().map(|a| a.0)
+LL | | }
+ | |_____^
+help: you can add a constraint to the return type to make it last less than `'static` and match the anonymous lifetime #1 defined on the method body at 16:5
+ |
+LL | fn iter_values_anon(&self) -> impl Iterator<Item=u32> + '_ {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: cannot infer an appropriate lifetime
+ --> $DIR/static-return-lifetime-infered.rs:21:16
+ |
+LL | fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+ | ----------------------- this return type evaluates to the `'static` lifetime...
+LL | self.x.iter().map(|a| a.0)
+ | ------ ^^^^
+ | |
+ | ...but this borrow...
+ |
+note: ...can't outlive the lifetime 'a as defined on the method body at 20:5
+ --> $DIR/static-return-lifetime-infered.rs:20:5
+ |
+LL | fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime 'a as defined on the method body at 20:5
+ |
+LL | fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> + 'a {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
// tests that the anonymous_parameters lint is warn-by-default on the 2018 edition
// compile-pass
-// compile-flags: --edition=2018
+// edition:2018
// run-rustfix
trait Foo {
// tests that the anonymous_parameters lint is warn-by-default on the 2018 edition
// compile-pass
-// compile-flags: --edition=2018
+// edition:2018
// run-rustfix
trait Foo {
= note: where '_#1r: '_#0r
error: free region `ReFree(DefId(0/0:6 ~ propagate_approximated_shorter_to_static_no_bound[317d]::supply[0]), BrNamed(crate0:DefIndex(1:16), 'a))` does not outlive free region `ReStatic`
- --> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:45:5
+ --> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:45:47
|
-LL | / establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
+LL | establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
+ | _______________________________________________^
LL | | //~^ ERROR does not outlive free region
LL | |
LL | | // Only works if 'x: 'y:
LL | | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
LL | | });
- | |______^
+ | |_____^
note: No external requirements
--> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:44:1
= note: where '_#1r: '_#0r
error: free region `ReFree(DefId(0/0:6 ~ propagate_approximated_shorter_to_static_wrong_bound[317d]::supply[0]), BrNamed(crate0:DefIndex(1:16), 'a))` does not outlive free region `ReStatic`
- --> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:48:5
+ --> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:48:47
|
-LL | / establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
+LL | establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
+ | _______________________________________________^
LL | | //~^ ERROR does not outlive free region
LL | | // Only works if 'x: 'y:
LL | | demand_y(x, y, x.get())
LL | | //~^ WARNING not reporting region error due to nll
LL | | });
- | |______^
+ | |_____^
note: No external requirements
--> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:47:1
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+//
+// An additional regression test for the issue #50716 “NLL ignores lifetimes
+// bounds derived from `Sized` requirements” that checks that the fixed compiler
+// accepts this code fragment with both AST and MIR borrow checkers.
+//
+// revisions: ast mir
+//
+// compile-pass
+
+#![cfg_attr(mir, feature(nll))]
+
+struct Qey<Q: ?Sized>(Q);
+
+fn main() {}
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+//
+// Regression test for the issue #50716: NLL ignores lifetimes bounds
+// derived from `Sized` requirements
+
+#![feature(nll)]
+
+trait A {
+ type X: ?Sized;
+}
+
+fn foo<'a, T: 'static>(s: Box<<&'a T as A>::X>)
+where
+ for<'b> &'b T: A,
+ <&'static T as A>::X: Sized
+{
+ let _x = *s; //~ ERROR free region `'a` does not outlive free region `'static`
+}
+
+fn main() {}
--- /dev/null
+error: free region `'a` does not outlive free region `'static`
+ --> $DIR/issue-50716.rs:25:14
+ |
+LL | let _x = *s; //~ ERROR free region `'a` does not outlive free region `'static`
+ | ^^
+
+error: aborting due to previous error
+
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
| - expected `()` because of default return type
...
LL | let u: &str = if true { s[..2] } else { s };
- | ^^^^^^ expected &str, found str
+ | ^^^^^^
+ | |
+ | expected &str, found str
+ | help: consider borrowing here: `&s[..2]`
|
= note: expected type `&str`
found type `str`
.extend(flags.split_whitespace().map(|s| s.to_owned()));
}
+ if let Some(edition) = config.parse_edition(ln) {
+ self.compile_flags.push(format!("--edition={}", edition));
+ }
+
if let Some(r) = config.parse_revisions(ln) {
self.revisions.extend(r);
}
self.compile_pass = config.parse_compile_pass(ln) || self.run_pass;
}
- if !self.skip_codegen {
- self.skip_codegen = config.parse_skip_codegen(ln);
- }
+ if !self.skip_codegen {
+ self.skip_codegen = config.parse_skip_codegen(ln);
+ }
if !self.disable_ui_testing_normalization {
self.disable_ui_testing_normalization =
fn parse_run_rustfix(&self, line: &str) -> bool {
self.parse_name_directive(line, "run-rustfix")
}
+
+ fn parse_edition(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "edition")
+ }
}
pub fn lldb_version_to_int(version_string: &str) -> isize {
.arg(out_dir)
.arg(&self.testpaths.file)
.args(&self.props.compile_flags);
+
if let Some(ref linker) = self.config.linker {
rustdoc
.arg("--linker")