//! heap allocation in Rust. Boxes provide ownership for this allocation, and
//! drop their contents when they go out of scope.
//!
-//! For non-zero-sized values, a [`Box`] will use the [`Global`] allocator for
-//! its allocation. It is valid to convert both ways between a [`Box`] and a
-//! raw pointer allocated with the [`Global`] allocator, given that the
-//! [`Layout`] used with the allocator is correct for the type. More precisely,
-//! a `value: *mut T` that has been allocated with the [`Global`] allocator
-//! with `Layout::for_value(&*value)` may be converted into a box using
-//! `Box::<T>::from_raw(value)`. Conversely, the memory backing a `value: *mut
-//! T` obtained from `Box::<T>::into_raw` may be deallocated using the
-//! [`Global`] allocator with `Layout::for_value(&*value)`.
-//!
//! # Examples
//!
//! Move a value from the stack to the heap by creating a [`Box`]:
//! for a `Cons`. By introducing a `Box`, which has a defined size, we know how
//! big `Cons` needs to be.
//!
+//! # Memory layout
+//!
+//! For non-zero-sized values, a [`Box`] will use the [`Global`] allocator for
+//! its allocation. It is valid to convert both ways between a [`Box`] and a
+//! raw pointer allocated with the [`Global`] allocator, given that the
+//! [`Layout`] used with the allocator is correct for the type. More precisely,
+//! a `value: *mut T` that has been allocated with the [`Global`] allocator
+//! with `Layout::for_value(&*value)` may be converted into a box using
+//! `Box::<T>::from_raw(value)`. Conversely, the memory backing a `value: *mut
+//! T` obtained from `Box::<T>::into_raw` may be deallocated using the
+//! [`Global`] allocator with `Layout::for_value(&*value)`.
+//!
+//!
//! [dereferencing]: ../../std/ops/trait.Deref.html
//! [`Box`]: struct.Box.html
//! [`Global`]: ../alloc/struct.Global.html
///
/// After calling this function, the raw pointer is owned by the
/// resulting `Box`. Specifically, the `Box` destructor will call
- /// the destructor of `T` and free the allocated memory. Since the
- /// way `Box` allocates and releases memory is unspecified, the
- /// only valid pointer to pass to this function is the one taken
- /// from another `Box` via the [`Box::into_raw`] function.
+ /// the destructor of `T` and free the allocated memory. For this
+ /// to be safe, the memory must have been allocated in accordance
+    /// with the [memory layout] used by `Box`.
+ ///
+ /// # Safety
///
/// This function is unsafe because improper use may lead to
/// memory problems. For example, a double-free may occur if the
/// function is called twice on the same raw pointer.
///
- /// [`Box::into_raw`]: struct.Box.html#method.into_raw
- ///
/// # Examples
- ///
+ /// Recreate a `Box` which was previously converted to a raw pointer
+ /// using [`Box::into_raw`]:
/// ```
/// let x = Box::new(5);
/// let ptr = Box::into_raw(x);
/// let x = unsafe { Box::from_raw(ptr) };
/// ```
+ /// Manually create a `Box` from scratch by using the global allocator:
+ /// ```
+ /// use std::alloc::{alloc, Layout};
+ ///
+ /// unsafe {
+ /// let ptr = alloc(Layout::new::<i32>()) as *mut i32;
+ /// *ptr = 5;
+ /// let x = Box::from_raw(ptr);
+ /// }
+ /// ```
+ ///
+ /// [memory layout]: index.html#memory-layout
+ /// [`Layout`]: ../alloc/struct.Layout.html
+ /// [`Box::into_raw`]: struct.Box.html#method.into_raw
#[stable(feature = "box_raw", since = "1.4.0")]
#[inline]
pub unsafe fn from_raw(raw: *mut T) -> Self {
///
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Box`. In particular, the
- /// caller should properly destroy `T` and release the memory. The
- /// proper way to do so is to convert the raw pointer back into a
- /// `Box` with the [`Box::from_raw`] function.
+ /// caller should properly destroy `T` and release the memory, taking
+ /// into account the [memory layout] used by `Box`. The easiest way to
+ /// do this is to convert the raw pointer back into a `Box` with the
+ /// [`Box::from_raw`] function, allowing the `Box` destructor to perform
+ /// the cleanup.
///
/// Note: this is an associated function, which means that you have
/// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
/// is so that there is no conflict with a method on the inner type.
///
- /// [`Box::from_raw`]: struct.Box.html#method.from_raw
- ///
/// # Examples
- ///
+ /// Converting the raw pointer back into a `Box` with [`Box::from_raw`]
+ /// for automatic cleanup:
/// ```
- /// let x = Box::new(5);
+ /// let x = Box::new(String::from("Hello"));
/// let ptr = Box::into_raw(x);
+ /// let x = unsafe { Box::from_raw(ptr) };
+ /// ```
+ /// Manual cleanup by explicitly running the destructor and deallocating
+ /// the memory:
/// ```
+ /// use std::alloc::{dealloc, Layout};
+ /// use std::ptr;
+ ///
+ /// let x = Box::new(String::from("Hello"));
+ /// let p = Box::into_raw(x);
+ /// unsafe {
+ /// ptr::drop_in_place(p);
+ /// dealloc(p as *mut u8, Layout::new::<String>());
+ /// }
+ /// ```
+ ///
+ /// [memory layout]: index.html#memory-layout
+ /// [`Box::from_raw`]: struct.Box.html#method.from_raw
#[stable(feature = "box_raw", since = "1.4.0")]
#[inline]
pub fn into_raw(b: Box<T>) -> *mut T {
/// After calling this function, the caller is responsible for the
/// memory previously managed by the `Box`. In particular, the
/// caller should properly destroy `T` and release the memory. The
- /// proper way to do so is to convert the `NonNull<T>` pointer
+ /// easiest way to do so is to convert the `NonNull<T>` pointer
/// into a raw pointer and back into a `Box` with the [`Box::from_raw`]
/// function.
///
/// fn main() {
/// let x = Box::new(5);
/// let ptr = Box::into_raw_non_null(x);
+ ///
+ /// // Clean up the memory by converting the NonNull pointer back
+ /// // into a Box and letting the Box be dropped.
+ /// let x = unsafe { Box::from_raw(ptr.as_ptr()) };
/// }
/// ```
#[unstable(feature = "box_into_raw_non_null", issue = "47336")]
////////////////////////////////////////////////////////////////////////////////
// HACK(japaric) needed for the implementation of `vec!` macro during testing
-// NB see the hack module in this file for more details
+// N.B., see the `hack` module in this file for more details.
#[cfg(test)]
pub use hack::into_vec;
// HACK(japaric) needed for the implementation of `Vec::clone` during testing
-// NB see the hack module in this file for more details
+// N.B., see the `hack` module in this file for more details.
#[cfg(test)]
pub use hack::to_vec;
pub fn to_vec(&self) -> Vec<T>
where T: Clone
{
- // NB see hack module in this file
+ // N.B., see the `hack` module in this file for more details.
hack::to_vec(self)
}
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn into_vec(self: Box<Self>) -> Vec<T> {
- // NB see hack module in this file
+ // N.B., see the `hack` module in this file for more details.
hack::into_vec(self)
}
//! mutability:
//!
//! ```
+//! use std::cell::{RefCell, RefMut};
//! use std::collections::HashMap;
-//! use std::cell::RefCell;
//! use std::rc::Rc;
//!
//! fn main() {
//! let shared_map: Rc<RefCell<_>> = Rc::new(RefCell::new(HashMap::new()));
-//! shared_map.borrow_mut().insert("africa", 92388);
-//! shared_map.borrow_mut().insert("kyoto", 11837);
-//! shared_map.borrow_mut().insert("piccadilly", 11826);
-//! shared_map.borrow_mut().insert("marbles", 38);
+//! // Create a new block to limit the scope of the dynamic borrow
+//! {
+//! let mut map: RefMut<_> = shared_map.borrow_mut();
+//! map.insert("africa", 92388);
+//! map.insert("kyoto", 11837);
+//! map.insert("piccadilly", 11826);
+//! map.insert("marbles", 38);
+//! }
+//!
+//! // Note that if we had not let the previous borrow of the cache fall out
+//! // of scope then the subsequent borrow would cause a dynamic thread panic.
+//! // This is the major hazard of using `RefCell`.
+//! let total: i32 = shared_map.borrow().values().sum();
+//! println!("{}", total);
//! }
//! ```
//!
//!
//! impl Graph {
//! fn minimum_spanning_tree(&self) -> Vec<(i32, i32)> {
-//! // Create a new scope to contain the lifetime of the
-//! // dynamic borrow
-//! {
-//! // Take a reference to the inside of cache cell
-//! let mut cache = self.span_tree_cache.borrow_mut();
-//! if cache.is_some() {
-//! return cache.as_ref().unwrap().clone();
-//! }
-//!
-//! let span_tree = self.calc_span_tree();
-//! *cache = Some(span_tree);
-//! }
+//! self.span_tree_cache.borrow_mut()
+//! .get_or_insert_with(|| self.calc_span_tree())
+//! .clone()
+//! }
//!
-//! // Recursive call to return the just-cached value.
-//! // Note that if we had not let the previous borrow
-//! // of the cache fall out of scope then the subsequent
-//! // recursive borrow would cause a dynamic thread panic.
-//! // This is the major hazard of using `RefCell`.
-//! self.minimum_spanning_tree()
+//! fn calc_span_tree(&self) -> Vec<(i32, i32)> {
+//! // Expensive computation goes here
+//! vec![]
//! }
-//! # fn calc_span_tree(&self) -> Vec<(i32, i32)> { vec![] }
//! }
//! ```
//!
/// [`MaybeUninit<T>`]: union.MaybeUninit.html
/// [inv]: union.MaybeUninit.html#initialization-invariant
#[inline]
-#[rustc_deprecated(since = "1.40.0", reason = "use `mem::MaybeUninit` instead")]
+#[rustc_deprecated(since = "1.38.0", reason = "use `mem::MaybeUninit` instead")]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn uninitialized<T>() -> T {
intrinsics::panic_if_uninhabited::<T>();
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
+ #[cfg_attr(not(stage0), rustc_nonnull_optimization_guaranteed)]
pub struct $Ty($Int);
}
#[stable(feature = "nonnull", since = "1.25.0")]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
+#[cfg_attr(not(stage0), rustc_nonnull_optimization_guaranteed)]
pub struct NonNull<T: ?Sized> {
pointer: *const T,
}
use syntax::source_map::{respan, CompilerDesugaringKind, Spanned};
use syntax::source_map::CompilerDesugaringKind::IfTemporary;
use syntax::std_inject;
-use syntax::symbol::{keywords, Symbol, sym};
+use syntax::symbol::{kw, sym, Symbol};
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax::parse::token::Token;
use syntax::visit::{self, Visitor};
hir::LifetimeParamKind::InBand,
),
ParamName::Fresh(_) => (
- keywords::UnderscoreLifetime.name().as_interned_str(),
+ kw::UnderscoreLifetime.as_interned_str(),
hir::LifetimeParamKind::Elided,
),
ParamName::Error => (
- keywords::UnderscoreLifetime.name().as_interned_str(),
+ kw::UnderscoreLifetime.as_interned_str(),
hir::LifetimeParamKind::Error,
),
};
P(hir::Path {
res,
segments: hir_vec![hir::PathSegment::from_ident(
- keywords::SelfUpper.ident()
+ Ident::with_empty_ctxt(kw::SelfUpper)
)],
span: t.span,
}),
trace!("registering existential type with id {:#?}", exist_ty_id);
let exist_ty_item = hir::Item {
hir_id: exist_ty_id,
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
attrs: Default::default(),
node: exist_ty_item_kind,
vis: respan(span.shrink_to_lo(), hir::VisibilityKind::Inherited),
let (name, kind) = match name {
hir::LifetimeName::Underscore => (
- hir::ParamName::Plain(keywords::UnderscoreLifetime.ident()),
+ hir::ParamName::Plain(Ident::with_empty_ctxt(kw::UnderscoreLifetime)),
hir::LifetimeParamKind::Elided,
),
hir::LifetimeName::Param(param_name) => (
.iter()
.map(|arg| match arg.pat.node {
PatKind::Ident(_, ident, _) => ident,
- _ => Ident::new(keywords::Invalid.name(), arg.pat.span),
+ _ => Ident::new(kw::Invalid, arg.pat.span),
})
.collect()
}
fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
let span = l.ident.span;
match l.ident {
- ident if ident.name == keywords::StaticLifetime.name() =>
+ ident if ident.name == kw::StaticLifetime =>
self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
- ident if ident.name == keywords::UnderscoreLifetime.name() =>
+ ident if ident.name == kw::UnderscoreLifetime =>
match self.anonymous_lifetime_mode {
AnonymousLifetimeMode::CreateParameter => {
let fresh_name = self.collect_fresh_in_band_lifetime(span);
// Don't expose `Self` (recovered "keyword used as ident" parse error).
// `rustc::ty` expects `Self` to be only used for a trait's `Self`.
// Instead, use `gensym("Self")` to create a distinct name that looks the same.
- let ident = if param.ident.name == keywords::SelfUpper.name() {
+ let ident = if param.ident.name == kw::SelfUpper {
param.ident.gensym()
} else {
param.ident
// Correctly resolve `self` imports.
if path.segments.len() > 1
- && path.segments.last().unwrap().ident.name == keywords::SelfLower.name()
+ && path.segments.last().unwrap().ident.name == kw::SelfLower
{
let _ = path.segments.pop();
if rename.is_none() {
use syntax::ast::*;
use syntax::ext::hygiene::Mark;
use syntax::visit;
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax::symbol::Symbol;
use syntax::parse::token::{self, Token};
use syntax_pos::Span;
// information we encapsulate into, the better
let def_data = match i.node {
ItemKind::Impl(..) => DefPathData::Impl,
- ItemKind::Mod(..) if i.ident == keywords::Invalid.ident() => {
+ ItemKind::Mod(..) if i.ident.name == kw::Invalid => {
return visit::walk_item(self, i);
}
ItemKind::Mod(..) | ItemKind::Trait(..) | ItemKind::TraitAlias(..) |
pub fn ty_param_name(&self, id: HirId) -> Name {
match self.get_by_hir_id(id) {
Node::Item(&Item { node: ItemKind::Trait(..), .. }) |
- Node::Item(&Item { node: ItemKind::TraitAlias(..), .. }) => keywords::SelfUpper.name(),
+ Node::Item(&Item { node: ItemKind::TraitAlias(..), .. }) => kw::SelfUpper,
Node::GenericParam(param) => param.name.ident().name,
_ => bug!("ty_param_name: {} not a type parameter", self.hir_to_string(id)),
}
}
}
- /// Returns the name associated with the given NodeId's AST.
+ /// Returns the name associated with the given `NodeId`'s AST.
pub fn name(&self, id: NodeId) -> Name {
let hir_id = self.node_to_hir_id(id);
self.name_by_hir_id(hir_id)
use syntax::attr::{InlineAttr, OptimizeAttr};
use syntax::ext::hygiene::SyntaxContext;
use syntax::ptr::P;
-use syntax::symbol::{Symbol, keywords};
+use syntax::symbol::{Symbol, kw};
use syntax::tokenstream::TokenStream;
use syntax::util::parser::ExprPrecedence;
use crate::ty::AdtKind;
pub span: Span,
/// Either "`'a`", referring to a named lifetime definition,
- /// or "``" (i.e., `keywords::Invalid`), for elision placeholders.
+ /// or "``" (i.e., `kw::Invalid`), for elision placeholders.
///
/// HIR lowering inserts these placeholders in type paths that
/// refer to type definitions needing lifetime parameters,
pub fn ident(&self) -> Ident {
match *self {
ParamName::Plain(ident) => ident,
- ParamName::Error | ParamName::Fresh(_) => keywords::UnderscoreLifetime.ident(),
+ ParamName::Fresh(_) |
+ ParamName::Error => Ident::with_empty_ctxt(kw::UnderscoreLifetime),
}
}
impl LifetimeName {
pub fn ident(&self) -> Ident {
match *self {
- LifetimeName::Implicit => keywords::Invalid.ident(),
- LifetimeName::Error => keywords::Invalid.ident(),
- LifetimeName::Underscore => keywords::UnderscoreLifetime.ident(),
- LifetimeName::Static => keywords::StaticLifetime.ident(),
+ LifetimeName::Implicit | LifetimeName::Error => Ident::invalid(),
+ LifetimeName::Underscore => Ident::with_empty_ctxt(kw::UnderscoreLifetime),
+ LifetimeName::Static => Ident::with_empty_ctxt(kw::StaticLifetime),
LifetimeName::Param(param_name) => param_name.ident(),
}
}
impl Path {
pub fn is_global(&self) -> bool {
- !self.segments.is_empty() && self.segments[0].ident.name == keywords::PathRoot.name()
+ !self.segments.is_empty() && self.segments[0].ident.name == kw::PathRoot
}
}
ListStem,
}
-/// TraitRef's appear in impls.
+/// References to traits in impls.
///
-/// resolve maps each TraitRef's ref_id to its defining trait; that's all
-/// that the ref_id is for. Note that ref_id's value is not the NodeId of the
-/// trait being referred to but just a unique NodeId that serves as a key
+/// `resolve` maps each `TraitRef`'s `ref_id` to its defining trait; that's all
+/// that the `ref_id` is for. Note that `ref_id`'s value is not the `NodeId` of the
+/// trait being referred to but just a unique `NodeId` that serves as a key
/// within the resolution map.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub struct TraitRef {
use syntax::print::pp::Breaks::{Consistent, Inconsistent};
use syntax::print::pprust::{self, PrintState};
use syntax::ptr::P;
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax::util::parser::{self, AssocOp, Fixity};
use syntax_pos::{self, BytePos, FileName};
hir::VisibilityKind::Restricted { ref path, .. } => {
self.s.word("pub(")?;
if path.segments.len() == 1 &&
- path.segments[0].ident.name == keywords::Super.name() {
+ path.segments[0].ident.name == kw::Super {
// Special case: `super` can print like `pub(super)`.
self.s.word("super")?;
} else {
if i > 0 {
self.s.word("::")?
}
- if segment.ident.name != keywords::PathRoot.name() {
+ if segment.ident.name != kw::PathRoot {
self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args, segment.infer_types,
}
pub fn print_path_segment(&mut self, segment: &hir::PathSegment) -> io::Result<()> {
- if segment.ident.name != keywords::PathRoot.name() {
+ if segment.ident.name != kw::PathRoot {
self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args, segment.infer_types, false)
if i > 0 {
self.s.word("::")?
}
- if segment.ident.name != keywords::PathRoot.name() {
+ if segment.ident.name != kw::PathRoot {
self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args,
use rustc_target::spec::abi::Abi::RustIntrinsic;
use rustc_data_structures::indexed_vec::Idx;
-use syntax_pos::Span;
+use syntax_pos::{Span, sym};
use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
use crate::hir;
impl<'a, 'tcx> ExprVisitor<'a, 'tcx> {
fn def_id_is_transmute(&self, def_id: DefId) -> bool {
self.tcx.fn_sig(def_id).abi() == RustIntrinsic &&
- self.tcx.item_name(def_id) == "transmute"
+ self.tcx.item_name(def_id) == sym::transmute
}
fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>) {
use std::rc::Rc;
use syntax::ast::{self, NodeId};
use syntax::ptr::P;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::Span;
use crate::hir;
let sp = ident.span;
let var = self.variable(hir_id, sp);
// Ignore unused self.
- if ident.name != keywords::SelfLower.name() {
+ if ident.name != kw::SelfLower {
if !self.warn_about_unused(vec![sp], hir_id, entry_ln, var) {
if self.live_on_entry(entry_ln, var).is_none() {
self.report_dead_assign(hir_id, sp, var, true);
use syntax::ast;
use syntax::attr;
use syntax::ptr::P;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::Span;
use crate::hir::intravisit::{self, NestedVisitorMap, Visitor};
GenericParamKind::Lifetime { .. } => {
let (name, reg) = Region::early(&self.tcx.hir(), &mut index, ¶m);
if let hir::ParamName::Plain(param_name) = name {
- if param_name.name == keywords::UnderscoreLifetime.name() {
+ if param_name.name == kw::UnderscoreLifetime {
// Pick the elided lifetime "definition" if one exists
// and use it to make an elision scope.
elision = Some(reg);
} {
debug!("id = {:?} span = {:?} name = {:?}", id, span, name);
- if name == keywords::UnderscoreLifetime.ident() {
+ if name.name == kw::UnderscoreLifetime {
continue;
}
for (i, (lifetime_i, lifetime_i_name)) in lifetimes.iter().enumerate() {
if let hir::ParamName::Plain(_) = lifetime_i_name {
let name = lifetime_i_name.ident().name;
- if name == keywords::UnderscoreLifetime.name()
- || name == keywords::StaticLifetime.name()
+ if name == kw::UnderscoreLifetime
+ || name == kw::StaticLifetime
{
let mut err = struct_span_err!(
self.tcx.sess,
query predicates_defined_on(_: DefId)
-> Lrc<ty::GenericPredicates<'tcx>> {}
- /// Returns the predicates written explicit by the user.
+ /// Returns the predicates written explicitly by the user.
query explicit_predicates_of(_: DefId)
-> Lrc<ty::GenericPredicates<'tcx>> {}
_: DefId
) -> Result<DtorckConstraint<'tcx>, NoSolution> {}
- /// True if this is a const fn, use the `is_const_fn` to know whether your crate actually
- /// sees it as const fn (e.g., the const-fn-ness might be unstable and you might not have
- /// the feature gate active)
+ /// Returns `true` if this is a const fn, use the `is_const_fn` to know whether your crate
+ /// actually sees it as const fn (e.g., the const-fn-ness might be unstable and you might
+ /// not have the feature gate active).
///
/// **Do not call this function manually.** It is only meant to cache the base data for the
/// `is_const_fn` function.
desc { |tcx| "checking if item is const fn: `{}`", tcx.def_path_str(key) }
}
- /// Returns true if calls to the function may be promoted
+ /// Returns `true` if calls to the function may be promoted.
///
/// This is either because the function is e.g., a tuple-struct or tuple-variant
/// constructor, or because it has the `#[rustc_promotable]` attribute. The attribute should
query const_fn_is_allowed_fn_ptr(_: DefId) -> bool {}
- /// True if this is a foreign item (i.e., linked via `extern { ... }`).
+ /// Returns `true` if this is a foreign item (i.e., linked via `extern { ... }`).
query is_foreign_item(_: DefId) -> bool {}
/// Returns `Some(mutability)` if the node pointed to by `def_id` is a static item.
query static_mutability(_: DefId) -> Option<hir::Mutability> {}
- /// Get a map with the variance of every item; use `item_variance`
- /// instead.
+ /// Gets a map with the variance of every item; use `item_variance` instead.
query crate_variances(_: CrateNum) -> Lrc<ty::CrateVariancesMap<'tcx>> {
desc { "computing the variances for items in this crate" }
}
- /// Maps from def-id of a type or region parameter to its
- /// (inferred) variance.
+ /// Maps from the `DefId` of a type or region parameter to its (inferred) variance.
query variances_of(_: DefId) -> &'tcx [ty::Variance] {}
}
TypeChecking {
- /// Maps from def-id of a type to its (inferred) outlives.
+    /// Maps from the `DefId` of a type to its (inferred) outlives.
query inferred_outlives_crate(_: CrateNum)
-> Lrc<ty::CratePredicatesMap<'tcx>> {
desc { "computing the inferred outlives predicates for items in this crate" }
}
Other {
- /// Maps from an impl/trait def-id to a list of the def-ids of its items
+    /// Maps from an impl/trait `DefId` to a list of the `DefId`s of its items.
query associated_item_def_ids(_: DefId) -> Lrc<Vec<DefId>> {}
- /// Maps from a trait item to the trait item "descriptor"
+ /// Maps from a trait item to the trait item "descriptor".
query associated_item(_: DefId) -> ty::AssociatedItem {}
query impl_trait_ref(_: DefId) -> Option<ty::TraitRef<'tcx>> {}
}
TypeChecking {
- /// Maps a DefId of a type to a list of its inherent impls.
+ /// Maps a `DefId` of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
query inherent_impls(_: DefId) -> Lrc<Vec<DefId>> {
desc { |tcx| "linting {}", key.describe_as_module(tcx) }
}
- /// Checks the attributes in the module
+ /// Checks the attributes in the module.
query check_mod_attrs(key: DefId) -> () {
desc { |tcx| "checking attributes in {}", key.describe_as_module(tcx) }
}
desc { |tcx| "checking for unstable API usage in {}", key.describe_as_module(tcx) }
}
- /// Checks the loops in the module
+ /// Checks the loops in the module.
query check_mod_loops(key: DefId) -> () {
desc { |tcx| "checking loops in {}", key.describe_as_module(tcx) }
}
desc { |tcx| "collecting item types in {}", key.describe_as_module(tcx) }
}
- /// Caches CoerceUnsized kinds for impls on custom types.
+ /// Caches `CoerceUnsized` kinds for impls on custom types.
query coerce_unsized_info(_: DefId)
-> ty::adjustment::CoerceUnsizedInfo {}
}
BorrowChecking {
query borrowck(_: DefId) -> Lrc<BorrowCheckResult> {}
- /// Borrow checks the function body. If this is a closure, returns
+ /// Borrow-checks the function body. If this is a closure, returns
/// additional requirements that the closure's creator must verify.
query mir_borrowck(_: DefId) -> mir::BorrowCheckResult<'tcx> {}
}
}
Other {
- /// Evaluate a constant without running sanity checks
+ /// Evaluates a constant without running sanity checks.
///
/// **Do not use this** outside const eval. Const eval uses this to break query cycles
/// during validation. Please add a comment to every use site explaining why using
- /// `const_eval` isn't sufficient
+ /// `const_eval` isn't sufficient.
query const_eval_raw(key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>)
-> ConstEvalRawResult<'tcx> {
no_force
}
Linking {
- // The DefIds of all non-generic functions and statics in the given crate
+ // The `DefId`s of all non-generic functions and statics in the given crate
// that can be reached from outside the crate.
//
    // We expect these items to be available for being linked to.
//
- // This query can also be called for LOCAL_CRATE. In this case it will
+ // This query can also be called for `LOCAL_CRATE`. In this case it will
// compute which items will be reachable to other crates, taking into account
// the kind of crate that is currently compiled. Crates with only a
// C interface have fewer reachable things.
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
- syntax::with_globals(move || {
+ syntax::with_default_globals(move || {
let cfg = cfgspecs.into_iter().map(|s| {
let sess = parse::ParseSess::new(FilePathMapping::empty());
let filename = FileName::cfg_spec_source_code(&s);
// When the user supplies --test we should implicitly supply --cfg test
#[test]
fn test_switch_implies_cfg_test() {
- syntax::with_globals(|| {
+ syntax::with_default_globals(|| {
let matches = &match optgroups().parse(&["--test".to_string()]) {
Ok(m) => m,
Err(f) => panic!("test_switch_implies_cfg_test: {}", f),
#[test]
fn test_switch_implies_cfg_test_unless_cfg_test() {
use syntax::symbol::sym;
- syntax::with_globals(|| {
+ syntax::with_default_globals(|| {
let matches = &match optgroups().parse(&["--test".to_string(),
"--cfg=test".to_string()]) {
Ok(m) => m,
#[test]
fn test_can_print_warnings() {
- syntax::with_globals(|| {
+ syntax::with_default_globals(|| {
let matches = optgroups().parse(&["-Awarnings".to_string()]).unwrap();
let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches);
assert!(!sess.diagnostic().flags.can_emit_warnings);
});
- syntax::with_globals(|| {
+ syntax::with_default_globals(|| {
let matches = optgroups()
.parse(&["-Awarnings".to_string(), "-Dwarnings".to_string()])
.unwrap();
assert!(sess.diagnostic().flags.can_emit_warnings);
});
- syntax::with_globals(|| {
+ syntax::with_default_globals(|| {
let matches = optgroups().parse(&["-Adead_code".to_string()]).unwrap();
let registry = errors::registry::Registry::new(&[]);
let (sessopts, _) = build_session_options_and_crate_config(&matches);
}
}
- // We can only call poly_project_and_unify_type when our predicate's
- // Ty contains an inference variable - otherwise, there won't be anything to
- // unify
- if p.ty().skip_binder().has_infer_types() {
- debug!("Projecting and unifying projection predicate {:?}",
- predicate);
- match poly_project_and_unify_type(select, &obligation.with(p)) {
- Err(e) => {
- debug!(
- "evaluate_nested_obligations: Unable to unify predicate \
- '{:?}' '{:?}', bailing out",
- ty, e
- );
- return false;
- }
- Ok(Some(v)) => {
+ // There are three possible cases when we project a predicate:
+ //
+ // 1. We encounter an error. This means that it's impossible for
+            // our current type to implement the auto trait - there's no bound
+            // that we could add to our ParamEnv that would 'fix' this kind
+ // of error, as it's not caused by an unimplemented type.
+ //
+            // 2. We successfully project the predicate (Ok(Some(_))), generating
+ // some subobligations. We then process these subobligations
+ // like any other generated sub-obligations.
+ //
+            // 3. We receive an 'ambiguous' result (Ok(None))
+ // If we were actually trying to compile a crate,
+ // we would need to re-process this obligation later.
+ // However, all we care about is finding out what bounds
+ // are needed for our type to implement a particular auto trait.
+ // We've already added this obligation to our computed ParamEnv
+ // above (if it was necessary). Therefore, we don't need
+ // to do any further processing of the obligation.
+ //
+ // Note that we *must* try to project *all* projection predicates
+            // we encounter, even ones without inference variables.
+ // This ensures that we detect any projection errors,
+ // which indicate that our type can *never* implement the given
+ // auto trait. In that case, we will generate an explicit negative
+ // impl (e.g. 'impl !Send for MyType'). However, we don't
+ // try to process any of the generated subobligations -
+ // they contain no new information, since we already know
+ // that our type implements the projected-through trait,
+ // and can lead to weird region issues.
+ //
+ // Normally, we'll generate a negative impl as a result of encountering
+ // a type with an explicit negative impl of an auto trait
+ // (for example, raw pointers have !Send and !Sync impls)
+ // However, through some **interesting** manipulations of the type
+ // system, it's actually possible to write a type that never
+ // implements an auto trait due to a projection error, not a normal
+ // negative impl error. To properly handle this case, we need
+ // to ensure that we catch any potential projection errors,
+ // and turn them into an explicit negative impl for our type.
+ debug!("Projecting and unifying projection predicate {:?}",
+ predicate);
+
+ match poly_project_and_unify_type(select, &obligation.with(p)) {
+ Err(e) => {
+ debug!(
+ "evaluate_nested_obligations: Unable to unify predicate \
+ '{:?}' '{:?}', bailing out",
+ ty, e
+ );
+ return false;
+ }
+ Ok(Some(v)) => {
+ // We only care about sub-obligations
+ // when we started out trying to unify
+ // some inference variables. See the comment above
+                        // for more information
+ if p.ty().skip_binder().has_infer_types() {
if !self.evaluate_nested_obligations(
ty,
v.clone().iter().cloned(),
return false;
}
}
- Ok(None) => {
+ }
+ Ok(None) => {
+                    // It's ok not to make progress when we have no inference variables -
+                    // in that case, we were only performing unification to check if an
+                    // error occurred (which would indicate that it's impossible for our
+ // type to implement the auto trait).
+ // However, we should always make progress (either by generating
+ // subobligations or getting an error) when we started off with
+ // inference variables
+ if p.ty().skip_binder().has_infer_types() {
panic!("Unexpected result when selecting {:?} {:?}", ty, obligation)
}
}
pub use self::specialize::specialization_graph::FutureCompatOverlapErrorKind;
pub use self::engine::{TraitEngine, TraitEngineExt};
pub use self::util::{elaborate_predicates, elaborate_trait_ref, elaborate_trait_refs};
-pub use self::util::{supertraits, supertrait_def_ids, transitive_bounds,
- Supertraits, SupertraitDefIds};
+pub use self::util::{
+ supertraits, supertrait_def_ids, transitive_bounds, Supertraits, SupertraitDefIds,
+};
+pub use self::util::{expand_trait_aliases, TraitAliasExpander};
pub use self::chalk_fulfill::{
CanonicalGoal as ChalkCanonicalGoal,
)
}
-impl<'tcx,O> Obligation<'tcx,O> {
+impl<'tcx, O> Obligation<'tcx, O> {
pub fn new(cause: ObligationCause<'tcx>,
param_env: ty::ParamEnv<'tcx>,
predicate: O)
// `{Self}` is allowed
Position::ArgumentNamed(s) if s == "Self" => (),
// `{ThisTraitsName}` is allowed
- Position::ArgumentNamed(s) if s == name => (),
+ Position::ArgumentNamed(s) if s == name.as_str() => (),
// `{from_method}` is allowed
Position::ArgumentNamed(s) if s == "from_method" => (),
// `{from_desugaring}` is allowed
Position::ArgumentNamed(s) if s == "from_desugaring" => (),
// So is `{A}` if A is a type parameter
- Position::ArgumentNamed(s) => match generics.params.iter().find(|param|
- param.name == s
- ) {
+ Position::ArgumentNamed(s) => match generics.params.iter().find(|param| {
+ param.name.as_str() == s
+ }) {
Some(_) => (),
None => {
span_err!(tcx.sess, span, E0230,
Piece::NextArgument(a) => match a.position {
Position::ArgumentNamed(s) => match generic_map.get(s) {
Some(val) => val,
- None if s == name => {
+ None if s == name.as_str() => {
&trait_str
}
None => {
bounds
);
- let matching_bound = util::elaborate_predicates(self.tcx(), bounds.predicates)
+ let elaborated_predicates = util::elaborate_predicates(self.tcx(), bounds.predicates);
+ let matching_bound = elaborated_predicates
.filter_to_traits()
.find(|bound| {
self.infcx.probe(|_| {
+use errors::DiagnosticBuilder;
+use smallvec::SmallVec;
+use syntax_pos::Span;
+
use crate::hir;
use crate::hir::def_id::DefId;
use crate::traits::specialize::specialization_graph::NodeItem;
}
}
-
-struct PredicateSet<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+struct PredicateSet<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>,
set: FxHashSet<ty::Predicate<'tcx>>,
}
impl<'a, 'gcx, 'tcx> PredicateSet<'a, 'gcx, 'tcx> {
- fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PredicateSet<'a, 'gcx, 'tcx> {
- PredicateSet { tcx: tcx, set: Default::default() }
+ fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
+ Self { tcx: tcx, set: Default::default() }
}
fn insert(&mut self, pred: &ty::Predicate<'tcx>) -> bool {
}
}
+// Allows a `PredicateSet` to be populated directly from any iterator of
+// (borrowable) predicates; `insert` is backed by a hash set, so duplicates
+// are silently ignored.
+impl<'a, 'gcx, 'tcx, T: AsRef<ty::Predicate<'tcx>>> Extend<T> for PredicateSet<'a, 'gcx, 'tcx> {
+    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+        for pred in iter {
+            self.insert(pred.as_ref());
+        }
+    }
+}
+
///////////////////////////////////////////////////////////////////////////
// `Elaboration` iterator
///////////////////////////////////////////////////////////////////////////
/// "Elaboration" is the process of identifying all the predicates that
/// are implied by a source predicate. Currently this basically means
-/// walking the "supertraits" and other similar assumptions. For
-/// example, if we know that `T : Ord`, the elaborator would deduce
-/// that `T : PartialOrd` holds as well. Similarly, if we have `trait
-/// Foo : 'static`, and we know that `T : Foo`, then we know that `T :
-/// 'static`.
-pub struct Elaborator<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+/// walking the "supertraits" and other similar assumptions. For example,
+/// if we know that `T: Ord`, the elaborator would deduce that `T: PartialOrd`
+/// holds as well. Similarly, if we have `trait Foo: 'static`, and we know that
+/// `T: Foo`, then we know that `T: 'static`.
+pub struct Elaborator<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
stack: Vec<ty::Predicate<'tcx>>,
visited: PredicateSet<'a, 'gcx, 'tcx>,
}
trait_refs: impl Iterator<Item = ty::PolyTraitRef<'tcx>>)
-> Elaborator<'cx, 'gcx, 'tcx>
{
- let predicates = trait_refs.map(|trait_ref| trait_ref.to_predicate())
- .collect();
+ let predicates = trait_refs.map(|trait_ref| trait_ref.to_predicate()).collect();
elaborate_predicates(tcx, predicates)
}
{
let mut visited = PredicateSet::new(tcx);
predicates.retain(|pred| visited.insert(pred));
- Elaborator { stack: predicates, visited: visited }
+ Elaborator { stack: predicates, visited }
}
impl<'cx, 'gcx, 'tcx> Elaborator<'cx, 'gcx, 'tcx> {
FilterToTraits::new(self)
}
- fn push(&mut self, predicate: &ty::Predicate<'tcx>) {
+ fn elaborate(&mut self, predicate: &ty::Predicate<'tcx>) {
let tcx = self.visited.tcx;
match *predicate {
ty::Predicate::Trait(ref data) => {
- // Predicates declared on the trait.
+ // Get predicates declared on the trait.
let predicates = tcx.super_predicates_of(data.def_id());
- let mut predicates: Vec<_> =
- predicates.predicates
- .iter()
- .map(|(p, _)| p.subst_supertrait(tcx, &data.to_poly_trait_ref()))
- .collect();
-
+ let predicates = predicates.predicates
+ .iter()
+ .map(|(pred, _)| pred.subst_supertrait(tcx, &data.to_poly_trait_ref()));
debug!("super_predicates: data={:?} predicates={:?}",
- data, predicates);
+ data, predicates.clone());
- // Only keep those bounds that we haven't already
- // seen. This is necessary to prevent infinite
- // recursion in some cases. One common case is when
- // people define `trait Sized: Sized { }` rather than `trait
- // Sized { }`.
- predicates.retain(|r| self.visited.insert(r));
+ // Only keep those bounds that we haven't already seen.
+ // This is necessary to prevent infinite recursion in some
+ // cases. One common case is when people define
+ // `trait Sized: Sized { }` rather than `trait Sized { }`.
+ let visited = &mut self.visited;
+ let predicates = predicates.filter(|pred| visited.insert(pred));
self.stack.extend(predicates);
}
// predicates.
}
ty::Predicate::Subtype(..) => {
- // Currently, we do not "elaborate" predicates like `X
- // <: Y`, though conceivably we might.
+ // Currently, we do not "elaborate" predicates like `X <: Y`,
+ // though conceivably we might.
}
ty::Predicate::Projection(..) => {
// Nothing to elaborate in a projection predicate.
// Currently, we do not elaborate const-evaluatable
// predicates.
}
-
ty::Predicate::RegionOutlives(..) => {
// Nothing to elaborate from `'a: 'b`.
}
-
ty::Predicate::TypeOutlives(ref data) => {
// We know that `T: 'a` for some type `T`. We can
// often elaborate this. For example, if we know that
tcx.push_outlives_components(ty_max, &mut components);
self.stack.extend(
components
- .into_iter()
- .filter_map(|component| match component {
- Component::Region(r) => if r.is_late_bound() {
- None
- } else {
- Some(ty::Predicate::RegionOutlives(
- ty::Binder::dummy(ty::OutlivesPredicate(r, r_min))))
- },
-
- Component::Param(p) => {
- let ty = tcx.mk_ty_param(p.index, p.name);
- Some(ty::Predicate::TypeOutlives(
- ty::Binder::dummy(ty::OutlivesPredicate(ty, r_min))))
- },
-
- Component::UnresolvedInferenceVariable(_) => {
- None
- },
-
- Component::Projection(_) |
- Component::EscapingProjection(_) => {
- // We can probably do more here. This
- // corresponds to a case like `<T as
- // Foo<'a>>::U: 'b`.
- None
- },
- })
- .filter(|p| visited.insert(p)));
+ .into_iter()
+ .filter_map(|component| match component {
+ Component::Region(r) => if r.is_late_bound() {
+ None
+ } else {
+ Some(ty::Predicate::RegionOutlives(
+ ty::Binder::dummy(ty::OutlivesPredicate(r, r_min))))
+ }
+
+ Component::Param(p) => {
+ let ty = tcx.mk_ty_param(p.index, p.name);
+ Some(ty::Predicate::TypeOutlives(
+ ty::Binder::dummy(ty::OutlivesPredicate(ty, r_min))))
+ }
+
+ Component::UnresolvedInferenceVariable(_) => {
+ None
+ }
+
+ Component::Projection(_) |
+ Component::EscapingProjection(_) => {
+ // We can probably do more here. This
+ // corresponds to a case like `<T as
+ // Foo<'a>>::U: 'b`.
+ None
+ }
+ })
+ .filter(|p| visited.insert(p))
+ );
}
}
}
fn next(&mut self) -> Option<ty::Predicate<'tcx>> {
// Extract next item from top-most stack frame, if any.
- let next_predicate = match self.stack.pop() {
- Some(predicate) => predicate,
- None => {
- // No more stack frames. Done.
- return None;
- }
- };
- self.push(&next_predicate);
- return Some(next_predicate);
+ if let Some(pred) = self.stack.pop() {
+ self.elaborate(&pred);
+ Some(pred)
+ } else {
+ None
+ }
}
}
pub fn supertraits<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>)
- -> Supertraits<'cx, 'gcx, 'tcx>
-{
+ -> Supertraits<'cx, 'gcx, 'tcx> {
elaborate_trait_ref(tcx, trait_ref).filter_to_traits()
}
pub fn transitive_bounds<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
bounds: impl Iterator<Item = ty::PolyTraitRef<'tcx>>)
- -> Supertraits<'cx, 'gcx, 'tcx>
-{
+ -> Supertraits<'cx, 'gcx, 'tcx> {
elaborate_trait_refs(tcx, bounds).filter_to_traits()
}
///////////////////////////////////////////////////////////////////////////
-// Iterator over def-ids of supertraits
+// `TraitAliasExpander` iterator
+///////////////////////////////////////////////////////////////////////////
+
+/// "Trait alias expansion" is the process of expanding a sequence of trait
+/// references into another sequence by transitively following all trait
+/// aliases. For example, if you have bounds like `Foo + Send`, a trait alias
+/// `trait Foo = Bar + Sync;`, and another trait alias
+/// `trait Bar = Read + Write;`, then the bounds would expand to
+/// `Read + Write + Sync + Send`.
+/// Expansion is done via a DFS (depth-first search), and the `visited` field
+/// is used to avoid cycles.
+pub struct TraitAliasExpander<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
+    tcx: TyCtxt<'a, 'gcx, 'tcx>,
+    /// DFS worklist: each entry is a trait reference together with the alias
+    /// path that produced it.
+    stack: Vec<TraitAliasExpansionInfo<'tcx>>,
+}
+
+/// Stores information about the expansion of a trait via a path of zero or more trait aliases.
+#[derive(Debug, Clone)]
+pub struct TraitAliasExpansionInfo<'tcx> {
+    /// The expansion path: the original trait reference is the first element,
+    /// and the most recent expansion step is the last.
+    pub path: SmallVec<[(ty::PolyTraitRef<'tcx>, Span); 4]>,
+}
+
+impl<'tcx> TraitAliasExpansionInfo<'tcx> {
+    /// Creates a new expansion path containing just `(trait_ref, span)`.
+    fn new(trait_ref: ty::PolyTraitRef<'tcx>, span: Span) -> Self {
+        Self {
+            path: smallvec![(trait_ref, span)]
+        }
+    }
+
+    /// Adds diagnostic labels to `diag` for the expansion path of a trait through all intermediate
+    /// trait aliases.
+    pub fn label_with_exp_info(&self,
+        diag: &mut DiagnosticBuilder<'_>,
+        top_label: &str,
+        use_desc: &str
+    ) {
+        // Label the most recent expansion step with the caller-provided text...
+        diag.span_label(self.top().1, top_label);
+        // ...every intermediate alias along the path...
+        if self.path.len() > 1 {
+            for (_, sp) in self.path.iter().rev().skip(1).take(self.path.len() - 2) {
+                diag.span_label(*sp, format!("referenced here ({})", use_desc));
+            }
+        }
+        // ...and finally the original use site.
+        diag.span_label(self.bottom().1,
+            format!("trait alias used in trait object type ({})", use_desc));
+    }
+
+    /// The most recently expanded trait reference (top of the path).
+    pub fn trait_ref(&self) -> &ty::PolyTraitRef<'tcx> {
+        &self.top().0
+    }
+
+    /// The last (most recent) element of the expansion path.
+    pub fn top(&self) -> &(ty::PolyTraitRef<'tcx>, Span) {
+        self.path.last().unwrap()
+    }
+
+    /// The first (original) element of the expansion path.
+    pub fn bottom(&self) -> &(ty::PolyTraitRef<'tcx>, Span) {
+        self.path.first().unwrap()
+    }
+
+    /// Returns a copy of `self` with `(trait_ref, span)` appended to the path.
+    fn clone_and_push(&self, trait_ref: ty::PolyTraitRef<'tcx>, span: Span) -> Self {
+        let mut path = self.path.clone();
+        path.push((trait_ref, span));
+
+        Self {
+            path
+        }
+    }
+}
+
+/// Expands each trait reference in `trait_refs` through any trait aliases it
+/// (transitively) names. Regular (non-alias) traits are yielded unchanged by
+/// the returned iterator.
+pub fn expand_trait_aliases<'cx, 'gcx, 'tcx>(
+    tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+    trait_refs: impl IntoIterator<Item = (ty::PolyTraitRef<'tcx>, Span)>
+) -> TraitAliasExpander<'cx, 'gcx, 'tcx> {
+    // Seed the expander's stack with a singleton expansion path per input.
+    let items: Vec<_> = trait_refs
+        .into_iter()
+        .map(|(trait_ref, span)| TraitAliasExpansionInfo::new(trait_ref, span))
+        .collect();
+    TraitAliasExpander { tcx, stack: items }
+}
+
+impl<'cx, 'gcx, 'tcx> TraitAliasExpander<'cx, 'gcx, 'tcx> {
+    /// If `item` is a trait alias and its predicate has not yet been visited, then expands `item`
+    /// to the definition, pushes the resulting expansion onto `self.stack`, and returns `false`.
+    /// Otherwise, immediately returns `true` if `item` is a regular trait, or `false` if it is a
+    /// trait alias.
+    /// The return value indicates whether `item` should be yielded to the user.
+    fn expand(&mut self, item: &TraitAliasExpansionInfo<'tcx>) -> bool {
+        let tcx = self.tcx;
+        let trait_ref = item.trait_ref();
+        let pred = trait_ref.to_predicate();
+
+        debug!("expand_trait_aliases: trait_ref={:?}", trait_ref);
+
+        // Don't recurse if this bound is not a trait alias.
+        let is_alias = tcx.is_trait_alias(trait_ref.def_id());
+        if !is_alias {
+            return true;
+        }
+
+        // Don't recurse if this trait alias is already on the stack for the DFS search.
+        // The comparison is done on anonymized predicates, and the last path entry
+        // (the current item itself) is excluded via `skip(1)` on the reversed path.
+        let anon_pred = anonymize_predicate(tcx, &pred);
+        if item.path.iter().rev().skip(1)
+            .any(|(tr, _)| anonymize_predicate(tcx, &tr.to_predicate()) == anon_pred) {
+            return false;
+        }
+
+        // Get components of trait alias.
+        let predicates = tcx.super_predicates_of(trait_ref.def_id());
+
+        // Keep only super-predicates that are themselves trait references; the
+        // `.rev()` means that, once pushed, the LIFO stack pops them back out
+        // in their original declaration order.
+        let items = predicates.predicates
+            .iter()
+            .rev()
+            .filter_map(|(pred, span)| {
+                pred.subst_supertrait(tcx, &trait_ref)
+                    .to_opt_poly_trait_ref()
+                    .map(|trait_ref| item.clone_and_push(trait_ref, *span))
+            });
+        debug!("expand_trait_aliases: items={:?}", items.clone());
+
+        self.stack.extend(items);
+
+        false
+    }
+}
+
+impl<'cx, 'gcx, 'tcx> Iterator for TraitAliasExpander<'cx, 'gcx, 'tcx> {
+    type Item = TraitAliasExpansionInfo<'tcx>;
+
+    // NOTE(review): an alias on the stack can expand to fewer (even zero)
+    // yielded items, so `stack.len()` may overestimate the lower bound —
+    // confirm this is intended.
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (self.stack.len(), None)
+    }
+
+    // Pop entries until a non-alias trait reference is found; expanding an
+    // alias pushes its components back onto the stack instead of yielding.
+    fn next(&mut self) -> Option<TraitAliasExpansionInfo<'tcx>> {
+        while let Some(item) = self.stack.pop() {
+            if self.expand(&item) {
+                return Some(item);
+            }
+        }
+        None
+    }
+}
+
+///////////////////////////////////////////////////////////////////////////
+// Iterator over def-IDs of supertraits
+///////////////////////////////////////////////////////////////////////////
pub struct SupertraitDefIds<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>,
self.stack.extend(
predicates.predicates
.iter()
- .filter_map(|(p, _)| p.to_opt_poly_trait_ref())
- .map(|t| t.def_id())
+ .filter_map(|(pred, _)| pred.to_opt_poly_trait_ref())
+ .map(|trait_ref| trait_ref.def_id())
.filter(|&super_def_id| visited.insert(super_def_id)));
Some(def_id)
}
type Item = ty::PolyTraitRef<'tcx>;
fn next(&mut self) -> Option<ty::PolyTraitRef<'tcx>> {
- loop {
- match self.base_iterator.next() {
- None => {
- return None;
- }
- Some(ty::Predicate::Trait(data)) => {
- return Some(data.to_poly_trait_ref());
- }
- Some(_) => {}
+ while let Some(pred) = self.base_iterator.next() {
+ if let ty::Predicate::Trait(data) = pred {
+ return Some(data.to_poly_trait_ref());
}
}
+ None
}
fn size_hint(&self) -> (usize, Option<usize>) {
use syntax::attr;
use syntax::source_map::MultiSpan;
use syntax::feature_gate;
-use syntax::symbol::{Symbol, keywords, InternedString, sym};
+use syntax::symbol::{Symbol, InternedString, kw, sym};
use syntax_pos::Span;
use crate::hir;
#[inline]
pub fn mk_self_type(self) -> Ty<'tcx> {
- self.mk_ty_param(0, keywords::SelfUpper.name().as_interned_str())
+ self.mk_ty_param(0, kw::SelfUpper.as_interned_str())
}
pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> {
use syntax::ast::{self, Name, Ident, NodeId};
use syntax::attr;
use syntax::ext::hygiene::Mark;
-use syntax::symbol::{keywords, sym, Symbol, LocalInternedString, InternedString};
+use syntax::symbol::{kw, sym, Symbol, LocalInternedString, InternedString};
use syntax_pos::Span;
use smallvec;
/// Does this early bound region have a name? Early bound regions normally
/// always have names except when using anonymous lifetimes (`'_`).
pub fn has_name(&self) -> bool {
- self.name != keywords::UnderscoreLifetime.name().as_interned_str()
+ self.name != kw::UnderscoreLifetime.as_interned_str()
}
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
pub enum Predicate<'tcx> {
- /// Corresponds to `where Foo: Bar<A,B,C>`. `Foo` here would be
+ /// Corresponds to `where Foo: Bar<A, B, C>`. `Foo` here would be
/// the `Self` type of the trait reference and `A`, `B`, and `C`
/// would be the type parameters.
Trait(PolyTraitPredicate<'tcx>),
- /// where `'a: 'b`
+ /// `where 'a: 'b`
RegionOutlives(PolyRegionOutlivesPredicate<'tcx>),
- /// where `T: 'a`
+ /// `where T: 'a`
TypeOutlives(PolyTypeOutlivesPredicate<'tcx>),
- /// where `<T as TraitRef>::Name == X`, approximately.
+ /// `where <T as TraitRef>::Name == X`, approximately.
/// See the `ProjectionPredicate` struct for details.
Projection(PolyProjectionPredicate<'tcx>),
/// no syntax: `T` well-formed
WellFormed(Ty<'tcx>),
- /// trait must be object-safe
+ /// Trait must be object-safe.
ObjectSafe(DefId),
/// No direct syntax. May be thought of as `where T: FnFoo<...>`
self.trait_ref.def_id
}
- pub fn input_types<'a>(&'a self) -> impl DoubleEndedIterator<Item=Ty<'tcx>> + 'a {
+ pub fn input_types<'a>(&'a self) -> impl DoubleEndedIterator<Item = Ty<'tcx>> + 'a {
self.trait_ref.input_types()
}
pub fn discriminants(
&'a self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
- ) -> impl Iterator<Item=(VariantIdx, Discr<'tcx>)> + Captures<'gcx> + 'a {
+ ) -> impl Iterator<Item = (VariantIdx, Discr<'tcx>)> + Captures<'gcx> + 'a {
let repr_type = self.repr.discr_type();
let initial = repr_type.initial_discriminant(tcx.global_tcx());
let mut prev_discr = None::<Discr<'tcx>>;
}
}
- pub fn item_name(self, id: DefId) -> InternedString {
+ pub fn item_name(self, id: DefId) -> Symbol {
if id.index == CRATE_DEF_INDEX {
- self.original_crate_name(id.krate).as_interned_str()
+ self.original_crate_name(id.krate)
} else {
let def_key = self.def_key(id);
match def_key.disambiguated_data.data {
}),
_ => def_key.disambiguated_data.data.get_opt_name().unwrap_or_else(|| {
bug!("item_name: no name for {:?}", self.def_path(id));
- }),
+ }).as_symbol(),
}
}
}
use crate::ty::subst::{Kind, Subst, UnpackedKind};
use crate::mir::interpret::ConstValue;
use rustc_target::spec::abi::Abi;
-use syntax::symbol::{keywords, InternedString};
+use syntax::symbol::{kw, InternedString};
use std::cell::Cell;
use std::fmt::{self, Write as _};
if self.tcx.sess.rust_2018() {
// We add the `crate::` keyword on Rust 2018, only when desired.
if SHOULD_PREFIX_WITH_CRATE.with(|flag| flag.get()) {
- write!(self, "{}", keywords::Crate.name())?;
+ write!(self, "{}", kw::Crate)?;
self.empty_path = false;
}
}
match *region {
ty::ReEarlyBound(ref data) => {
- data.name != "" && data.name != "'_"
+ data.name.as_symbol() != kw::Invalid &&
+ data.name.as_symbol() != kw::UnderscoreLifetime
}
ty::ReLateBound(_, br) |
ty::ReFree(ty::FreeRegion { bound_region: br, .. }) |
ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
if let ty::BrNamed(_, name) = br {
- if name != "" && name != "'_" {
+ if name.as_symbol() != kw::Invalid &&
+ name.as_symbol() != kw::UnderscoreLifetime {
return true;
}
}
// `explain_region()` or `note_and_explain_region()`.
match *region {
ty::ReEarlyBound(ref data) => {
- if data.name != "" {
+ if data.name.as_symbol() != kw::Invalid {
p!(write("{}", data.name));
return Ok(self);
}
ty::ReFree(ty::FreeRegion { bound_region: br, .. }) |
ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
if let ty::BrNamed(_, name) = br {
- if name != "" && name != "'_" {
+ if name.as_symbol() != kw::Invalid &&
+ name.as_symbol() != kw::UnderscoreLifetime {
p!(write("{}", name));
return Ok(self);
}
rustc_query_append! { [define_queries!][ <'tcx>
Other {
- /// Run analysis passes on the crate
+ /// Runs analysis passes on the crate.
[] fn analysis: Analysis(CrateNum) -> Result<(), ErrorReported>,
-
},
]}
use std::ops::Range;
use rustc_target::spec::abi;
use syntax::ast::{self, Ident};
-use syntax::symbol::{keywords, InternedString};
+use syntax::symbol::{kw, InternedString};
use serialize;
use self::InferTy::*;
}
pub fn for_self() -> ParamTy {
- ParamTy::new(0, keywords::SelfUpper.name().as_interned_str())
+ ParamTy::new(0, kw::SelfUpper.as_interned_str())
}
pub fn for_def(def: &ty::GenericParamDef) -> ParamTy {
// FIXME(#50125): Ignoring `Self` with `index != 0` might lead to weird behavior elsewhere,
// but this should only be possible when using `-Z continue-parse-after-error` like
// `compile-fail/issue-36638.rs`.
- self.name.as_symbol() == keywords::SelfUpper.name() && self.index == 0
+ self.name.as_symbol() == kw::SelfUpper && self.index == 0
}
}
base::{ExtCtxt, Resolver},
build::AstBuilder,
expand::ExpansionConfig,
- hygiene::{self, Mark, SyntaxContext},
+ hygiene::{Mark, SyntaxContext},
},
mut_visit::{self, MutVisitor},
parse::ParseSess,
ptr::P,
- symbol::{keywords, Symbol, sym}
+ symbol::{kw, sym, Symbol}
};
use syntax_pos::Span;
].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition: self.sess.edition,
});
// Tie the span to the macro expansion info we just created
// We will generate a new submodule. To `use` the static from that module, we need to get
// the `super::...` path.
- let super_path =
- f.cx.path(f.span, vec![Ident::with_empty_ctxt(keywords::Super.name()), f.global]);
+ let super_path = f.cx.path(f.span, vec![Ident::with_empty_ctxt(kw::Super), f.global]);
// Generate the items in the submodule
let mut items = vec![
use rustc::ty::layout::{self, Align, Integer, IntegerExt, LayoutOf,
PrimitiveExt, Size, TyLayout, VariantIdx};
use rustc::ty::subst::UnpackedKind;
-use rustc::session::config;
+use rustc::session::config::{self, DebugInfo};
use rustc::util::nodemap::FxHashMap;
use rustc_fs_util::path_to_c_string;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_target::abi::HasDataLayout;
use libc::{c_uint, c_longlong};
+use std::collections::hash_map::Entry;
use std::ffi::CString;
use std::fmt::{self, Write};
use std::hash::{Hash, Hasher};
use std::ptr;
use std::path::{Path, PathBuf};
use syntax::ast;
-use syntax::symbol::{Interner, InternedString, Symbol};
+use syntax::symbol::{Interner, InternedString};
use syntax_pos::{self, Span, FileName};
impl PartialEq for llvm::Metadata {
file_name,
defining_crate);
- let file_name = &file_name.to_string();
- let file_name_symbol = Symbol::intern(file_name);
- if defining_crate == LOCAL_CRATE {
- let directory = &cx.sess().working_dir.0.to_string_lossy();
- file_metadata_raw(cx, file_name, Some(file_name_symbol),
- directory, Some(Symbol::intern(directory)))
+ let file_name = Some(file_name.to_string());
+ let directory = if defining_crate == LOCAL_CRATE {
+ Some(cx.sess().working_dir.0.to_string_lossy().to_string())
} else {
// If the path comes from an upstream crate we assume it has been made
// independent of the compiler's working directory one way or another.
- file_metadata_raw(cx, file_name, Some(file_name_symbol), "", None)
- }
+ None
+ };
+ file_metadata_raw(cx, file_name, directory)
}
pub fn unknown_file_metadata(cx: &CodegenCx<'ll, '_>) -> &'ll DIFile {
- file_metadata_raw(cx, "<unknown>", None, "", None)
+ file_metadata_raw(cx, None, None)
}
fn file_metadata_raw(cx: &CodegenCx<'ll, '_>,
- file_name: &str,
- file_name_symbol: Option<Symbol>,
- directory: &str,
- directory_symbol: Option<Symbol>)
+ file_name: Option<String>,
+ directory: Option<String>)
-> &'ll DIFile {
- let key = (file_name_symbol, directory_symbol);
+ let key = (file_name, directory);
+
+ match debug_context(cx).created_files.borrow_mut().entry(key) {
+ Entry::Occupied(o) => return o.get(),
+ Entry::Vacant(v) => {
+ let (file_name, directory) = v.key();
+ debug!("file_metadata: file_name: {:?}, directory: {:?}", file_name, directory);
+
+ let file_name = SmallCStr::new(
+ if let Some(file_name) = file_name { &file_name } else { "<unknown>" });
+ let directory = SmallCStr::new(
+ if let Some(directory) = directory { &directory } else { "" });
+
+ let file_metadata = unsafe {
+ llvm::LLVMRustDIBuilderCreateFile(DIB(cx),
+ file_name.as_ptr(),
+ directory.as_ptr())
+ };
- if let Some(file_metadata) = debug_context(cx).created_files.borrow().get(&key) {
- return *file_metadata;
+ v.insert(file_metadata);
+ file_metadata
+ }
}
-
- debug!("file_metadata: file_name: {}, directory: {}", file_name, directory);
-
- let file_name = SmallCStr::new(file_name);
- let directory = SmallCStr::new(directory);
-
- let file_metadata = unsafe {
- llvm::LLVMRustDIBuilderCreateFile(DIB(cx),
- file_name.as_ptr(),
- directory.as_ptr())
- };
-
- let mut created_files = debug_context(cx).created_files.borrow_mut();
- created_files.insert(key, file_metadata);
- file_metadata
}
fn basic_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
let producer = CString::new(producer).unwrap();
let flags = "\0";
let split_name = "\0";
- let kind = DebugEmissionKind::from_generic(tcx.sess.opts.debuginfo);
+
+ // FIXME(#60020):
+ //
+ // This should actually be
+ //
+ // ```
+ // let kind = DebugEmissionKind::from_generic(tcx.sess.opts.debuginfo);
+ // ```
+ //
+ // that is, we should set LLVM's emission kind to `LineTablesOnly` if
+ // we are compiling with "limited" debuginfo. However, some of the
+ // existing tools relied on slightly more debuginfo being generated than
+ // would be the case with `LineTablesOnly`, and we did not want to break
+ // these tools in a "drive-by fix", without a good idea or plan about
+ // what limited debuginfo should exactly look like. So for now we keep
+ // the emission kind as `FullDebug`.
+ //
+ // See https://github.com/rust-lang/rust/issues/60020 for details.
+ let kind = DebugEmissionKind::FullDebug;
+ assert!(tcx.sess.opts.debuginfo != DebugInfo::None);
unsafe {
let file_metadata = llvm::LLVMRustDIBuilderCreateFile(
use syntax_pos::{self, Span, Pos};
use syntax::ast;
-use syntax::symbol::{Symbol, InternedString};
+use syntax::symbol::InternedString;
use rustc::ty::layout::{self, LayoutOf, HasTyCtxt};
use rustc_codegen_ssa::traits::*;
llcontext: &'a llvm::Context,
llmod: &'a llvm::Module,
builder: &'a mut DIBuilder<'a>,
- created_files: RefCell<FxHashMap<(Option<Symbol>, Option<Symbol>), &'a DIFile>>,
+ created_files: RefCell<FxHashMap<(Option<String>, Option<String>), &'a DIFile>>,
created_enum_disr_types: RefCell<FxHashMap<(DefId, layout::Primitive), &'a DIType>>,
type_map: RefCell<TypeMap<'a, 'tcx>>,
use crate::traits::*;
use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span};
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use std::iter;
};
bx.declare_local(
&fx.debug_context,
- arg_decl.name.unwrap_or(keywords::Invalid.name()),
+ arg_decl.name.unwrap_or(kw::Invalid),
arg_ty, scope,
variable_access,
VariableKind::ArgumentVariable(arg_index + 1),
bx.declare_local(
&fx.debug_context,
- arg_decl.name.unwrap_or(keywords::Invalid.name()),
+ arg_decl.name.unwrap_or(kw::Invalid),
arg.layout.ty,
scope,
variable_access,
) -> Option<OperandRef<'tcx, Bx::Value>> {
debug!("maybe_codegen_consume_direct(place={:?})", place);
- // watch out for locals that do not have an
- // alloca; they are handled somewhat differently
- if let mir::Place::Base(mir::PlaceBase::Local(index)) = *place {
- match self.locals[index] {
- LocalRef::Operand(Some(o)) => {
- return Some(o);
- }
- LocalRef::Operand(None) => {
- bug!("use of {:?} before def", place);
- }
- LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
- // use path below
- }
- }
- }
+ place.iterate(|place_base, place_projection| {
+ if let mir::PlaceBase::Local(index) = place_base {
+ match self.locals[*index] {
+ LocalRef::Operand(Some(mut o)) => {
+ // Moves out of scalar and scalar pair fields are trivial.
+ for proj in place_projection {
+ match proj.elem {
+ mir::ProjectionElem::Field(ref f, _) => {
+ o = o.extract_field(bx, f.index());
+ }
+ mir::ProjectionElem::Index(_) |
+ mir::ProjectionElem::ConstantIndex { .. } => {
+ // ZSTs don't require any actual memory access.
+ // FIXME(eddyb) deduplicate this with the identical
+ // checks in `codegen_consume` and `extract_field`.
+ let elem = o.layout.field(bx.cx(), 0);
+ if elem.is_zst() {
+ o = OperandRef::new_zst(bx, elem);
+ } else {
+ return None;
+ }
+ }
+ _ => return None,
+ }
+ }
- // Moves out of scalar and scalar pair fields are trivial.
- if let &mir::Place::Projection(ref proj) = place {
- if let Some(o) = self.maybe_codegen_consume_direct(bx, &proj.base) {
- match proj.elem {
- mir::ProjectionElem::Field(ref f, _) => {
- return Some(o.extract_field(bx, f.index()));
+ Some(o)
}
- mir::ProjectionElem::Index(_) |
- mir::ProjectionElem::ConstantIndex { .. } => {
- // ZSTs don't require any actual memory access.
- // FIXME(eddyb) deduplicate this with the identical
- // checks in `codegen_consume` and `extract_field`.
- let elem = o.layout.field(bx.cx(), 0);
- if elem.is_zst() {
- return Some(OperandRef::new_zst(bx, elem));
- }
+ LocalRef::Operand(None) => {
+ bug!("use of {:?} before def", place);
+ }
+ LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
+ // watch out for locals that do not have an
+ // alloca; they are handled somewhat differently
+ None
}
- _ => {}
}
+ } else {
+ None
}
- }
-
- None
+ })
}
pub fn codegen_consume(
return name.as_interned_str();
}
// Don't mangle foreign items.
- return tcx.item_name(def_id);
+ return tcx.item_name(def_id).as_interned_str();
}
if let Some(name) = &attrs.export_name {
if attrs.flags.contains(CodegenFnAttrFlags::NO_MANGLE) {
// Don't mangle
- return tcx.item_name(def_id);
+ return tcx.item_name(def_id).as_interned_str();
}
// We want to compute the "type" of this item. Unfortunately, some
-/// A simple static assertion macro. The first argument should be a unique
-/// ALL_CAPS identifier that describes the condition.
+/// A simple static assertion macro.
#[macro_export]
-#[allow_internal_unstable(type_ascription)]
+#[allow_internal_unstable(type_ascription, underscore_const_names)]
macro_rules! static_assert {
- ($name:ident: $test:expr) => {
+ ($test:expr) => {
// Use the bool to access an array such that if the bool is false, the access
// is out-of-bounds.
#[allow(dead_code)]
- static $name: () = [()][!($test: bool) as usize];
+ const _: () = [()][!($test: bool) as usize];
}
}
use std::sync::{Arc, Mutex};
use syntax;
use syntax::source_map::{FileLoader, SourceMap};
+use syntax_pos::edition;
pub type Result<T> = result::Result<T, ErrorReported>;
{
let stderr = config.stderr.take();
util::spawn_thread_pool(
+ config.opts.edition,
config.opts.debugging_opts.threads,
&stderr,
|| run_compiler_in_existing_thread_pool(config, f),
)
}
-pub fn default_thread_pool<F, R>(f: F) -> R
+pub fn default_thread_pool<F, R>(edition: edition::Edition, f: F) -> R
where
F: FnOnce() -> R + Send,
R: Send,
{
- util::spawn_thread_pool(None, &None, f)
+ util::spawn_thread_pool(edition, None, &None, f)
}
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::symbol::Symbol;
use syntax::feature_gate::AttributeType;
-use syntax_pos::{FileName, hygiene};
+use syntax_pos::{FileName, edition::Edition, hygiene};
use syntax_ext;
use serialize::json;
pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
sess.diagnostic()
.set_continue_after_error(sess.opts.debugging_opts.continue_parse_after_error);
- hygiene::set_default_edition(sess.edition());
-
sess.profiler(|p| p.start_activity("parsing"));
let krate = time(sess, "parsing", || match *input {
Input::File(ref file) => parse::parse_crate_from_file(file, &sess.parse_sess),
crate_loader,
&resolver_arenas,
);
- syntax_ext::register_builtins(&mut resolver, plugin_info.syntax_exts);
+ syntax_ext::register_builtins(&mut resolver, plugin_info.syntax_exts, sess.edition());
// Expand all macros
sess.profiler(|p| p.start_activity("macro expansion"));
use syntax::source_map::{FileLoader, RealFileLoader, SourceMap};
use syntax::symbol::{Symbol, sym};
use syntax::{self, ast, attr};
+use syntax_pos::edition::Edition;
#[cfg(not(parallel_compiler))]
use std::{thread, panic};
#[cfg(not(parallel_compiler))]
pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
+ edition: Edition,
_threads: Option<usize>,
stderr: &Option<Arc<Mutex<Vec<u8>>>>,
f: F,
}
scoped_thread(cfg, || {
- syntax::with_globals( || {
+ syntax::with_globals(edition, || {
ty::tls::GCX_PTR.set(&Lock::new(0), || {
if let Some(stderr) = stderr {
io::set_panic(Some(box Sink(stderr.clone())));
#[cfg(parallel_compiler)]
pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
+ edition: Edition,
threads: Option<usize>,
stderr: &Option<Arc<Mutex<Vec<u8>>>>,
f: F,
let with_pool = move |pool: &ThreadPool| pool.install(move || f());
- syntax::with_globals(|| {
+ syntax::with_globals(edition, || {
syntax::GLOBALS.with(|syntax_globals| {
syntax_pos::GLOBALS.with(|syntax_pos_globals| {
// The main handler runs for each Rayon worker thread and sets up
use syntax::feature_gate::{AttributeGate, AttributeTemplate, AttributeType};
use syntax::feature_gate::{Stability, deprecated_attributes};
use syntax_pos::{BytePos, Span, SyntaxContext};
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::errors::{Applicability, DiagnosticBuilder};
use syntax::print::pprust::expr_to_string;
use syntax::visit::FnKind;
for arg in sig.decl.inputs.iter() {
match arg.pat.node {
ast::PatKind::Ident(_, ident, None) => {
- if ident.name == keywords::Invalid.name() {
+ if ident.name == kw::Invalid {
let ty_snip = cx
.sess
.source_map()
fn def_id_is_transmute(cx: &LateContext<'_, '_>, def_id: DefId) -> bool {
cx.tcx.fn_sig(def_id).abi() == RustIntrinsic &&
- cx.tcx.item_name(def_id) == "transmute"
+ cx.tcx.item_name(def_id) == sym::transmute
}
}
}
#![allow(non_snake_case)]
use rustc::hir::{ExprKind, Node};
+use crate::hir::def_id::DefId;
use rustc::hir::lowering::is_range_literal;
use rustc::ty::subst::SubstsRef;
use rustc::ty::{self, AdtKind, ParamEnv, Ty, TyCtxt};
-use rustc::ty::layout::{self, IntegerExt, LayoutOf, VariantIdx};
+use rustc::ty::layout::{self, IntegerExt, LayoutOf, VariantIdx, SizeSkeleton};
use rustc::{lint, util};
use rustc_data_structures::indexed_vec::Idx;
use util::nodemap::FxHashSet;
use std::cmp;
use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64};
-use syntax::{ast, attr};
+use syntax::{ast, attr, source_map};
use syntax::errors::Applicability;
+use syntax::symbol::sym;
use rustc_target::spec::abi::Abi;
use syntax_pos::Span;
-use syntax::source_map;
use rustc::hir;
},
}
+fn is_zst<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId, ty: Ty<'tcx>) -> bool {
+ tcx.layout_of(tcx.param_env(did).and(ty)).map(|layout| layout.is_zst()).unwrap_or(false)
+}
+
+fn ty_is_known_nonnull<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
+ match ty.sty {
+ ty::FnPtr(_) => true,
+ ty::Ref(..) => true,
+ ty::Adt(field_def, substs) if field_def.repr.transparent() && field_def.is_struct() => {
+ for field in &field_def.non_enum_variant().fields {
+ let field_ty = tcx.normalize_erasing_regions(
+ ParamEnv::reveal_all(),
+ field.ty(tcx, substs),
+ );
+ if is_zst(tcx, field.did, field_ty) {
+ continue;
+ }
+
+ let attrs = tcx.get_attrs(field_def.did);
+ if attrs.iter().any(|a| a.check_name(sym::rustc_nonnull_optimization_guaranteed)) ||
+ ty_is_known_nonnull(tcx, field_ty) {
+ return true;
+ }
+ }
+
+ false
+ }
+ _ => false,
+ }
+}
+
/// Check if this enum can be safely exported based on the
/// "nullable pointer optimization". Currently restricted
-/// to function pointers and references, but could be
-/// expanded to cover NonZero raw pointers and newtypes.
+/// to function pointers, references, core::num::NonZero*,
+/// core::ptr::NonNull, and #[repr(transparent)] newtypes.
/// FIXME: This duplicates code in codegen.
fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def: &'tcx ty::AdtDef,
+ ty: Ty<'tcx>,
+ ty_def: &'tcx ty::AdtDef,
substs: SubstsRef<'tcx>)
-> bool {
- if def.variants.len() == 2 {
- let data_idx;
+ if ty_def.variants.len() != 2 {
+ return false;
+ }
- let zero = VariantIdx::new(0);
- let one = VariantIdx::new(1);
+ let get_variant_fields = |index| &ty_def.variants[VariantIdx::new(index)].fields;
+ let variant_fields = [get_variant_fields(0), get_variant_fields(1)];
+ let fields = if variant_fields[0].is_empty() {
+ &variant_fields[1]
+ } else if variant_fields[1].is_empty() {
+ &variant_fields[0]
+ } else {
+ return false;
+ };
- if def.variants[zero].fields.is_empty() {
- data_idx = one;
- } else if def.variants[one].fields.is_empty() {
- data_idx = zero;
- } else {
- return false;
- }
+ if fields.len() != 1 {
+ return false;
+ }
- if def.variants[data_idx].fields.len() == 1 {
- match def.variants[data_idx].fields[0].ty(tcx, substs).sty {
- ty::FnPtr(_) => {
- return true;
- }
- ty::Ref(..) => {
- return true;
- }
- _ => {}
- }
- }
+ let field_ty = fields[0].ty(tcx, substs);
+ if !ty_is_known_nonnull(tcx, field_ty) {
+ return false;
}
- false
+
+ // At this point, the field's type is known to be nonnull and the parent enum is Option-like.
+ // If the computed size for the field and the enum are different, the nonnull optimization isn't
+ // being applied (and we've got a problem somewhere).
+ let compute_size_skeleton = |t| SizeSkeleton::compute(t, tcx, ParamEnv::reveal_all()).unwrap();
+ if !compute_size_skeleton(ty).same_size(compute_size_skeleton(field_ty)) {
+ bug!("improper_ctypes: Option nonnull optimization not applied?");
+ }
+
+ true
}
impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
);
// repr(transparent) types are allowed to have arbitrary ZSTs, not just
// PhantomData -- skip checking all ZST fields
- if def.repr.transparent() {
- let is_zst = cx
- .layout_of(cx.param_env(field.did).and(field_ty))
- .map(|layout| layout.is_zst())
- .unwrap_or(false);
- if is_zst {
- continue;
- }
+ if def.repr.transparent() && is_zst(cx, field.did, field_ty) {
+ continue;
}
let r = self.check_type_for_ffi(cache, field_ty);
match r {
// discriminant.
if !def.repr.c() && def.repr.int.is_none() {
// Special-case types like `Option<extern fn()>`.
- if !is_repr_nullable_ptr(cx, def, substs) {
+ if !is_repr_nullable_ptr(cx, ty, def, substs) {
return FfiUnsafe {
ty: ty,
reason: "enum has no representation hint",
use syntax::errors::Applicability;
use syntax::feature_gate::{AttributeType, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
use syntax::print::pprust;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::symbol::Symbol;
use syntax::util::parser;
use syntax_pos::Span;
match items[0].0.kind {
ast::UseTreeKind::Simple(rename, ..) => {
let orig_ident = items[0].0.prefix.segments.last().unwrap().ident;
- if orig_ident.name == keywords::SelfLower.name() {
+ if orig_ident.name == kw::SelfLower {
return;
}
node_ident = rename.unwrap_or(orig_ident);
let mut keyword_stream = quote! {};
let mut symbols_stream = quote! {};
let mut prefill_stream = quote! {};
- let mut from_str_stream = quote! {};
let mut counter = 0u32;
let mut keys = HashSet::<String>::new();
#value,
});
keyword_stream.extend(quote! {
- pub const #name: Keyword = Keyword {
- ident: Ident::with_empty_ctxt(super::Symbol::new(#counter))
- };
- });
- from_str_stream.extend(quote! {
- #value => Ok(#name),
+ pub const #name: Symbol = Symbol::new(#counter);
});
counter += 1;
}
macro_rules! keywords {
() => {
#keyword_stream
-
- impl std::str::FromStr for Keyword {
- type Err = ();
-
- fn from_str(s: &str) -> Result<Self, ()> {
- match s {
- #from_str_stream
- _ => Err(()),
- }
- }
- }
}
}
fn verify_no_symbol_conflicts(&self,
span: Span,
- root: &CrateRoot) {
+ root: &CrateRoot<'_>) {
// Check for (potential) conflicts with the local crate
if self.local_crate_name == root.name &&
self.sess.local_crate_disambiguator() == root.disambiguator {
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(&mut self,
root: &Option<CratePaths>,
- crate_root: &CrateRoot,
+ crate_root: &CrateRoot<'_>,
metadata: &MetadataBlob,
krate: CrateNum,
span: Span,
/// implemented as dynamic libraries, but we have a possible future where
/// custom derive (and other macro-1.1 style features) are implemented via
/// executables and custom IPC.
- fn load_derive_macros(&mut self, root: &CrateRoot, dylib: Option<PathBuf>, span: Span)
+ fn load_derive_macros(&mut self, root: &CrateRoot<'_>, dylib: Option<PathBuf>, span: Span)
-> Vec<(ast::Name, Lrc<SyntaxExtension>)> {
use std::{env, mem};
use crate::dynamic_lib::DynamicLibrary;
/// Used for decoding interpret::AllocIds in a cached & thread-safe manner.
pub alloc_decoding_state: AllocDecodingState,
- pub root: schema::CrateRoot,
+ // NOTE(eddyb) we pass `'static` to a `'tcx` parameter because this
+ // lifetime is only used behind `Lazy` / `LazySeq`, and therefore
+ // acts like an universal (`for<'tcx>`), that is paired up with
+ // whichever `TyCtxt` is being used to decode those values.
+ pub root: schema::CrateRoot<'static>,
/// For each public item in this crate, we encode a key. When the
/// crate is loaded, we read all the keys and put them in this
used_crate_source => { Lrc::new(cdata.source.clone()) }
- exported_symbols => {
- let cnum = cdata.cnum;
- assert!(cnum != LOCAL_CRATE);
-
- Arc::new(cdata.exported_symbols(tcx))
- }
+ exported_symbols => { Arc::new(cdata.exported_symbols(tcx)) }
}
pub fn provide<'tcx>(providers: &mut Providers<'tcx>) {
return;
}
- let child = child.res.def_id();
-
- match visible_parent_map.entry(child) {
- Entry::Occupied(mut entry) => {
- // If `child` is defined in crate `cnum`, ensure
- // that it is mapped to a parent in `cnum`.
- if child.krate == cnum && entry.get().krate != cnum {
+ if let Some(child) = child.res.opt_def_id() {
+ match visible_parent_map.entry(child) {
+ Entry::Occupied(mut entry) => {
+ // If `child` is defined in crate `cnum`, ensure
+ // that it is mapped to a parent in `cnum`.
+ if child.krate == cnum && entry.get().krate != cnum {
+ entry.insert(parent);
+ }
+ }
+ Entry::Vacant(entry) => {
entry.insert(parent);
+ bfs_queue.push_back(child);
}
}
- Entry::Vacant(entry) => {
- entry.insert(parent);
- bfs_queue.push_back(child);
- }
}
};
let data = self.get_crate_data(id.krate);
if let Some(ref proc_macros) = data.proc_macros {
return LoadedMacro::ProcMacro(proc_macros[id.index.to_proc_macro_index()].1.clone());
- } else if data.name == sym::proc_macro && data.item_name(id.index) == "quote" {
+ } else if data.name == sym::proc_macro && data.item_name(id.index) == sym::quote {
use syntax::ext::base::SyntaxExtension;
use syntax_ext::proc_macro_impl::BangProcMacro;
use syntax::attr;
use syntax::ast::{self, Ident};
use syntax::source_map;
-use syntax::symbol::{InternedString, sym};
+use syntax::symbol::{Symbol, sym};
use syntax::ext::base::{MacroKind, SyntaxExtension};
use syntax::ext::hygiene::Mark;
use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION};
implement_ty_decoder!( DecodeContext<'a, 'tcx> );
-impl<'a, 'tcx> MetadataBlob {
+impl<'tcx> MetadataBlob {
pub fn is_compatible(&self) -> bool {
self.raw_bytes().starts_with(METADATA_HEADER)
}
Lazy::with_position(METADATA_HEADER.len() + 4).decode(self)
}
- pub fn get_root(&self) -> CrateRoot {
+ pub fn get_root(&self) -> CrateRoot<'tcx> {
let slice = self.raw_bytes();
let offset = METADATA_HEADER.len();
let pos = (((slice[offset + 0] as u32) << 24) | ((slice[offset + 1] as u32) << 16) |
/// |- proc macro #0 (DefIndex 1:N)
/// |- proc macro #1 (DefIndex 1:N+1)
/// \- ...
-crate fn proc_macro_def_path_table(crate_root: &CrateRoot,
+crate fn proc_macro_def_path_table(crate_root: &CrateRoot<'_>,
proc_macros: &[(ast::Name, Lrc<SyntaxExtension>)])
-> DefPathTable
{
fn maybe_entry(&self, item_id: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
assert!(!self.is_proc_macro(item_id));
- self.root.index.lookup(self.blob.raw_bytes(), item_id)
+ self.root.entries_index.lookup(self.blob.raw_bytes(), item_id)
}
fn entry(&self, item_id: DefIndex) -> Entry<'tcx> {
}
}
- pub fn item_name(&self, item_index: DefIndex) -> InternedString {
+ pub fn item_name(&self, item_index: DefIndex) -> Symbol {
self.def_key(item_index)
.disambiguated_data
.data
.get_opt_name()
.expect("no name in item_name")
+ .as_symbol()
}
pub fn def_kind(&self, index: DefIndex) -> Option<DefKind> {
ty::VariantDef::new(
tcx,
- Ident::from_interned_str(self.item_name(index)),
+ Ident::with_empty_ctxt(self.item_name(index)),
variant_did,
ctor_did,
data.discr,
let f = self.entry(index);
ty::FieldDef {
did: self.local_def_id(index),
- ident: Ident::from_interned_str(self.item_name(index)),
+ ident: Ident::with_empty_ctxt(self.item_name(index)),
vis: f.visibility.decode(self)
}
}).collect(),
if let Some(kind) = self.def_kind(child_index) {
callback(def::Export {
res: Res::Def(kind, self.local_def_id(child_index)),
- ident: Ident::from_interned_str(self.item_name(child_index)),
+ ident: Ident::with_empty_ctxt(self.item_name(child_index)),
vis: self.get_visibility(child_index),
span: self.entry(child_index).span.decode((self, sess)),
});
self.entry(id)
.children
.decode(self)
- .map(|index| self.item_name(index).as_symbol())
+ .map(|index| self.item_name(index))
.collect()
}
// link those in so we skip those crates.
vec![]
} else {
- let lazy_seq: LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)> =
- LazySeq::with_position_and_length(self.root.exported_symbols.position,
- self.root.exported_symbols.len);
- lazy_seq.decode((self, tcx)).collect()
+ self.root.exported_symbols.decode((self, tcx)).collect()
}
}
use crate::index::Index;
-use crate::index_builder::{FromId, IndexBuilder, Untracked};
-use crate::isolated_encoder::IsolatedEncoder;
use crate::schema::*;
use rustc::middle::cstore::{LinkagePreference, NativeLibrary,
use syntax::ast;
use syntax::attr;
use syntax::source_map::Spanned;
-use syntax::symbol::{keywords, sym};
-use syntax_pos::{self, hygiene, FileName, SourceFile, Span};
+use syntax::symbol::{kw, sym};
+use syntax_pos::{self, FileName, SourceFile, Span};
use log::{debug, trace};
use rustc::hir::{self, PatKind};
opaque: opaque::Encoder,
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ entries_index: Index<'tcx>,
+
lazy_state: LazyState,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
})
}
- // Encodes something that corresponds to a single DepNode::GlobalMetaData
- // and registers the Fingerprint in the `metadata_hashes` map.
- pub fn tracked<'x, DATA, R>(&'x mut self,
- op: fn(&mut IsolatedEncoder<'x, 'a, 'tcx>, DATA) -> R,
- data: DATA)
- -> R {
- op(&mut IsolatedEncoder::new(self), data)
+ /// Emit the data for a `DefId` to the metadata. The function to
+ /// emit the data is `op`, and it will be given `data` as
+ /// arguments. This `record` function will call `op` to generate
+ /// the `Entry` (which may point to other encoded information)
+ /// and will then record the `Lazy<Entry>` for use in the index.
+ // FIXME(eddyb) remove this.
+ pub fn record<DATA>(&mut self,
+ id: DefId,
+ op: impl FnOnce(&mut Self, DATA) -> Entry<'tcx>,
+ data: DATA)
+ {
+ assert!(id.is_local());
+
+ let entry = op(self, data);
+ let entry = self.lazy(&entry);
+ self.entries_index.record(id, entry);
}
- fn encode_info_for_items(&mut self) -> Index {
+ fn encode_info_for_items(&mut self) {
let krate = self.tcx.hir().krate();
- let mut index = IndexBuilder::new(self);
let vis = Spanned { span: syntax_pos::DUMMY_SP, node: hir::VisibilityKind::Public };
- index.record(DefId::local(CRATE_DEF_INDEX),
- IsolatedEncoder::encode_info_for_mod,
- FromId(hir::CRATE_HIR_ID, (&krate.module, &krate.attrs, &vis)));
- let mut visitor = EncodeVisitor { index };
- krate.visit_all_item_likes(&mut visitor.as_deep_visitor());
+ self.record(DefId::local(CRATE_DEF_INDEX),
+ EncodeContext::encode_info_for_mod,
+ (hir::CRATE_HIR_ID, &krate.module, &krate.attrs, &vis));
+ krate.visit_all_item_likes(&mut self.as_deep_visitor());
for macro_def in &krate.exported_macros {
- visitor.visit_macro_def(macro_def);
+ self.visit_macro_def(macro_def);
}
- visitor.index.into_items()
}
fn encode_def_path_table(&mut self) -> Lazy<DefPathTable> {
self.lazy_seq_ref(adapted.iter().map(|rc| &**rc))
}
- fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
+ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
let mut i = self.position();
- let crate_deps = self.tracked(IsolatedEncoder::encode_crate_deps, ());
- let dylib_dependency_formats = self.tracked(
- IsolatedEncoder::encode_dylib_dependency_formats,
- ());
+ let crate_deps = self.encode_crate_deps();
+ let dylib_dependency_formats = self.encode_dylib_dependency_formats();
let dep_bytes = self.position() - i;
// Encode the lib features.
i = self.position();
- let lib_features = self.tracked(IsolatedEncoder::encode_lib_features, ());
+ let lib_features = self.encode_lib_features();
let lib_feature_bytes = self.position() - i;
// Encode the language items.
i = self.position();
- let lang_items = self.tracked(IsolatedEncoder::encode_lang_items, ());
- let lang_items_missing = self.tracked(
- IsolatedEncoder::encode_lang_items_missing,
- ());
+ let lang_items = self.encode_lang_items();
+ let lang_items_missing = self.encode_lang_items_missing();
let lang_item_bytes = self.position() - i;
// Encode the native libraries used
i = self.position();
- let native_libraries = self.tracked(
- IsolatedEncoder::encode_native_libraries,
- ());
+ let native_libraries = self.encode_native_libraries();
let native_lib_bytes = self.position() - i;
- let foreign_modules = self.tracked(
- IsolatedEncoder::encode_foreign_modules,
- ());
+ let foreign_modules = self.encode_foreign_modules();
// Encode source_map
i = self.position();
// Encode the def IDs of impls, for coherence checking.
i = self.position();
- let impls = self.tracked(IsolatedEncoder::encode_impls, ());
+ let impls = self.encode_impls();
let impl_bytes = self.position() - i;
// Encode exported symbols info.
i = self.position();
let exported_symbols = self.tcx.exported_symbols(LOCAL_CRATE);
- let exported_symbols = self.tracked(
- IsolatedEncoder::encode_exported_symbols,
- &exported_symbols);
+ let exported_symbols = self.encode_exported_symbols(&exported_symbols);
let exported_symbols_bytes = self.position() - i;
let tcx = self.tcx;
// Encode the items.
i = self.position();
- let items = self.encode_info_for_items();
+ self.encode_info_for_items();
let item_bytes = self.position() - i;
// Encode the allocation index
self.lazy_seq(interpret_alloc_index)
};
- // Index the items
i = self.position();
- let index = items.write_index(&mut self.opaque);
- let index_bytes = self.position() - i;
+ let entries_index = self.entries_index.write_index(&mut self.opaque);
+ let entries_index_bytes = self.position() - i;
let attrs = tcx.hir().krate_attrs();
let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateType::ProcMacro);
hash: tcx.crate_hash(LOCAL_CRATE),
disambiguator: tcx.sess.local_crate_disambiguator(),
panic_strategy: tcx.sess.panic_strategy(),
- edition: hygiene::default_edition(),
+ edition: tcx.sess.edition(),
has_global_allocator: has_global_allocator,
has_panic_handler: has_panic_handler,
has_default_lib_allocator: has_default_lib_allocator,
impls,
exported_symbols,
interpret_alloc_index,
- index,
+ entries_index,
});
let total_bytes = self.position();
println!(" exp. symbols bytes: {}", exported_symbols_bytes);
println!(" def-path table bytes: {}", def_path_table_bytes);
println!(" item bytes: {}", item_bytes);
- println!(" index bytes: {}", index_bytes);
+ println!(" entries index bytes: {}", entries_index_bytes);
println!(" zero bytes: {}", zero_bytes);
println!(" total bytes: {}", total_bytes);
}
}
}
-// These are methods for encoding various things. They are meant to be used with
-// IndexBuilder::record() and EncodeContext::tracked(). They actually
-// would not have to be methods of IsolatedEncoder (free standing functions
-// taking IsolatedEncoder as first argument would be just fine) but by making
-// them methods we don't have to repeat the lengthy `<'a, 'b: 'a, 'tcx: 'b>`
-// clause again and again.
-impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
+impl EncodeContext<'_, 'tcx> {
fn encode_variances_of(&mut self, def_id: DefId) -> LazySeq<ty::Variance> {
- debug!("IsolatedEncoder::encode_variances_of({:?})", def_id);
+ debug!("EncodeContext::encode_variances_of({:?})", def_id);
let tcx = self.tcx;
- self.lazy_seq_from_slice(&tcx.variances_of(def_id))
+ self.lazy_seq_ref(&tcx.variances_of(def_id)[..])
}
fn encode_item_type(&mut self, def_id: DefId) -> Lazy<Ty<'tcx>> {
let tcx = self.tcx;
let ty = tcx.type_of(def_id);
- debug!("IsolatedEncoder::encode_item_type({:?}) => {:?}", def_id, ty);
+ debug!("EncodeContext::encode_item_type({:?}) => {:?}", def_id, ty);
self.lazy(&ty)
}
- /// Encode data for the given variant of the given ADT. The
- /// index of the variant is untracked: this is ok because we
- /// will have to lookup the adt-def by its id, and that gives us
- /// the right to access any information in the adt-def (including,
- /// e.g., the length of the various vectors).
fn encode_enum_variant_info(
&mut self,
- (enum_did, Untracked(index)): (DefId, Untracked<VariantIdx>),
+ (enum_did, index): (DefId, VariantIdx),
) -> Entry<'tcx> {
let tcx = self.tcx;
let def = tcx.adt_def(enum_did);
let variant = &def.variants[index];
let def_id = variant.def_id;
- debug!("IsolatedEncoder::encode_enum_variant_info({:?})", def_id);
+ debug!("EncodeContext::encode_enum_variant_info({:?})", def_id);
let data = VariantData {
ctor_kind: variant.ctor_kind,
}
}
- /// Encode the constructor for the given variant of the given ADT. See
- /// `encode_enum_variant_info` for an explanation about why the index is untracked.
fn encode_enum_variant_ctor(
&mut self,
- (enum_did, Untracked(index)): (DefId, Untracked<VariantIdx>),
+ (enum_did, index): (DefId, VariantIdx),
) -> Entry<'tcx> {
let tcx = self.tcx;
let def = tcx.adt_def(enum_did);
let variant = &def.variants[index];
let def_id = variant.ctor_def_id.unwrap();
- debug!("IsolatedEncoder::encode_enum_variant_ctor({:?})", def_id);
+ debug!("EncodeContext::encode_enum_variant_ctor({:?})", def_id);
let data = VariantData {
ctor_kind: variant.ctor_kind,
}
}
- fn encode_info_for_mod(&mut self,
- FromId(id, (md, attrs, vis)): FromId<(&hir::Mod,
- &[ast::Attribute],
- &hir::Visibility)>)
- -> Entry<'tcx> {
+ fn encode_info_for_mod(
+ &mut self,
+ (id, md, attrs, vis): (hir::HirId, &hir::Mod, &[ast::Attribute], &hir::Visibility),
+ ) -> Entry<'tcx> {
let tcx = self.tcx;
let def_id = tcx.hir().local_def_id_from_hir_id(id);
- debug!("IsolatedEncoder::encode_info_for_mod({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_mod({:?})", def_id);
let data = ModData {
reexports: match tcx.module_exports(def_id) {
- Some(ref exports) => self.lazy_seq_from_slice(exports.as_slice()),
+ Some(ref exports) => self.lazy_seq_ref(&exports[..]),
_ => LazySeq::empty(),
},
};
}
}
- /// Encode data for the given field of the given variant of the
- /// given ADT. The indices of the variant/field are untracked:
- /// this is ok because we will have to lookup the adt-def by its
- /// id, and that gives us the right to access any information in
- /// the adt-def (including, e.g., the length of the various
- /// vectors).
- fn encode_field(&mut self,
- (adt_def_id, Untracked((variant_index, field_index))): (DefId,
- Untracked<(VariantIdx,
- usize)>))
- -> Entry<'tcx> {
+ fn encode_field(
+ &mut self,
+ (adt_def_id, variant_index, field_index): (DefId, VariantIdx, usize),
+ ) -> Entry<'tcx> {
let tcx = self.tcx;
let variant = &tcx.adt_def(adt_def_id).variants[variant_index];
let field = &variant.fields[field_index];
let def_id = field.did;
- debug!("IsolatedEncoder::encode_field({:?})", def_id);
+ debug!("EncodeContext::encode_field({:?})", def_id);
let variant_id = tcx.hir().as_local_hir_id(variant.def_id).unwrap();
let variant_data = tcx.hir().expect_variant_data(variant_id);
}
fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_struct_ctor({:?})", def_id);
+ debug!("EncodeContext::encode_struct_ctor({:?})", def_id);
let tcx = self.tcx;
let adt_def = tcx.adt_def(adt_def_id);
let variant = adt_def.non_enum_variant();
}
fn encode_generics(&mut self, def_id: DefId) -> Lazy<ty::Generics> {
- debug!("IsolatedEncoder::encode_generics({:?})", def_id);
+ debug!("EncodeContext::encode_generics({:?})", def_id);
let tcx = self.tcx;
self.lazy(tcx.generics_of(def_id))
}
fn encode_predicates(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
- debug!("IsolatedEncoder::encode_predicates({:?})", def_id);
+ debug!("EncodeContext::encode_predicates({:?})", def_id);
let tcx = self.tcx;
self.lazy(&tcx.predicates_of(def_id))
}
fn encode_predicates_defined_on(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
- debug!("IsolatedEncoder::encode_predicates_defined_on({:?})", def_id);
+ debug!("EncodeContext::encode_predicates_defined_on({:?})", def_id);
let tcx = self.tcx;
self.lazy(&tcx.predicates_defined_on(def_id))
}
fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_trait_item({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_trait_item({:?})", def_id);
let tcx = self.tcx;
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
}
fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_impl_item({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_impl_item({:?})", def_id);
let tcx = self.tcx;
let hir_id = self.tcx.hir().as_local_hir_id(def_id).unwrap();
self.lazy_seq(body.arguments.iter().map(|arg| {
match arg.pat.node {
PatKind::Binding(_, _, ident, _) => ident.name,
- _ => keywords::Invalid.name(),
+ _ => kw::Invalid,
}
}))
})
// Encodes the inherent implementations of a structure, enumeration, or trait.
fn encode_inherent_implementations(&mut self, def_id: DefId) -> LazySeq<DefIndex> {
- debug!("IsolatedEncoder::encode_inherent_implementations({:?})", def_id);
+ debug!("EncodeContext::encode_inherent_implementations({:?})", def_id);
let implementations = self.tcx.inherent_impls(def_id);
if implementations.is_empty() {
LazySeq::empty()
}
fn encode_stability(&mut self, def_id: DefId) -> Option<Lazy<attr::Stability>> {
- debug!("IsolatedEncoder::encode_stability({:?})", def_id);
+ debug!("EncodeContext::encode_stability({:?})", def_id);
self.tcx.lookup_stability(def_id).map(|stab| self.lazy(stab))
}
fn encode_deprecation(&mut self, def_id: DefId) -> Option<Lazy<attr::Deprecation>> {
- debug!("IsolatedEncoder::encode_deprecation({:?})", def_id);
+ debug!("EncodeContext::encode_deprecation({:?})", def_id);
self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr))
}
fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) -> Entry<'tcx> {
let tcx = self.tcx;
- debug!("IsolatedEncoder::encode_info_for_item({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_item({:?})", def_id);
let kind = match item.node {
hir::ItemKind::Static(_, hir::MutMutable, _) => EntryKind::MutStatic,
EntryKind::Fn(self.lazy(&data))
}
hir::ItemKind::Mod(ref m) => {
- return self.encode_info_for_mod(FromId(item.hir_id, (m, &item.attrs, &item.vis)));
+ return self.encode_info_for_mod((item.hir_id, m, &item.attrs, &item.vis));
}
hir::ItemKind::ForeignMod(_) => EntryKind::ForeignMod,
hir::ItemKind::GlobalAsm(..) => EntryKind::GlobalAsm,
fn encode_info_for_ty_param(
&mut self,
- (def_id, Untracked(encode_type)): (DefId, Untracked<bool>),
+ (def_id, encode_type): (DefId, bool),
) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_ty_param({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_ty_param({:?})", def_id);
self.encode_info_for_generic_param(def_id, EntryKind::TypeParam, encode_type)
}
&mut self,
def_id: DefId,
) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_const_param({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_const_param({:?})", def_id);
self.encode_info_for_generic_param(def_id, EntryKind::ConstParam, true)
}
fn encode_info_for_closure(&mut self, def_id: DefId) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_closure({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_closure({:?})", def_id);
let tcx = self.tcx;
let tables = self.tcx.typeck_tables_of(def_id);
}
fn encode_info_for_anon_const(&mut self, def_id: DefId) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_anon_const({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_anon_const({:?})", def_id);
let tcx = self.tcx;
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
let body_id = tcx.hir().body_owned_by(id);
}
fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> LazySeq<ast::Attribute> {
- // NOTE: This must use lazy_seq_from_slice(), not lazy_seq() because
- // we rely on the HashStable specialization for [Attribute]
- // to properly filter things out.
- self.lazy_seq_from_slice(attrs)
+ self.lazy_seq_ref(attrs)
}
- fn encode_native_libraries(&mut self, _: ()) -> LazySeq<NativeLibrary> {
+ fn encode_native_libraries(&mut self) -> LazySeq<NativeLibrary> {
let used_libraries = self.tcx.native_libraries(LOCAL_CRATE);
self.lazy_seq(used_libraries.iter().cloned())
}
- fn encode_foreign_modules(&mut self, _: ()) -> LazySeq<ForeignModule> {
+ fn encode_foreign_modules(&mut self) -> LazySeq<ForeignModule> {
let foreign_modules = self.tcx.foreign_modules(LOCAL_CRATE);
self.lazy_seq(foreign_modules.iter().cloned())
}
- fn encode_crate_deps(&mut self, _: ()) -> LazySeq<CrateDep> {
+ fn encode_crate_deps(&mut self) -> LazySeq<CrateDep> {
let crates = self.tcx.crates();
let mut deps = crates
self.lazy_seq_ref(deps.iter().map(|&(_, ref dep)| dep))
}
- fn encode_lib_features(&mut self, _: ()) -> LazySeq<(ast::Name, Option<ast::Name>)> {
+ fn encode_lib_features(&mut self) -> LazySeq<(ast::Name, Option<ast::Name>)> {
let tcx = self.tcx;
let lib_features = tcx.lib_features();
self.lazy_seq(lib_features.to_vec())
}
- fn encode_lang_items(&mut self, _: ()) -> LazySeq<(DefIndex, usize)> {
+ fn encode_lang_items(&mut self) -> LazySeq<(DefIndex, usize)> {
let tcx = self.tcx;
let lang_items = tcx.lang_items();
let lang_items = lang_items.items().iter();
}))
}
- fn encode_lang_items_missing(&mut self, _: ()) -> LazySeq<lang_items::LangItem> {
+ fn encode_lang_items_missing(&mut self) -> LazySeq<lang_items::LangItem> {
let tcx = self.tcx;
self.lazy_seq_ref(&tcx.lang_items().missing)
}
/// Encodes an index, mapping each trait to its (local) implementations.
- fn encode_impls(&mut self, _: ()) -> LazySeq<TraitImpls> {
- debug!("IsolatedEncoder::encode_impls()");
+ fn encode_impls(&mut self) -> LazySeq<TraitImpls> {
+ debug!("EncodeContext::encode_impls()");
let tcx = self.tcx;
let mut visitor = ImplVisitor {
tcx,
TraitImpls {
trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
- impls: self.lazy_seq_from_slice(&impls[..]),
+ impls: self.lazy_seq_ref(&impls),
}
})
.collect();
- self.lazy_seq_from_slice(&all_impls[..])
+ self.lazy_seq_ref(&all_impls)
}
// Encodes all symbols exported from this crate into the metadata.
// symbol associated with them (they weren't translated) or if they're an FFI
// definition (as that's not defined in this crate).
fn encode_exported_symbols(&mut self,
- exported_symbols: &[(ExportedSymbol<'_>, SymbolExportLevel)])
- -> EncodedExportedSymbols {
+ exported_symbols: &[(ExportedSymbol<'tcx>, SymbolExportLevel)])
+ -> LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)> {
// The metadata symbol name is special. It should not show up in
// downstream crates.
let metadata_symbol_name = SymbolName::new(&metadata_symbol_name(self.tcx));
- let lazy_seq = self.lazy_seq(exported_symbols
+ self.lazy_seq(exported_symbols
.iter()
.filter(|&&(ref exported_symbol, _)| {
match *exported_symbol {
_ => true,
}
})
- .cloned());
-
- EncodedExportedSymbols {
- len: lazy_seq.len,
- position: lazy_seq.position,
- }
+ .cloned())
}
- fn encode_dylib_dependency_formats(&mut self, _: ()) -> LazySeq<Option<LinkagePreference>> {
+ fn encode_dylib_dependency_formats(&mut self) -> LazySeq<Option<LinkagePreference>> {
match self.tcx.sess.dependency_formats.borrow().get(&config::CrateType::Dylib) {
Some(arr) => {
self.lazy_seq(arr.iter().map(|slot| {
-> Entry<'tcx> {
let tcx = self.tcx;
- debug!("IsolatedEncoder::encode_info_for_foreign_item({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_foreign_item({:?})", def_id);
let kind = match nitem.node {
hir::ForeignItemKind::Fn(_, ref names, _) => {
}
}
-struct EncodeVisitor<'a, 'b: 'a, 'tcx: 'b> {
- index: IndexBuilder<'a, 'b, 'tcx>,
-}
-
-impl<'a, 'b, 'tcx> Visitor<'tcx> for EncodeVisitor<'a, 'b, 'tcx> {
+impl Visitor<'tcx> for EncodeContext<'_, 'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
- NestedVisitorMap::OnlyBodies(&self.index.tcx.hir())
+ NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
fn visit_expr(&mut self, ex: &'tcx hir::Expr) {
intravisit::walk_expr(self, ex);
- self.index.encode_info_for_expr(ex);
+ self.encode_info_for_expr(ex);
}
fn visit_item(&mut self, item: &'tcx hir::Item) {
intravisit::walk_item(self, item);
- let def_id = self.index.tcx.hir().local_def_id_from_hir_id(item.hir_id);
+ let def_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
match item.node {
hir::ItemKind::ExternCrate(_) |
- hir::ItemKind::Use(..) => (), // ignore these
- _ => self.index.record(def_id, IsolatedEncoder::encode_info_for_item, (def_id, item)),
+ hir::ItemKind::Use(..) => {} // ignore these
+ _ => self.record(def_id, EncodeContext::encode_info_for_item, (def_id, item)),
}
- self.index.encode_addl_info_for_item(item);
+ self.encode_addl_info_for_item(item);
}
fn visit_foreign_item(&mut self, ni: &'tcx hir::ForeignItem) {
intravisit::walk_foreign_item(self, ni);
- let def_id = self.index.tcx.hir().local_def_id_from_hir_id(ni.hir_id);
- self.index.record(def_id,
- IsolatedEncoder::encode_info_for_foreign_item,
+ let def_id = self.tcx.hir().local_def_id_from_hir_id(ni.hir_id);
+ self.record(def_id,
+ EncodeContext::encode_info_for_foreign_item,
(def_id, ni));
}
fn visit_variant(&mut self,
intravisit::walk_variant(self, v, g, id);
if let Some(ref discr) = v.node.disr_expr {
- let def_id = self.index.tcx.hir().local_def_id_from_hir_id(discr.hir_id);
- self.index.record(def_id, IsolatedEncoder::encode_info_for_anon_const, def_id);
+ let def_id = self.tcx.hir().local_def_id_from_hir_id(discr.hir_id);
+ self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id);
}
}
fn visit_generics(&mut self, generics: &'tcx hir::Generics) {
intravisit::walk_generics(self, generics);
- self.index.encode_info_for_generics(generics);
+ self.encode_info_for_generics(generics);
}
fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
intravisit::walk_ty(self, ty);
- self.index.encode_info_for_ty(ty);
+ self.encode_info_for_ty(ty);
}
fn visit_macro_def(&mut self, macro_def: &'tcx hir::MacroDef) {
- let def_id = self.index.tcx.hir().local_def_id_from_hir_id(macro_def.hir_id);
- self.index.record(def_id, IsolatedEncoder::encode_info_for_macro_def, macro_def);
+ let def_id = self.tcx.hir().local_def_id_from_hir_id(macro_def.hir_id);
+ self.record(def_id, EncodeContext::encode_info_for_macro_def, macro_def);
}
}
-impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
+impl EncodeContext<'_, 'tcx> {
fn encode_fields(&mut self, adt_def_id: DefId) {
let def = self.tcx.adt_def(adt_def_id);
for (variant_index, variant) in def.variants.iter_enumerated() {
for (field_index, field) in variant.fields.iter().enumerate() {
self.record(field.did,
- IsolatedEncoder::encode_field,
- (adt_def_id, Untracked((variant_index, field_index))));
+ EncodeContext::encode_field,
+ (adt_def_id, variant_index, field_index));
}
}
}
GenericParamKind::Type { ref default, .. } => {
self.record(
def_id,
- IsolatedEncoder::encode_info_for_ty_param,
- (def_id, Untracked(default.is_some())),
+ EncodeContext::encode_info_for_ty_param,
+ (def_id, default.is_some()),
);
}
GenericParamKind::Const { .. } => {
- self.record(def_id, IsolatedEncoder::encode_info_for_const_param, def_id);
+ self.record(def_id, EncodeContext::encode_info_for_const_param, def_id);
}
}
}
match ty.node {
hir::TyKind::Array(_, ref length) => {
let def_id = self.tcx.hir().local_def_id_from_hir_id(length.hir_id);
- self.record(def_id, IsolatedEncoder::encode_info_for_anon_const, def_id);
+ self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id);
}
_ => {}
}
match expr.node {
hir::ExprKind::Closure(..) => {
let def_id = self.tcx.hir().local_def_id_from_hir_id(expr.hir_id);
- self.record(def_id, IsolatedEncoder::encode_info_for_closure, def_id);
+ self.record(def_id, EncodeContext::encode_info_for_closure, def_id);
}
_ => {}
}
let def = self.tcx.adt_def(def_id);
for (i, variant) in def.variants.iter_enumerated() {
self.record(variant.def_id,
- IsolatedEncoder::encode_enum_variant_info,
- (def_id, Untracked(i)));
+ EncodeContext::encode_enum_variant_info,
+ (def_id, i));
if let Some(ctor_def_id) = variant.ctor_def_id {
self.record(ctor_def_id,
- IsolatedEncoder::encode_enum_variant_ctor,
- (def_id, Untracked(i)));
+ EncodeContext::encode_enum_variant_ctor,
+ (def_id, i));
}
}
}
if let Some(ctor_hir_id) = struct_def.ctor_hir_id() {
let ctor_def_id = self.tcx.hir().local_def_id_from_hir_id(ctor_hir_id);
self.record(ctor_def_id,
- IsolatedEncoder::encode_struct_ctor,
+ EncodeContext::encode_struct_ctor,
(def_id, ctor_def_id));
}
}
hir::ItemKind::Impl(..) => {
for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
self.record(trait_item_def_id,
- IsolatedEncoder::encode_info_for_impl_item,
+ EncodeContext::encode_info_for_impl_item,
trait_item_def_id);
}
}
hir::ItemKind::Trait(..) => {
for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
self.record(item_def_id,
- IsolatedEncoder::encode_info_for_trait_item,
+ EncodeContext::encode_info_for_trait_item,
item_def_id);
}
}
// Will be filled with the root position after encoding everything.
encoder.emit_raw_bytes(&[0, 0, 0, 0]);
- let (root, mut result) = {
+ // Since encoding metadata is not in a query, and nothing is cached,
+ // there's no need to do dep-graph tracking for any of it.
+ let (root, mut result) = tcx.dep_graph.with_ignore(move || {
let mut ecx = EncodeContext {
opaque: encoder,
tcx,
+ entries_index: Index::new(tcx.hir().definitions().def_index_count()),
lazy_state: LazyState::NoNode,
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
// culminating in the `CrateRoot` which points to all of it.
let root = ecx.encode_crate_root();
(root, ecx.opaque.into_inner())
- };
+ });
// Encode the root position.
let header = METADATA_HEADER.len();
use rustc::hir::def_id::{DefId, DefIndex};
use rustc_serialize::opaque::Encoder;
+use std::marker::PhantomData;
use std::u32;
use log::debug;
/// `u32::MAX`. Whenever an index is visited, we fill in the
/// appropriate spot by calling `record_position`. We should never
/// visit the same index twice.
-pub struct Index {
+pub struct Index<'tcx> {
positions: Vec<u8>,
+ _marker: PhantomData<&'tcx ()>,
}
-impl Index {
- pub fn new(max_index: usize) -> Index {
+impl Index<'tcx> {
+ pub fn new(max_index: usize) -> Self {
Index {
positions: vec![0xff; max_index * 4],
+ _marker: PhantomData,
}
}
- pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry<'_>>) {
+ pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry<'tcx>>) {
assert!(def_id.is_local());
self.record_index(def_id.index, entry);
}
- pub fn record_index(&mut self, item: DefIndex, entry: Lazy<Entry<'_>>) {
+ pub fn record_index(&mut self, item: DefIndex, entry: Lazy<Entry<'tcx>>) {
assert!(entry.position < (u32::MAX as usize));
let position = entry.position as u32;
let array_index = item.index();
position.write_to_bytes_at(positions, array_index)
}
- pub fn write_index(&self, buf: &mut Encoder) -> LazySeq<Index> {
+ pub fn write_index(&self, buf: &mut Encoder) -> LazySeq<Self> {
let pos = buf.position();
// First we write the length of the lower range ...
}
}
-impl<'tcx> LazySeq<Index> {
+impl LazySeq<Index<'tcx>> {
/// Given the metadata, extract out the offset of a particular
/// DefIndex (if any).
#[inline(never)]
+++ /dev/null
-//! Builder types for generating the "item data" section of the
-//! metadata. This section winds up looking like this:
-//!
-//! ```
-//! <common::data> // big list of item-like things...
-//! <common::data_item> // ...for most `DefId`s, there is an entry.
-//! </common::data_item>
-//! </common::data>
-//! ```
-//!
-//! As we generate this listing, we collect the offset of each
-//! `data_item` entry and store it in an index. Then, when we load the
-//! metadata, we can skip right to the metadata for a particular item.
-//!
-//! In addition to the offset, we need to track the data that was used
-//! to generate the contents of each `data_item`. This is so that we
-//! can figure out which HIR nodes contributed to that data for
-//! incremental compilation purposes.
-//!
-//! The `IndexBuilder` facilitates both of these. It is created
-//! with an `EncodingContext` (`ecx`), which it encapsulates.
-//! It has one main method, `record()`. You invoke `record`
-//! like so to create a new `data_item` element in the list:
-//!
-//! ```
-//! index.record(some_def_id, callback_fn, data)
-//! ```
-//!
-//! What record will do is to (a) record the current offset, (b) emit
-//! the `common::data_item` tag, and then call `callback_fn` with the
-//! given data as well as the `EncodingContext`. Once `callback_fn`
-//! returns, the `common::data_item` tag will be closed.
-//!
-//! `EncodingContext` does not offer the `record` method, so that we
-//! can ensure that `common::data_item` elements are never nested.
-//!
-//! In addition, while the `callback_fn` is executing, we will push a
-//! task `MetaData(some_def_id)`, which can then observe the
-//! reads/writes that occur in the task. For this reason, the `data`
-//! argument that is given to the `callback_fn` must implement the
-//! trait `DepGraphRead`, which indicates how to register reads on the
-//! data in this new task (note that many types of data, such as
-//! `DefId`, do not currently require any reads to be registered,
-//! since they are not derived from a HIR node). This is also why we
-//! give a callback fn, rather than taking a closure: it allows us to
-//! easily control precisely what data is given to that fn.
-
-use crate::encoder::EncodeContext;
-use crate::index::Index;
-use crate::schema::*;
-use crate::isolated_encoder::IsolatedEncoder;
-
-use rustc::hir;
-use rustc::hir::def_id::DefId;
-use rustc::ty::TyCtxt;
-use syntax::ast;
-
-use std::ops::{Deref, DerefMut};
-
-/// Builder that can encode new items, adding them into the index.
-/// Item encoding cannot be nested.
-pub struct IndexBuilder<'a, 'b: 'a, 'tcx: 'b> {
- items: Index,
- pub ecx: &'a mut EncodeContext<'b, 'tcx>,
-}
-
-impl<'a, 'b, 'tcx> Deref for IndexBuilder<'a, 'b, 'tcx> {
- type Target = EncodeContext<'b, 'tcx>;
- fn deref(&self) -> &Self::Target {
- self.ecx
- }
-}
-
-impl<'a, 'b, 'tcx> DerefMut for IndexBuilder<'a, 'b, 'tcx> {
- fn deref_mut(&mut self) -> &mut Self::Target {
- self.ecx
- }
-}
-
-impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
- pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
- IndexBuilder {
- items: Index::new(ecx.tcx.hir().definitions().def_index_count()),
- ecx,
- }
- }
-
- /// Emit the data for a `DefId` to the metadata. The function to
- /// emit the data is `op`, and it will be given `data` as
- /// arguments. This `record` function will call `op` to generate
- /// the `Entry` (which may point to other encoded information)
- /// and will then record the `Lazy<Entry>` for use in the index.
- ///
- /// In addition, it will setup a dep-graph task to track what data
- /// `op` accesses to generate the metadata, which is later used by
- /// incremental compilation to compute a hash for the metadata and
- /// track changes.
- ///
- /// The reason that `op` is a function pointer, and not a closure,
- /// is that we want to be able to completely track all data it has
- /// access to, so that we can be sure that `DATA: DepGraphRead`
- /// holds, and that it is therefore not gaining "secret" access to
- /// bits of HIR or other state that would not be trackd by the
- /// content system.
- pub fn record<'x, DATA>(&'x mut self,
- id: DefId,
- op: fn(&mut IsolatedEncoder<'x, 'b, 'tcx>, DATA) -> Entry<'tcx>,
- data: DATA)
- where DATA: DepGraphRead
- {
- assert!(id.is_local());
-
- // We don't track this since we are explicitly computing the incr. comp.
- // hashes anyway. In theory we could do some tracking here and use it to
- // avoid rehashing things (and instead cache the hashes) but it's
- // unclear whether that would be a win since hashing is cheap enough.
- self.ecx.tcx.dep_graph.with_ignore(move || {
- let mut entry_builder = IsolatedEncoder::new(self.ecx);
- let entry = op(&mut entry_builder, data);
- let entry = entry_builder.lazy(&entry);
-
- self.items.record(id, entry);
- })
- }
-
- pub fn into_items(self) -> Index {
- self.items
- }
-}
-
-/// Trait used for data that can be passed from outside a dep-graph
-/// task. The data must either be of some safe type, such as a
-/// `DefId` index, or implement the `read` method so that it can add
-/// a read of whatever dep-graph nodes are appropriate.
-pub trait DepGraphRead {
- fn read(&self, tcx: TyCtxt<'_, '_, '_>);
-}
-
-impl DepGraphRead for DefId {
- fn read(&self, _tcx: TyCtxt<'_, '_, '_>) {}
-}
-
-impl DepGraphRead for ast::NodeId {
- fn read(&self, _tcx: TyCtxt<'_, '_, '_>) {}
-}
-
-impl<T> DepGraphRead for Option<T>
- where T: DepGraphRead
-{
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- match *self {
- Some(ref v) => v.read(tcx),
- None => (),
- }
- }
-}
-
-impl<T> DepGraphRead for [T]
- where T: DepGraphRead
-{
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- for i in self {
- i.read(tcx);
- }
- }
-}
-
-macro_rules! read_tuple {
- ($($name:ident),*) => {
- impl<$($name),*> DepGraphRead for ($($name),*)
- where $($name: DepGraphRead),*
- {
- #[allow(non_snake_case)]
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- let &($(ref $name),*) = self;
- $($name.read(tcx);)*
- }
- }
- }
-}
-read_tuple!(A, B);
-read_tuple!(A, B, C);
-
-macro_rules! read_hir {
- ($t:ty) => {
- impl<'tcx> DepGraphRead for &'tcx $t {
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- tcx.hir().read(self.hir_id);
- }
- }
- }
-}
-read_hir!(hir::Item);
-read_hir!(hir::ImplItem);
-read_hir!(hir::TraitItem);
-read_hir!(hir::ForeignItem);
-read_hir!(hir::MacroDef);
-
-/// Leaks access to a value of type T without any tracking. This is
-/// suitable for ambiguous types like `usize`, which *could* represent
-/// tracked data (e.g., if you read it out of a HIR node) or might not
-/// (e.g., if it's an index). Adding in an `Untracked` is an
-/// assertion, essentially, that the data does not need to be tracked
-/// (or that read edges will be added by some other way).
-///
-/// A good idea is to add to each use of `Untracked` an explanation of
-/// why this value is ok.
-pub struct Untracked<T>(pub T);
-
-impl<T> DepGraphRead for Untracked<T> {
- fn read(&self, _tcx: TyCtxt<'_, '_, '_>) {}
-}
-
-/// Newtype that can be used to package up misc data extracted from a
-/// HIR node that doesn't carry its own ID. This will allow an
-/// arbitrary `T` to be passed in, but register a read on the given
-/// `NodeId`.
-pub struct FromId<T>(pub hir::HirId, pub T);
-
-impl<T> DepGraphRead for FromId<T> {
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- tcx.hir().read(self.0);
- }
-}
+++ /dev/null
-use crate::encoder::EncodeContext;
-use crate::schema::{Lazy, LazySeq};
-use rustc::ty::TyCtxt;
-use rustc_serialize::Encodable;
-
-/// The IsolatedEncoder provides facilities to write to crate metadata while
-/// making sure that anything going through it is also feed into an ICH hasher.
-pub struct IsolatedEncoder<'a, 'b: 'a, 'tcx: 'b> {
- pub tcx: TyCtxt<'b, 'tcx, 'tcx>,
- ecx: &'a mut EncodeContext<'b, 'tcx>,
-}
-
-impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
-
- pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
- let tcx = ecx.tcx;
- IsolatedEncoder {
- tcx,
- ecx,
- }
- }
-
- pub fn lazy<T>(&mut self, value: &T) -> Lazy<T>
- where T: Encodable
- {
- self.ecx.lazy(value)
- }
-
- pub fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
- where I: IntoIterator<Item = T>,
- T: Encodable
- {
- self.ecx.lazy_seq(iter)
- }
-
- pub fn lazy_seq_ref<'x, I, T>(&mut self, iter: I) -> LazySeq<T>
- where I: IntoIterator<Item = &'x T>,
- T: 'x + Encodable
- {
- self.ecx.lazy_seq_ref(iter)
- }
-
- pub fn lazy_seq_from_slice<T>(&mut self, slice: &[T]) -> LazySeq<T>
- where T: Encodable
- {
- self.ecx.lazy_seq_ref(slice.iter())
- }
-}
#![feature(box_patterns)]
#![feature(drain_filter)]
+#![feature(in_band_lifetimes)]
#![feature(libc)]
#![feature(nll)]
#![feature(proc_macro_internals)]
mod error_codes;
-mod index_builder;
mod index;
mod encoder;
mod decoder;
mod cstore_impl;
-mod isolated_encoder;
mod schema;
mod native_libs;
mod link_args;
use rustc::hir;
use rustc::hir::def::{self, CtorKind};
-use rustc::hir::def_id::{DefIndex, DefId, CrateNum};
-use rustc::ich::StableHashingContext;
+use rustc::hir::def_id::{DefIndex, DefId};
+use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel};
use rustc::middle::cstore::{DepKind, LinkagePreference, NativeLibrary, ForeignModule};
use rustc::middle::lang_items;
use rustc::mir;
use syntax_pos::{self, Span};
use std::marker::PhantomData;
-use std::mem;
-
-use rustc_data_structures::stable_hasher::{StableHasher, HashStable,
- StableHasherResult};
pub fn rustc_version() -> String {
format!("rustc {}",
impl<T> serialize::UseSpecializedEncodable for Lazy<T> {}
impl<T> serialize::UseSpecializedDecodable for Lazy<T> {}
-impl<CTX, T> HashStable<CTX> for Lazy<T> {
- fn hash_stable<W: StableHasherResult>(&self,
- _: &mut CTX,
- _: &mut StableHasher<W>) {
- // There's nothing to do. Whatever got encoded within this Lazy<>
- // wrapper has already been hashed.
- }
-}
-
/// A sequence of type T referred to by its absolute position
/// in the metadata and length, and which can be decoded lazily.
/// The sequence is a single node for the purposes of `Lazy`.
impl<T> serialize::UseSpecializedEncodable for LazySeq<T> {}
impl<T> serialize::UseSpecializedDecodable for LazySeq<T> {}
-impl<CTX, T> HashStable<CTX> for LazySeq<T> {
- fn hash_stable<W: StableHasherResult>(&self,
- _: &mut CTX,
- _: &mut StableHasher<W>) {
- // There's nothing to do. Whatever got encoded within this Lazy<>
- // wrapper has already been hashed.
- }
-}
-
/// Encoding / decoding state for `Lazy` and `LazySeq`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum LazyState {
}
#[derive(RustcEncodable, RustcDecodable)]
-pub struct CrateRoot {
+pub struct CrateRoot<'tcx> {
pub name: Symbol,
pub triple: TargetTriple,
pub extra_filename: String,
pub source_map: LazySeq<syntax_pos::SourceFile>,
pub def_path_table: Lazy<hir::map::definitions::DefPathTable>,
pub impls: LazySeq<TraitImpls>,
- pub exported_symbols: EncodedExportedSymbols,
+ pub exported_symbols: LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)>,
pub interpret_alloc_index: LazySeq<u32>,
- pub index: LazySeq<index::Index>,
+ pub entries_index: LazySeq<index::Index<'tcx>>,
pub compiler_builtins: bool,
pub needs_allocator: bool,
pub extra_filename: String,
}
-impl_stable_hash_for!(struct CrateDep {
- name,
- hash,
- kind,
- extra_filename
-});
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitImpls {
pub trait_id: (u32, DefIndex),
pub impls: LazySeq<DefIndex>,
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TraitImpls {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let TraitImpls {
- trait_id: (krate, def_index),
- ref impls,
- } = *self;
-
- DefId {
- krate: CrateNum::from_u32(krate),
- index: def_index
- }.hash_stable(hcx, hasher);
- impls.hash_stable(hcx, hasher);
- }
-}
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct Entry<'tcx> {
pub kind: EntryKind<'tcx>,
pub mir: Option<Lazy<mir::Mir<'tcx>>>,
}
-impl_stable_hash_for!(struct Entry<'tcx> {
- kind,
- visibility,
- span,
- attributes,
- children,
- stability,
- deprecation,
- ty,
- inherent_impls,
- variances,
- generics,
- predicates,
- predicates_defined_on,
- mir
-});
-
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub enum EntryKind<'tcx> {
Const(ConstQualif, Lazy<RenderedConst>),
TraitAlias(Lazy<TraitAliasData<'tcx>>),
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for EntryKind<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- mem::discriminant(self).hash_stable(hcx, hasher);
- match *self {
- EntryKind::ImmStatic |
- EntryKind::MutStatic |
- EntryKind::ForeignImmStatic |
- EntryKind::ForeignMutStatic |
- EntryKind::ForeignMod |
- EntryKind::GlobalAsm |
- EntryKind::ForeignType |
- EntryKind::Field |
- EntryKind::Existential |
- EntryKind::Type |
- EntryKind::TypeParam |
- EntryKind::ConstParam => {
- // Nothing else to hash here.
- }
- EntryKind::Const(qualif, ref const_data) => {
- qualif.hash_stable(hcx, hasher);
- const_data.hash_stable(hcx, hasher);
- }
- EntryKind::Enum(ref repr_options) => {
- repr_options.hash_stable(hcx, hasher);
- }
- EntryKind::Variant(ref variant_data) => {
- variant_data.hash_stable(hcx, hasher);
- }
- EntryKind::Struct(ref variant_data, ref repr_options) |
- EntryKind::Union(ref variant_data, ref repr_options) => {
- variant_data.hash_stable(hcx, hasher);
- repr_options.hash_stable(hcx, hasher);
- }
- EntryKind::Fn(ref fn_data) |
- EntryKind::ForeignFn(ref fn_data) => {
- fn_data.hash_stable(hcx, hasher);
- }
- EntryKind::Mod(ref mod_data) => {
- mod_data.hash_stable(hcx, hasher);
- }
- EntryKind::MacroDef(ref macro_def) => {
- macro_def.hash_stable(hcx, hasher);
- }
- EntryKind::Generator(data) => {
- data.hash_stable(hcx, hasher);
- }
- EntryKind::Closure(closure_data) => {
- closure_data.hash_stable(hcx, hasher);
- }
- EntryKind::Trait(ref trait_data) => {
- trait_data.hash_stable(hcx, hasher);
- }
- EntryKind::TraitAlias(ref trait_alias_data) => {
- trait_alias_data.hash_stable(hcx, hasher);
- }
- EntryKind::Impl(ref impl_data) => {
- impl_data.hash_stable(hcx, hasher);
- }
- EntryKind::Method(ref method_data) => {
- method_data.hash_stable(hcx, hasher);
- }
- EntryKind::AssociatedExistential(associated_container) |
- EntryKind::AssociatedType(associated_container) => {
- associated_container.hash_stable(hcx, hasher);
- }
- EntryKind::AssociatedConst(associated_container, qualif, ref const_data) => {
- associated_container.hash_stable(hcx, hasher);
- qualif.hash_stable(hcx, hasher);
- const_data.hash_stable(hcx, hasher);
- }
- }
- }
-}
-
/// Additional data for EntryKind::Const and EntryKind::AssociatedConst
#[derive(Clone, Copy, RustcEncodable, RustcDecodable)]
pub struct ConstQualif {
pub ast_promotable: bool,
}
-impl_stable_hash_for!(struct ConstQualif { mir, ast_promotable });
-
/// Contains a constant which has been rendered to a String.
/// Used by rustdoc.
#[derive(RustcEncodable, RustcDecodable)]
pub struct RenderedConst(pub String);
-impl<'a> HashStable<StableHashingContext<'a>> for RenderedConst {
- #[inline]
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- self.0.hash_stable(hcx, hasher);
- }
-}
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct ModData {
pub reexports: LazySeq<def::Export<hir::HirId>>,
}
-impl_stable_hash_for!(struct ModData { reexports });
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct MacroDef {
pub body: String,
pub legacy: bool,
}
-impl_stable_hash_for!(struct MacroDef { body, legacy });
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct FnData<'tcx> {
pub constness: hir::Constness,
pub sig: Lazy<ty::PolyFnSig<'tcx>>,
}
-impl_stable_hash_for!(struct FnData<'tcx> { constness, arg_names, sig });
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct VariantData<'tcx> {
pub ctor_kind: CtorKind,
pub ctor_sig: Option<Lazy<ty::PolyFnSig<'tcx>>>,
}
-impl_stable_hash_for!(struct VariantData<'tcx> {
- ctor_kind,
- discr,
- ctor,
- ctor_sig
-});
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitData<'tcx> {
pub unsafety: hir::Unsafety,
pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>,
}
-impl_stable_hash_for!(struct TraitData<'tcx> {
- unsafety,
- paren_sugar,
- has_auto_impl,
- is_marker,
- super_predicates
-});
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitAliasData<'tcx> {
pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>,
}
-impl_stable_hash_for!(struct TraitAliasData<'tcx> {
- super_predicates
-});
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct ImplData<'tcx> {
pub polarity: hir::ImplPolarity,
pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>,
}
-impl_stable_hash_for!(struct ImplData<'tcx> {
- polarity,
- defaultness,
- parent_impl,
- coerce_unsized_info,
- trait_ref
-});
-
/// Describes whether the container of an associated item
/// is a trait or an impl and whether, in a trait, it has
ImplFinal,
}
-impl_stable_hash_for!(enum crate::schema::AssociatedContainer {
- TraitRequired,
- TraitWithDefault,
- ImplDefault,
- ImplFinal
-});
-
impl AssociatedContainer {
pub fn with_def_id(&self, def_id: DefId) -> ty::AssociatedItemContainer {
match *self {
pub container: AssociatedContainer,
pub has_self: bool,
}
-impl_stable_hash_for!(struct MethodData<'tcx> { fn_data, container, has_self });
#[derive(RustcEncodable, RustcDecodable)]
pub struct ClosureData<'tcx> {
pub sig: Lazy<ty::PolyFnSig<'tcx>>,
}
-impl_stable_hash_for!(struct ClosureData<'tcx> { sig });
#[derive(RustcEncodable, RustcDecodable)]
pub struct GeneratorData<'tcx> {
pub layout: mir::GeneratorLayout<'tcx>,
}
-impl_stable_hash_for!(struct GeneratorData<'tcx> { layout });
// Tags used for encoding Spans:
pub const TAG_VALID_SPAN: u8 = 0;
pub const TAG_INVALID_SPAN: u8 = 1;
-
-#[derive(RustcEncodable, RustcDecodable)]
-pub struct EncodedExportedSymbols {
- pub position: usize,
- pub len: usize,
-}
}
impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
-
/// Adds a suggestion when a closure is invoked twice with a moved variable or when a closure
/// is moved after being invoked.
///
diag.warn(
"this error has been downgraded to a warning for backwards \
compatibility with previous releases",
- );
- diag.warn(
+ ).warn(
"this represents potential undefined behavior in your code and \
this warning will become a hard error in the future",
+ ).note(
+ "for more information, try `rustc --explain E0729`"
);
}
}
use rustc::ty::{self, Const, DefIdTree, Ty, TyS, TyCtxt};
use rustc_data_structures::indexed_vec::Idx;
use syntax_pos::Span;
-use syntax_pos::symbol::keywords;
+use syntax_pos::symbol::kw;
use crate::dataflow::move_paths::InitLocation;
use crate::borrow_check::MirBorrowckCtxt;
// Deliberately fall into this case for all implicit self types,
// so that we don't fall in to the next case with them.
*kind == mir::ImplicitSelfKind::MutRef
- } else if Some(keywords::SelfLower.name()) == local_decl.name {
+ } else if Some(kw::SelfLower) == local_decl.name {
// Otherwise, check if the name is the self kewyord - in which case
// we have an explicit self. Do the same thing in this case and check
// for a `self: &mut Self` to suggest removing the `&mut`.
use rustc_errors::{Diagnostic, DiagnosticBuilder};
use std::collections::VecDeque;
use syntax::errors::Applicability;
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax_pos::Span;
mod region_name;
"add_static_impl_trait_suggestion: has_static_predicate={:?}",
has_static_predicate
);
- let static_str = keywords::StaticLifetime.name();
+ let static_str = kw::StaticLifetime;
// If there is a static predicate, then the only sensible suggestion is to replace
// fr with `'static`.
if has_static_predicate {
use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt};
use rustc::ty::print::RegionHighlightMode;
use rustc_errors::DiagnosticBuilder;
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax_pos::Span;
use syntax_pos::symbol::InternedString;
}
ty::ReStatic => Some(RegionName {
- name: keywords::StaticLifetime.name().as_interned_str(),
+ name: kw::StaticLifetime.as_interned_str(),
source: RegionNameSource::Static
}),
use std::u32;
use rustc_target::spec::abi::Abi;
use syntax::attr::{self, UnwindAttr};
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax_pos::Span;
use super::lints;
fn_span: Span,
arg_count: usize,
+ is_generator: bool,
/// The current set of scopes, updated as we traverse;
/// see the `scope` module for more details.
ty::UpvarCapture::ByRef(..) => true,
};
let mut debuginfo = UpvarDebuginfo {
- debug_name: keywords::Invalid.name(),
+ debug_name: kw::Invalid,
by_ref,
};
let mut mutability = Mutability::Not;
return_ty,
return_ty_span,
upvar_debuginfo,
- upvar_mutbls);
+ upvar_mutbls,
+ body.is_generator);
let call_site_scope = region::Scope {
id: body.value.hir_id.local_id,
const_ty_span,
vec![],
vec![],
+ false,
);
let mut block = START_BLOCK;
let owner_id = hir.tcx().hir().body_owner(body_id);
let span = hir.tcx().hir().span(owner_id);
let ty = hir.tcx().types.err;
- let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty, span, vec![], vec![]);
+ let mut builder = Builder::new(hir, span, 0, Safety::Safe, ty, span, vec![], vec![], false);
let source_info = builder.source_info(span);
builder.cfg.terminate(START_BLOCK, source_info, TerminatorKind::Unreachable);
builder.finish(None)
return_ty: Ty<'tcx>,
return_span: Span,
__upvar_debuginfo_codegen_only_do_not_use: Vec<UpvarDebuginfo>,
- upvar_mutbls: Vec<Mutability>)
+ upvar_mutbls: Vec<Mutability>,
+ is_generator: bool)
-> Builder<'a, 'gcx, 'tcx> {
let lint_level = LintLevel::Explicit(hir.root_lint_level);
let mut builder = Builder {
cfg: CFG { basic_blocks: IndexVec::new() },
fn_span: span,
arg_count,
+ is_generator,
scopes: vec![],
block_context: BlockContext::new(),
source_scopes: IndexVec::new(),
use rustc::ty::Ty;
use rustc::hir;
use rustc::mir::*;
-use syntax_pos::{Span};
+use syntax_pos::{Span, DUMMY_SP};
use rustc_data_structures::fx::FxHashMap;
use std::collections::hash_map::Entry;
+use std::mem;
#[derive(Debug)]
pub struct Scope<'tcx> {
/// * polluting the cleanup MIR with StorageDead creates
/// landing pads even though there's no actual destructors
/// * freeing up stack space has no effect during unwinding
+ /// Note that for generators we do emit StorageDeads, for the
+ /// use of optimizations in the MIR generator transform.
needs_cleanup: bool,
/// set of places to drop when exiting this scope. This starts
/// This path terminates in GeneratorDrop. Returns the start of the path.
/// None indicates there’s no cleanup to do at this point.
pub fn generator_drop_cleanup(&mut self) -> Option<BasicBlock> {
- if !self.scopes.iter().any(|scope| scope.needs_cleanup) {
- return None;
- }
-
// Fill in the cache for unwinds
self.diverge_cleanup_gen(true);
let result = block;
while let Some(scope) = scopes.next() {
- if !scope.needs_cleanup {
+ if !scope.needs_cleanup && !self.is_generator {
continue;
}
for scope in self.scopes[first_uncached..].iter_mut() {
target = build_diverge_scope(&mut self.cfg, scope.region_scope_span,
- scope, target, generator_drop);
+ scope, target, generator_drop, self.is_generator);
}
target
// drops panic (panicking while unwinding will abort, so there's no need for
// another set of arrows). The drops for the unwind path should have already
// been generated by `diverge_cleanup_gen`.
- //
- // The code in this function reads from right to left.
- // Storage dead drops have to be done left to right (since we can only push
- // to the end of a Vec). So, we find the next drop and then call
- // push_storage_deads which will iterate backwards through them so that
- // they are added in the correct order.
let mut unwind_blocks = scope.drops.iter().rev().filter_map(|drop_data| {
if let DropKind::Value { cached_block } = drop_data.kind {
block = next;
}
DropKind::Storage => {
- // We do not need to emit StorageDead for generator drops
- if generator_drop {
- continue
- }
-
// Drop the storage for both value and storage drops.
// Only temps and vars need their storage dead.
match drop_data.location {
span: Span,
scope: &mut Scope<'tcx>,
mut target: BasicBlock,
- generator_drop: bool)
+ generator_drop: bool,
+ is_generator: bool)
-> BasicBlock
{
// Build up the drops in **reverse** order. The end result will
scope: source_scope
};
- // Next, build up the drops. Here we iterate the vector in
+ // We keep track of StorageDead statements to prepend to our current block
+ // and store them here, in reverse order.
+ let mut storage_deads = vec![];
+
+ let mut target_built_by_us = false;
+
+ // Build up the drops. Here we iterate the vector in
// *forward* order, so that we generate drops[0] first (right to
// left in diagram above).
for (j, drop_data) in scope.drops.iter_mut().enumerate() {
debug!("build_diverge_scope drop_data[{}]: {:?}", j, drop_data);
// Only full value drops are emitted in the diverging path,
- // not StorageDead.
+ // not StorageDead, except in the case of generators.
//
// Note: This may not actually be what we desire (are we
// "freeing" stack storage as we unwind, or merely observing a
// frozen stack)? In particular, the intent may have been to
// match the behavior of clang, but on inspection eddyb says
// this is not what clang does.
- let cached_block = match drop_data.kind {
- DropKind::Value { ref mut cached_block } => cached_block.ref_mut(generator_drop),
- DropKind::Storage => continue
- };
- target = if let Some(cached_block) = *cached_block {
- cached_block
- } else {
- let block = cfg.start_new_cleanup_block();
- cfg.terminate(block, source_info(drop_data.span),
- TerminatorKind::Drop {
- location: drop_data.location.clone(),
- target,
- unwind: None
- });
- *cached_block = Some(block);
- block
+ match drop_data.kind {
+ DropKind::Storage if is_generator => {
+ // Only temps and vars need their storage dead.
+ match drop_data.location {
+ Place::Base(PlaceBase::Local(index)) => {
+ storage_deads.push(Statement {
+ source_info: source_info(drop_data.span),
+ kind: StatementKind::StorageDead(index)
+ });
+ }
+ _ => unreachable!(),
+ };
+ }
+ DropKind::Storage => {}
+ DropKind::Value { ref mut cached_block } => {
+ let cached_block = cached_block.ref_mut(generator_drop);
+ target = if let Some(cached_block) = *cached_block {
+ storage_deads.clear();
+ target_built_by_us = false;
+ cached_block
+ } else {
+ push_storage_deads(
+ cfg, &mut target, &mut storage_deads, target_built_by_us, source_scope);
+ let block = cfg.start_new_cleanup_block();
+ cfg.terminate(block, source_info(drop_data.span),
+ TerminatorKind::Drop {
+ location: drop_data.location.clone(),
+ target,
+ unwind: None
+ });
+ *cached_block = Some(block);
+ target_built_by_us = true;
+ block
+ };
+ }
};
}
-
+ push_storage_deads(cfg, &mut target, &mut storage_deads, target_built_by_us, source_scope);
*scope.cached_unwind.ref_mut(generator_drop) = Some(target);
+ assert!(storage_deads.is_empty());
debug!("build_diverge_scope({:?}, {:?}) = {:?}", scope, span, target);
target
}
+
+/// Prepends the collected `StorageDead` statements to the `target` block.
+///
+/// The caller pushes statements into `storage_deads` in reverse order, so
+/// they are reversed here before being spliced in front of `target`'s
+/// existing statements. If `target` was not built by us, it must not be
+/// mutated, so a fresh cleanup block that jumps to the old `target` is
+/// created and `target` is redirected to it. `storage_deads` is left
+/// empty on return (asserted below).
+fn push_storage_deads(cfg: &mut CFG<'tcx>,
+                      target: &mut BasicBlock,
+                      storage_deads: &mut Vec<Statement<'tcx>>,
+                      target_built_by_us: bool,
+                      source_scope: SourceScope) {
+    if storage_deads.is_empty() { return; }
+    if !target_built_by_us {
+        // We cannot add statements to an existing block, so we create a new
+        // block for our StorageDead statements.
+        let block = cfg.start_new_cleanup_block();
+        let source_info = SourceInfo { span: DUMMY_SP, scope: source_scope };
+        cfg.terminate(block, source_info, TerminatorKind::Goto { target: *target });
+        *target = block;
+    }
+    let statements = &mut cfg.block_data_mut(*target).statements;
+    storage_deads.reverse();
+    debug!("push_storage_deads({:?}), storage_deads={:?}, statements={:?}",
+           *target, storage_deads, statements);
+    // Splice: move the block's statements after the (now ordered) deads,
+    // then swap the vectors so the block owns the combined list and
+    // `storage_deads` is left holding the (empty) old buffer.
+    storage_deads.append(statements);
+    mem::swap(statements, storage_deads);
+    assert!(storage_deads.is_empty());
+}
}
fn terminator_effect(&self,
-                         _sets: &mut BlockSets<'_, Local>,
-                         _loc: Location) {
-        // Terminators have no effect
+                         sets: &mut BlockSets<'_, Local>,
+                         loc: Location) {
+        // A `Drop` terminator drops the value at `location`; when that
+        // place is a local, kill it in the dataflow sets. All other
+        // terminator kinds leave the sets unchanged.
+        match &self.mir[loc.block].terminator().kind {
+            TerminatorKind::Drop { location, .. } => if let Some(l) = location.local() {
+                sets.kill(l);
+            }
+            _ => (),
+        }
    }
fn propagate_call_return(
```
"##,
+E0729: r##"
+Support for Non-Lexical Lifetimes (NLL) has been included in the Rust compiler
+since 1.31, and has been enabled on the 2015 edition since 1.36. The new borrow
+checker for NLL uncovered some bugs in the old borrow checker, which in some
+cases allowed unsound code to compile, resulting in memory safety issues.
+
+### What do I do?
+
+Change your code so that the warning no longer triggers. For backwards
+compatibility, this unsound code may still compile (with a warning) right now.
+However, at some point in the future, the compiler will no longer accept this
+code and will throw a hard error.
+
+### Shouldn't you fix the old borrow checker?
+
+The old borrow checker has known soundness issues that are basically impossible
+to fix. The new NLL-based borrow checker is the fix.
+
+### Can I turn these warnings into errors by denying a lint?
+
+No.
+
+### When are these warnings going to turn into errors?
+
+No formal timeline for turning the warnings into errors has been set. See
+[GitHub issue 58781](https://github.com/rust-lang/rust/issues/58781) for more
+information.
+
+### Why do I get this message with code that doesn't involve borrowing?
+
+There are some known bugs that trigger this message.
+"##,
}
register_diagnostics! {
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use rustc_target::spec::abi::Abi;
-use syntax_pos::Span;
+use syntax_pos::{Span, sym};
use std::fmt;
use std::iter;
}
ty::InstanceDef::CloneShim(def_id, ty) => {
let name = tcx.item_name(def_id);
- if name == "clone" {
+ if name == sym::clone {
build_clone_shim(tcx, def_id, ty)
- } else if name == "clone_from" {
+ } else if name == sym::clone_from {
debug!("make_shim({:?}: using default trait implementation", instance);
return tcx.optimized_mir(def_id);
} else {
// Create a statement which reads the discriminant into a temporary
fn get_discr(&self, mir: &mut Mir<'tcx>) -> (Statement<'tcx>, Place<'tcx>) {
let temp_decl = LocalDecl::new_internal(self.tcx.types.isize, mir.span);
- let temp = Place::Base(PlaceBase::Local(Local::new(mir.local_decls.len())));
- mir.local_decls.push(temp_decl);
+ let local_decls_len = mir.local_decls.push(temp_decl);
+ let temp = Place::Base(PlaceBase::Local(local_decls_len));
let self_place = Place::Base(PlaceBase::Local(self_arg()));
let assign = Statement {
// Ensure the `IDX` values are sequential (`0..QUALIF_COUNT`).
macro_rules! static_assert_seq_qualifs {
($i:expr => $first:ident $(, $rest:ident)*) => {
- static_assert!(SEQ_QUALIFS: {
+ static_assert!({
static_assert_seq_qualifs!($i + 1 => $($rest),*);
$first::IDX == $i
});
};
($i:expr =>) => {
- static_assert!(SEQ_QUALIFS: QUALIF_COUNT == $i);
+ static_assert!(QUALIF_COUNT == $i);
};
}
static_assert_seq_qualifs!(
context: PlaceContext,
location: Location) {
debug!("visit_place: place={:?} context={:?} location={:?}", place, context, location);
- self.super_place(place, context, location);
- match *place {
- Place::Base(PlaceBase::Local(_)) => {}
- Place::Base(PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_), .. })) => {
- unreachable!()
- }
- Place::Base(PlaceBase::Static(box Static{ kind: StaticKind::Static(def_id), .. })) => {
- if self.tcx
- .get_attrs(def_id)
- .iter()
- .any(|attr| attr.check_name(sym::thread_local)) {
- if self.mode != Mode::Fn {
- span_err!(self.tcx.sess, self.span, E0625,
- "thread-local statics cannot be \
- accessed at compile-time");
- }
- return;
+ place.iterate(|place_base, place_projections| {
+ match place_base {
+ PlaceBase::Local(_) => {}
+ PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_), .. }) => {
+ unreachable!()
}
+ PlaceBase::Static(box Static{ kind: StaticKind::Static(def_id), .. }) => {
+ if self.tcx
+ .get_attrs(*def_id)
+ .iter()
+ .any(|attr| attr.check_name(sym::thread_local)) {
+ if self.mode != Mode::Fn {
+ span_err!(self.tcx.sess, self.span, E0625,
+ "thread-local statics cannot be \
+ accessed at compile-time");
+ }
+ return;
+ }
- // Only allow statics (not consts) to refer to other statics.
- if self.mode == Mode::Static || self.mode == Mode::StaticMut {
- if self.mode == Mode::Static && context.is_mutating_use() {
- // this is not strictly necessary as miri will also bail out
- // For interior mutability we can't really catch this statically as that
- // goes through raw pointers and intermediate temporaries, so miri has
- // to catch this anyway
- self.tcx.sess.span_err(
- self.span,
- "cannot mutate statics in the initializer of another static",
- );
+ // Only allow statics (not consts) to refer to other statics.
+ if self.mode == Mode::Static || self.mode == Mode::StaticMut {
+ if self.mode == Mode::Static && context.is_mutating_use() {
+ // this is not strictly necessary as miri will also bail out
+ // For interior mutability we can't really catch this statically as that
+ // goes through raw pointers and intermediate temporaries, so miri has
+ // to catch this anyway
+ self.tcx.sess.span_err(
+ self.span,
+ "cannot mutate statics in the initializer of another static",
+ );
+ }
+ return;
}
- return;
- }
- unleash_miri!(self);
+ unleash_miri!(self);
- if self.mode != Mode::Fn {
- let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
- "{}s cannot refer to statics, use \
- a constant instead", self.mode);
- if self.tcx.sess.teach(&err.get_code().unwrap()) {
- err.note(
- "Static and const variables can refer to other const variables. But a \
- const variable cannot refer to a static variable."
- );
- err.help(
- "To fix this, the value can be extracted as a const and then used."
- );
+ if self.mode != Mode::Fn {
+ let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
+ "{}s cannot refer to statics, use \
+ a constant instead", self.mode);
+ if self.tcx.sess.teach(&err.get_code().unwrap()) {
+ err.note(
+ "Static and const variables can refer to other const variables. \
+ But a const variable cannot refer to a static variable."
+ );
+ err.help(
+ "To fix this, the value can be extracted as a const and then used."
+ );
+ }
+ err.emit()
}
- err.emit()
}
}
- Place::Projection(ref proj) => {
+
+ for proj in place_projections {
match proj.elem {
ProjectionElem::Deref => {
if context.is_mutating_use() {
}
}
}
- }
+ });
}
fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
if let ty::FnDef(def_id, _) = func.ty.sty {
let abi = tcx.fn_sig(def_id).abi();
let name = tcx.item_name(def_id);
- if abi == Abi::RustIntrinsic && name == "rustc_peek" {
+ if abi == Abi::RustIntrinsic && name == sym::rustc_peek {
return Some((args, source_info.span));
}
}
use syntax::ast::*;
use syntax::attr;
use syntax::source_map::Spanned;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::ptr::P;
use syntax::visit::{self, Visitor};
use syntax::{span_err, struct_span_err, walk_list};
}
fn check_lifetime(&self, ident: Ident) {
- let valid_names = [keywords::UnderscoreLifetime.name(),
- keywords::StaticLifetime.name(),
- keywords::Invalid.name()];
+ let valid_names = [kw::UnderscoreLifetime,
+ kw::StaticLifetime,
+ kw::Invalid];
if !valid_names.contains(&ident.name) && ident.without_first_quote().is_reserved() {
self.err_handler().span_err(ident.span, "lifetimes cannot use keyword names");
}
use std::path::PathBuf;
use syntax::ast;
use syntax::span_err;
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax_pos::{Span, DUMMY_SP};
/// Pointer to a registrar function.
for plugin in plugins {
// plugins must have a name and can't be key = value
let name = plugin.name_or_empty();
- if name != keywords::Invalid.name() && !plugin.is_value_str() {
+ if name != kw::Invalid && !plugin.is_value_str() {
let args = plugin.meta_item_list().map(ToOwned::to_owned);
loader.load_plugin(plugin.span(), name, args.unwrap_or_default());
} else {
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
use syntax::ext::base::MacroExpanderFn;
-use syntax::ext::hygiene;
use syntax::symbol::{Symbol, sym};
use syntax::ast;
use syntax::feature_gate::AttributeType;
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
- edition: hygiene::default_edition(),
+ edition: self.sess.edition(),
});
}
use rustc_data_structures::sync::Lrc;
use syntax::ast::Ident;
use syntax::attr;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::Span;
use std::{cmp, fmt, mem};
span: Span, // span of the field pattern, e.g., `x: 0`
def: &'tcx ty::AdtDef, // definition of the struct or enum
field: &'tcx ty::FieldDef) { // definition of the field
- let ident = Ident::new(keywords::Invalid.name(), use_ctxt);
+ let ident = Ident::new(kw::Invalid, use_ctxt);
let current_hir = self.current_item;
let def_id = self.tcx.adjust_ident(ident, def.did, current_hir).1;
if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) {
use syntax::parse::token::{self, Token};
use syntax::span_err;
use syntax::std_inject::injected_crate_name;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP};
}
_ => None,
}.map(|ctxt| Segment::from_ident(Ident::new(
- keywords::PathRoot.name(), use_tree.prefix.span.shrink_to_lo().with_ctxt(ctxt)
+ kw::PathRoot, use_tree.prefix.span.shrink_to_lo().with_ctxt(ctxt)
)));
let prefix = crate_root.into_iter().chain(prefix_iter).collect::<Vec<_>>();
let empty_for_self = |prefix: &[Segment]| {
prefix.is_empty() ||
- prefix.len() == 1 && prefix[0].ident.name == keywords::PathRoot.name()
+ prefix.len() == 1 && prefix[0].ident.name == kw::PathRoot
};
match use_tree.kind {
ast::UseTreeKind::Simple(rename, ..) => {
if nested {
// Correctly handle `self`
- if source.ident.name == keywords::SelfLower.name() {
+ if source.ident.name == kw::SelfLower {
type_ns_only = true;
if empty_for_self(&module_path) {
}
} else {
// Disallow `self`
- if source.ident.name == keywords::SelfLower.name() {
+ if source.ident.name == kw::SelfLower {
resolve_error(self,
use_tree.span,
ResolutionError::SelfImportsOnlyAllowedWithin);
}
// Disallow `use $crate;`
- if source.ident.name == keywords::DollarCrate.name() && module_path.is_empty() {
+ if source.ident.name == kw::DollarCrate && module_path.is_empty() {
let crate_root = self.resolve_crate_root(source.ident);
let crate_name = match crate_root.kind {
ModuleKind::Def(.., name) => name,
// HACK(eddyb) unclear how good this is, but keeping `$crate`
// in `source` breaks `src/test/compile-fail/import-crate-var.rs`,
// while the current crate doesn't have a valid `crate_name`.
- if crate_name != keywords::Invalid.name() {
+ if crate_name != kw::Invalid {
// `crate_name` should not be interpreted as relative.
module_path.push(Segment {
ident: Ident {
- name: keywords::PathRoot.name(),
+ name: kw::PathRoot,
span: source.ident.span,
},
id: Some(self.session.next_node_id()),
}
}
- if ident.name == keywords::Crate.name() {
+ if ident.name == kw::Crate {
self.session.span_err(ident.span,
"crate root imports need to be explicitly named: \
`use crate as name;`");
// Ensure there is at most one `self` in the list
let self_spans = items.iter().filter_map(|&(ref use_tree, _)| {
if let ast::UseTreeKind::Simple(..) = use_tree.kind {
- if use_tree.ident().name == keywords::SelfLower.name() {
+ if use_tree.ident().name == kw::SelfLower {
return Some(use_tree.span);
}
}
let new_span = prefix[prefix.len() - 1].ident.span;
let tree = ast::UseTree {
prefix: ast::Path::from_ident(
- Ident::new(keywords::SelfLower.name(), new_span)
+ Ident::new(kw::SelfLower, new_span)
),
kind: ast::UseTreeKind::Simple(
Some(Ident::from_str_and_span("__dummy", new_span).gensym()),
}
ItemKind::ExternCrate(orig_name) => {
- let module = if orig_name.is_none() && ident.name == keywords::SelfLower.name() {
+ let module = if orig_name.is_none() && ident.name == kw::SelfLower {
self.session
.struct_span_err(item.span, "`extern crate self;` requires renaming")
.span_suggestion(
)
.emit();
return;
- } else if orig_name == Some(keywords::SelfLower.name()) {
+ } else if orig_name == Some(kw::SelfLower) {
self.graph_root
} else {
let crate_id = self.crate_loader.process_extern_crate(item, &self.definitions);
ItemKind::GlobalAsm(..) => {}
- ItemKind::Mod(..) if ident == keywords::Invalid.ident() => {} // Crate root
+ ItemKind::Mod(..) if ident.name == kw::Invalid => {} // Crate root
ItemKind::Mod(..) => {
let def_id = self.definitions.local_def_id(item.id);
"an `extern crate` loading macros must be at the crate root");
}
if let ItemKind::ExternCrate(Some(orig_name)) = item.node {
- if orig_name == keywords::SelfLower.name() {
+ if orig_name == kw::SelfLower {
self.session.span_err(attr.span,
"`macro_use` is not supported on `extern crate self`");
}
use rustc::session::{Session, config::nightly_options};
use syntax::ast::{self, Expr, ExprKind, Ident};
use syntax::ext::base::MacroKind;
-use syntax::symbol::{Symbol, keywords};
+use syntax::symbol::{Symbol, kw};
use syntax_pos::{BytePos, Span};
type Res = def::Res<ast::NodeId>;
let item_span = path.last().unwrap().ident.span;
let (mod_prefix, mod_str) = if path.len() == 1 {
(String::new(), "this scope".to_string())
- } else if path.len() == 2 && path[0].ident.name == keywords::PathRoot.name() {
+ } else if path.len() == 2 && path[0].ident.name == kw::PathRoot {
(String::new(), "the crate root".to_string())
} else {
let mod_path = &path[..path.len() - 1];
match (path.get(0), path.get(1)) {
// `{{root}}::ident::...` on both editions.
// On 2015 `{{root}}` is usually added implicitly.
- (Some(fst), Some(snd)) if fst.ident.name == keywords::PathRoot.name() &&
+ (Some(fst), Some(snd)) if fst.ident.name == kw::PathRoot &&
!snd.ident.is_path_segment_keyword() => {}
// `ident::...` on 2018.
(Some(fst), _) if fst.ident.span.rust_2018() &&
!fst.ident.is_path_segment_keyword() => {
// Insert a placeholder that's later replaced by `self`/`super`/etc.
- path.insert(0, Segment::from_ident(keywords::Invalid.ident()));
+ path.insert(0, Segment::from_ident(Ident::invalid()));
}
_ => return None,
}
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
// Replace first ident with `self` and check if that is valid.
- path[0].ident.name = keywords::SelfLower.name();
+ path[0].ident.name = kw::SelfLower;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_self_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
// Replace first ident with `crate` and check if that is valid.
- path[0].ident.name = keywords::Crate.name();
+ path[0].ident.name = kw::Crate;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_crate_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
// Replace first ident with `crate` and check if that is valid.
- path[0].ident.name = keywords::Super.name();
+ path[0].ident.name = kw::Super;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_super_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
use syntax::ext::base::SyntaxExtension;
use syntax::ext::base::Determinacy::{self, Determined, Undetermined};
use syntax::ext::base::MacroKind;
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::visit::{self, FnKind, Visitor};
self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type);
}
TyKind::ImplicitSelf => {
- let self_ty = keywords::SelfUpper.ident();
+ let self_ty = Ident::with_empty_ctxt(kw::SelfUpper);
let res = self.resolve_ident_in_lexical_scope(self_ty, TypeNS, Some(ty.id), ty.span)
.map_or(Res::Err, |d| d.res());
self.record_partial_res(ty.id, PartialRes::new(res));
is_value: bool
) -> hir::Path {
let root = if crate_root.is_some() {
- keywords::PathRoot
+ kw::PathRoot
} else {
- keywords::Crate
+ kw::Crate
};
- let segments = iter::once(root.ident())
+ let segments = iter::once(Ident::with_empty_ctxt(root))
.chain(
crate_root.into_iter()
.chain(components.iter().cloned())
.map(Ident::with_empty_ctxt)
).map(|i| self.new_ast_path_segment(i)).collect::<Vec<_>>();
-
let path = ast::Path {
span,
segments,
let path = if path_str.starts_with("::") {
ast::Path {
span,
- segments: iter::once(keywords::PathRoot.ident())
+ segments: iter::once(Ident::with_empty_ctxt(kw::PathRoot))
.chain({
path_str.split("::").skip(1).map(Ident::from_str)
})
let root_module_kind = ModuleKind::Def(
DefKind::Mod,
root_def_id,
- keywords::Invalid.name(),
+ kw::Invalid,
);
let graph_root = arenas.alloc_module(ModuleData {
no_implicit_prelude: attr::contains_name(&krate.attrs, sym::no_implicit_prelude),
path_span: Span)
-> Option<LexicalScopeBinding<'a>> {
assert!(ns == TypeNS || ns == ValueNS);
- if ident.name == keywords::Invalid.name() {
+ if ident.name == kw::Invalid {
return Some(LexicalScopeBinding::Res(Res::Err));
}
- ident.span = if ident.name == keywords::SelfUpper.name() {
+ ident.span = if ident.name == kw::SelfUpper {
// FIXME(jseyfried) improve `Self` hygiene
ident.span.with_ctxt(SyntaxContext::empty())
} else if ns == TypeNS {
fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
let mut ctxt = ident.span.ctxt();
- let mark = if ident.name == keywords::DollarCrate.name() {
+ let mark = if ident.name == kw::DollarCrate {
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
// we don't want to pretend that the `macro_rules!` definition is in the `macro`
// as described in `SyntaxContext::apply_mark`, so we ignore prepended modern marks.
let mut self_type_rib = Rib::new(NormalRibKind);
// Plain insert (no renaming, since types are not currently hygienic)
- self_type_rib.bindings.insert(keywords::SelfUpper.ident(), self_res);
+ self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res);
self.ribs[TypeNS].push(self_type_rib);
f(self);
self.ribs[TypeNS].pop();
{
let self_res = Res::SelfCtor(impl_id);
let mut self_type_rib = Rib::new(NormalRibKind);
- self_type_rib.bindings.insert(keywords::SelfUpper.ident(), self_res);
+ self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res);
self.ribs[ValueNS].push(self_type_rib);
f(self);
self.ribs[ValueNS].pop();
}
None => {
// A completely fresh binding, add to the lists if it's valid.
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
bindings.insert(ident, outer_pat_id);
self.ribs[ValueNS].last_mut().unwrap().bindings.insert(ident, res);
}
}
fn self_type_is_available(&mut self, span: Span) -> bool {
- let binding = self.resolve_ident_in_lexical_scope(keywords::SelfUpper.ident(),
+ let binding = self.resolve_ident_in_lexical_scope(Ident::with_empty_ctxt(kw::SelfUpper),
TypeNS, None, span);
if let Some(LexicalScopeBinding::Res(res)) = binding { res != Res::Err } else { false }
}
fn self_value_is_available(&mut self, self_span: Span, path_span: Span) -> bool {
- let ident = Ident::new(keywords::SelfLower.name(), self_span);
+ let ident = Ident::new(kw::SelfLower, self_span);
let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, None, path_span);
if let Some(LexicalScopeBinding::Res(res)) = binding { res != Res::Err } else { false }
}
};
if path.len() > 1 && !global_by_default && result.base_res() != Res::Err &&
- path[0].ident.name != keywords::PathRoot.name() &&
- path[0].ident.name != keywords::DollarCrate.name() {
+ path[0].ident.name != kw::PathRoot &&
+ path[0].ident.name != kw::DollarCrate {
let unqualified_result = {
match self.resolve_path_without_parent_scope(
&[*path.last().unwrap()],
let name = ident.name;
allow_super &= ns == TypeNS &&
- (name == keywords::SelfLower.name() ||
- name == keywords::Super.name());
+ (name == kw::SelfLower ||
+ name == kw::Super);
if ns == TypeNS {
- if allow_super && name == keywords::Super.name() {
+ if allow_super && name == kw::Super {
let mut ctxt = ident.span.ctxt().modern();
let self_module = match i {
0 => Some(self.resolve_self(&mut ctxt, self.current_module)),
};
}
if i == 0 {
- if name == keywords::SelfLower.name() {
+ if name == kw::SelfLower {
let mut ctxt = ident.span.ctxt().modern();
module = Some(ModuleOrUniformRoot::Module(
self.resolve_self(&mut ctxt, self.current_module)));
continue;
}
- if name == keywords::PathRoot.name() && ident.span.rust_2018() {
+ if name == kw::PathRoot && ident.span.rust_2018() {
module = Some(ModuleOrUniformRoot::ExternPrelude);
continue;
}
- if name == keywords::PathRoot.name() &&
+ if name == kw::PathRoot &&
ident.span.rust_2015() && self.session.rust_2018() {
// `::a::b` from 2015 macro on 2018 global edition
module = Some(ModuleOrUniformRoot::CrateRootAndExternPrelude);
continue;
}
- if name == keywords::PathRoot.name() ||
- name == keywords::Crate.name() ||
- name == keywords::DollarCrate.name() {
+ if name == kw::PathRoot ||
+ name == kw::Crate ||
+ name == kw::DollarCrate {
// `::a::b`, `crate::a::b` or `$crate::a::b`
module = Some(ModuleOrUniformRoot::Module(
self.resolve_crate_root(ident)));
// Report special messages for path segment keywords in wrong positions.
if ident.is_path_segment_keyword() && i != 0 {
- let name_str = if name == keywords::PathRoot.name() {
+ let name_str = if name == kw::PathRoot {
"crate root".to_string()
} else {
format!("`{}`", name)
};
- let label = if i == 1 && path[0].ident.name == keywords::PathRoot.name() {
+ let label = if i == 1 && path[0].ident.name == kw::PathRoot {
format!("global paths cannot start with {}", name_str)
} else {
format!("{} in paths can only be used in start position", name_str)
// We're only interested in `use` paths which should start with
// `{{root}}` currently.
- if first_name != keywords::PathRoot.name() {
+ if first_name != kw::PathRoot {
return
}
match path.get(1) {
// If this import looks like `crate::...` it's already good
- Some(Segment { ident, .. }) if ident.name == keywords::Crate.name() => return,
+ Some(Segment { ident, .. }) if ident.name == kw::Crate => return,
// Otherwise go below to see if it's an extern crate
Some(_) => {}
// If the path has length one (and it's `PathRoot` most likely)
{
let mut candidates = Vec::new();
let mut seen_modules = FxHashSet::default();
- let not_local_module = crate_name != keywords::Crate.ident();
+ let not_local_module = crate_name.name != kw::Crate;
let mut worklist = vec![(start_module, Vec::<ast::PathSegment>::new(), not_local_module)];
while let Some((in_module,
where FilterFn: Fn(Res) -> bool
{
let mut suggestions = self.lookup_import_candidates_from_module(
- lookup_ident, namespace, self.graph_root, keywords::Crate.ident(), &filter_fn);
+ lookup_ident, namespace, self.graph_root, Ident::with_empty_ctxt(kw::Crate), &filter_fn
+ );
if lookup_ident.span.rust_2018() {
let extern_prelude_names = self.extern_prelude.clone();
} else {
let ctxt = ident.span.ctxt();
Some(Segment::from_ident(Ident::new(
- keywords::PathRoot.name(), path.span.shrink_to_lo().with_ctxt(ctxt)
+ kw::PathRoot, path.span.shrink_to_lo().with_ctxt(ctxt)
)))
};
}
fn is_self_type(path: &[Segment], namespace: Namespace) -> bool {
- namespace == TypeNS && path.len() == 1 && path[0].ident.name == keywords::SelfUpper.name()
+ namespace == TypeNS && path.len() == 1 && path[0].ident.name == kw::SelfUpper
}
fn is_self_value(path: &[Segment], namespace: Namespace) -> bool {
- namespace == ValueNS && path.len() == 1 && path[0].ident.name == keywords::SelfLower.name()
+ namespace == ValueNS && path.len() == 1 && path[0].ident.name == kw::SelfLower
}
fn names_to_string(idents: &[Ident]) -> String {
let mut result = String::new();
for (i, ident) in idents.iter()
- .filter(|ident| ident.name != keywords::PathRoot.name())
+ .filter(|ident| ident.name != kw::PathRoot)
.enumerate() {
if i > 0 {
result.push_str("::");
use syntax::ext::base::{self, Determinacy};
use syntax::ext::base::{MacroKind, SyntaxExtension};
use syntax::ext::expand::{AstFragment, Invocation, InvocationKind};
-use syntax::ext::hygiene::{self, Mark};
+use syntax::ext::hygiene::Mark;
use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{
feature_err, is_builtin_attr_name, AttributeGate, GateIssue, Stability, BUILTIN_ATTRIBUTES,
};
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::visit::Visitor;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{Span, DUMMY_SP};
}
impl<'a> Visitor<'a> for ResolveDollarCrates<'a, '_> {
fn visit_ident(&mut self, ident: Ident) {
- if ident.name == keywords::DollarCrate.name() {
+ if ident.name == kw::DollarCrate {
let name = match self.resolver.resolve_crate_root(ident).kind {
- ModuleKind::Def(.., name) if name != keywords::Invalid.name() => name,
- _ => keywords::Crate.name(),
+ ModuleKind::Def(.., name) if name != kw::Invalid => name,
+ _ => kw::Crate,
};
ident.span.ctxt().set_dollar_crate_name(name);
}
if kind == MacroKind::Bang && path.len() == 1 &&
path[0].ident.span.ctxt().outer().expn_info()
.map_or(false, |info| info.local_inner_macros) {
- let root = Ident::new(keywords::DollarCrate.name(), path[0].ident.span);
+ let root = Ident::new(kw::DollarCrate, path[0].ident.span);
path.insert(0, Segment::from_ident(root));
}
_ => Err(Determinacy::Determined),
}
WhereToResolve::CrateRoot => {
- let root_ident = Ident::new(keywords::PathRoot.name(), orig_ident.span);
+ let root_ident = Ident::new(kw::PathRoot, orig_ident.span);
let root_module = self.resolve_crate_root(root_ident);
let binding = self.resolve_ident_in_module_ext(
ModuleOrUniformRoot::Module(root_module),
let def_id = self.definitions.local_def_id(item.id);
let ext = Lrc::new(macro_rules::compile(&self.session.parse_sess,
&self.session.features_untracked(),
- item, hygiene::default_edition()));
+ item, self.session.edition()));
self.macro_map.insert(def_id, ext);
let def = match item.node { ast::ItemKind::MacroDef(ref def) => def, _ => unreachable!() };
use syntax::ast::{self, Ident, Name, NodeId, CRATE_NODE_ID};
use syntax::ext::base::Determinacy::{self, Determined, Undetermined};
use syntax::ext::hygiene::Mark;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::{struct_span_err, unwrap_or};
use syntax_pos::{MultiSpan, Span};
parent_scope.expect("no parent scope for a single-segment import");
if ns == TypeNS {
- if ident.name == keywords::Crate.name() ||
- ident.name == keywords::DollarCrate.name() {
+ if ident.name == kw::Crate ||
+ ident.name == kw::DollarCrate {
let module = self.resolve_crate_root(ident);
let binding = (module, ty::Visibility::Public,
module.span, Mark::root())
.to_name_binding(self.arenas);
return Ok(binding);
- } else if ident.name == keywords::Super.name() ||
- ident.name == keywords::SelfLower.name() {
+ } else if ident.name == kw::Super ||
+ ident.name == kw::SelfLower {
// FIXME: Implement these with renaming requirements so that e.g.
// `use super;` doesn't work, but `use super as name;` does.
// Fall through here to get an error from `early_resolve_...`.
has_errors = true;
if let SingleImport { source, ref source_bindings, .. } = import.subclass {
- if source.name == keywords::SelfLower.name() {
+ if source.name == kw::SelfLower {
// Silence `unresolved import` error if E0429 is already emitted
if let Err(Determined) = source_bindings.value_ns.get() {
continue;
// HACK(eddyb) `lint_if_path_starts_with_module` needs at least
// 2 segments, so the `resolve_path` above won't trigger it.
let mut full_path = directive.module_path.clone();
- full_path.push(Segment::from_ident(keywords::Invalid.ident()));
+ full_path.push(Segment::from_ident(Ident::invalid()));
self.lint_if_path_starts_with_module(
directive.crate_lint(),
&full_path,
subclass: &ImportDirectiveSubclass<'_>,
span: Span) -> String {
let pos = names.iter()
- .position(|p| span == p.span && p.name != keywords::PathRoot.name());
- let global = !names.is_empty() && names[0].name == keywords::PathRoot.name();
+ .position(|p| span == p.span && p.name != kw::PathRoot);
+ let global = !names.is_empty() && names[0].name == kw::PathRoot;
if let Some(pos) = pos {
let names = if global { &names[1..pos + 1] } else { &names[..pos + 1] };
names_to_string(names)
);
}
ast::ImplItemKind::Type(ref ty) => {
- // FIXME uses of the assoc type should ideally point to this
+ // FIXME: uses of the assoc type should ideally point to this
// 'def' and the name here should be a ref to the def in the
// trait.
self.visit_ty(ty)
}
ast::ImplItemKind::Existential(ref bounds) => {
- // FIXME uses of the assoc type should ideally point to this
+ // FIXME: uses of the assoc type should ideally point to this
// 'def' and the name here should be a ref to the def in the
// trait.
for bound in bounds.iter() {
let hir_id = self.tcx.hir().node_to_hir_id(id);
let access = access_from!(self.save_ctxt, root_item, hir_id);
- // The parent def id of a given use tree is always the enclosing item.
+ // The parent `DefId` of a given use tree is always the enclosing item.
let parent = self.save_ctxt.tcx.hir().opt_local_def_id(id)
.and_then(|id| self.save_ctxt.tcx.parent(id))
.map(id_from_def_id);
// careful!
if default_needs_object_self(param) {
struct_span_err!(tcx.sess, span, E0393,
- "the type parameter `{}` must be explicitly \
- specified",
- param.name)
- .span_label(span,
- format!("missing reference to `{}`", param.name))
- .note(&format!("because of the default `Self` reference, \
- type parameters must be specified on object \
- types"))
+ "the type parameter `{}` must be explicitly specified",
+ param.name
+ )
+ .span_label(span, format!(
+ "missing reference to `{}`", param.name))
+ .note(&format!(
+ "because of the default `Self` reference, type parameters \
+ must be specified on object types"))
.emit();
tcx.types.err.into()
} else {
}
/// Instantiates the path for the given trait reference, assuming that it's
- /// bound to a valid trait type. Returns the def_id for the defining trait.
+ /// bound to a valid trait type. Returns the `DefId` of the defining trait.
/// The type _cannot_ be a type other than a trait type.
///
/// If the `projections` argument is `None`, then assoc type bindings like `Foo<T = X>`
{
let tcx = self.tcx();
- if trait_bounds.is_empty() {
- span_err!(tcx.sess, span, E0224,
- "at least one non-builtin trait is required for an object type");
- return tcx.types.err;
- }
-
let mut projection_bounds = Vec::new();
+ let mut potential_assoc_types = Vec::new();
let dummy_self = self.tcx().types.trait_object_dummy_self;
- let (principal, potential_assoc_types) = self.instantiate_poly_trait_ref(
- &trait_bounds[0],
- dummy_self,
- &mut projection_bounds,
- );
- debug!("principal: {:?}", principal);
-
- for trait_bound in trait_bounds[1..].iter() {
- // sanity check for non-principal trait bounds
- self.instantiate_poly_trait_ref(trait_bound,
- dummy_self,
- &mut vec![]);
+ // FIXME: we want to avoid collecting into a `Vec` here, but simply cloning the iterator is
+ // not straightforward due to the borrow checker.
+ let bound_trait_refs: Vec<_> = trait_bounds
+ .iter()
+ .rev()
+ .map(|trait_bound| {
+ let (trait_ref, cur_potential_assoc_types) = self.instantiate_poly_trait_ref(
+ trait_bound,
+ dummy_self,
+ &mut projection_bounds
+ );
+ potential_assoc_types.extend(cur_potential_assoc_types.into_iter().flatten());
+ (trait_ref, trait_bound.span)
+ })
+ .collect();
+
+ // Expand trait aliases recursively and check that only one regular (non-auto) trait
+ // is used and no 'maybe' bounds are used.
+ let expanded_traits = traits::expand_trait_aliases(tcx, bound_trait_refs.iter().cloned());
+ let (mut auto_traits, regular_traits): (Vec<_>, Vec<_>) =
+ expanded_traits.partition(|i| tcx.trait_is_auto(i.trait_ref().def_id()));
+ if regular_traits.len() > 1 {
+ let first_trait = &regular_traits[0];
+ let additional_trait = &regular_traits[1];
+ let mut err = struct_span_err!(tcx.sess, additional_trait.bottom().1, E0225,
+ "only auto traits can be used as additional traits in a trait object"
+ );
+ additional_trait.label_with_exp_info(&mut err,
+ "additional non-auto trait", "additional use");
+ first_trait.label_with_exp_info(&mut err,
+ "first non-auto trait", "first use");
+ err.emit();
}
- let (mut auto_traits, trait_bounds) = split_auto_traits(tcx, &trait_bounds[1..]);
-
- if !trait_bounds.is_empty() {
- let b = &trait_bounds[0];
- let span = b.trait_ref.path.span;
- struct_span_err!(self.tcx().sess, span, E0225,
- "only auto traits can be used as additional traits in a trait object")
- .span_label(span, "non-auto additional trait")
- .emit();
+ if regular_traits.is_empty() && auto_traits.is_empty() {
+ span_err!(tcx.sess, span, E0224,
+ "at least one non-builtin trait is required for an object type");
+ return tcx.types.err;
}
// Check that there are no gross object safety violations;
// most importantly, that the supertraits don't contain `Self`,
// to avoid ICEs.
- let object_safety_violations =
- tcx.global_tcx().astconv_object_safety_violations(principal.def_id());
- if !object_safety_violations.is_empty() {
- tcx.report_object_safety_error(span, principal.def_id(), object_safety_violations)
- .map(|mut err| err.emit());
- return tcx.types.err;
+ for item in &regular_traits {
+ let object_safety_violations =
+ tcx.global_tcx().astconv_object_safety_violations(item.trait_ref().def_id());
+ if !object_safety_violations.is_empty() {
+ tcx.report_object_safety_error(
+ span,
+ item.trait_ref().def_id(),
+ object_safety_violations
+ )
+ .map(|mut err| err.emit());
+ return tcx.types.err;
+ }
}
// Use a `BTreeSet` to keep output in a more consistent order.
let mut associated_types = BTreeSet::default();
- for tr in traits::elaborate_trait_ref(tcx, principal) {
- debug!("conv_object_ty_poly_trait_ref: observing object predicate `{:?}`", tr);
- match tr {
+ let regular_traits_refs = bound_trait_refs
+ .into_iter()
+ .filter(|(trait_ref, _)| !tcx.trait_is_auto(trait_ref.def_id()))
+ .map(|(trait_ref, _)| trait_ref);
+ for trait_ref in traits::elaborate_trait_refs(tcx, regular_traits_refs) {
+ debug!("conv_object_ty_poly_trait_ref: observing object predicate `{:?}`", trait_ref);
+ match trait_ref {
ty::Predicate::Trait(pred) => {
- associated_types.extend(tcx.associated_items(pred.def_id())
- .filter(|item| item.kind == ty::AssociatedKind::Type)
- .map(|item| item.def_id));
+ associated_types
+ .extend(tcx.associated_items(pred.def_id())
+ .filter(|item| item.kind == ty::AssociatedKind::Type)
+ .map(|item| item.def_id));
}
ty::Predicate::Projection(pred) => {
// A `Self` within the original bound will be substituted with a
pred.skip_binder().ty.walk().any(|t| t == dummy_self);
// If the projection output contains `Self`, force the user to
- // elaborate it explicitly to avoid a bunch of complexity.
+ // elaborate it explicitly to avoid a lot of complexity.
//
// The "classicaly useful" case is the following:
// ```
// }
// ```
//
- // Here, the user could theoretically write `dyn MyTrait<Output=X>`,
+ // Here, the user could theoretically write `dyn MyTrait<Output = X>`,
// but actually supporting that would "expand" to an infinitely-long type
- // `fix $ τ → dyn MyTrait<MyOutput=X, Output=<τ as MyTrait>::MyOutput`.
+ // `fix $ τ → dyn MyTrait<MyOutput = X, Output = <τ as MyTrait>::MyOutput`.
//
- // Instead, we force the user to write `dyn MyTrait<MyOutput=X, Output=X>`,
+ // Instead, we force the user to write `dyn MyTrait<MyOutput = X, Output = X>`,
// which is uglier but works. See the discussion in #56288 for alternatives.
if !references_self {
- // Include projections defined on supertraits,
+ // Include projections defined on supertraits.
projection_bounds.push((pred, DUMMY_SP))
}
}
if associated_types.len() == 1 { "" } else { "s" },
names,
);
- let mut suggest = false;
- let mut potential_assoc_types_spans = vec![];
- if let Some(potential_assoc_types) = potential_assoc_types {
+ let (suggest, potential_assoc_types_spans) =
if potential_assoc_types.len() == associated_types.len() {
- // Only suggest when the amount of missing associated types is equals to the
+ // Only suggest when the amount of missing associated types equals the number of
// extra type arguments present, as that gives us a relatively high confidence
// that the user forgot to give the associtated type's name. The canonical
// example would be trying to use `Iterator<isize>` instead of
- // `Iterator<Item=isize>`.
- suggest = true;
- potential_assoc_types_spans = potential_assoc_types;
- }
- }
- let mut suggestions = vec![];
+ // `Iterator<Item = isize>`.
+ (true, potential_assoc_types)
+ } else {
+ (false, Vec::new())
+ };
+ let mut suggestions = Vec::new();
for (i, item_def_id) in associated_types.iter().enumerate() {
let assoc_item = tcx.associated_item(*item_def_id);
err.span_label(
err.emit();
}
+ // De-duplicate auto traits so that, e.g., `dyn Trait + Send + Send` is the same as
+ // `dyn Trait + Send`.
+ auto_traits.sort_by_key(|i| i.trait_ref().def_id());
+ auto_traits.dedup_by_key(|i| i.trait_ref().def_id());
+ debug!("regular_traits: {:?}", regular_traits);
+ debug!("auto_traits: {:?}", auto_traits);
+
// Erase the `dummy_self` (`trait_object_dummy_self`) used above.
- let existential_principal = principal.map_bound(|trait_ref| {
- self.trait_ref_to_existential(trait_ref)
+ let existential_trait_refs = regular_traits.iter().map(|i| {
+ i.trait_ref().map_bound(|trait_ref| self.trait_ref_to_existential(trait_ref))
});
let existential_projections = projection_bounds.iter().map(|(bound, _)| {
bound.map_bound(|b| {
})
});
- // Dedup auto traits so that `dyn Trait + Send + Send` is the same as `dyn Trait + Send`.
- auto_traits.sort();
- auto_traits.dedup();
-
- // Calling `skip_binder` is okay, because the predicates are re-bound.
- let principal = if tcx.trait_is_auto(existential_principal.def_id()) {
- ty::ExistentialPredicate::AutoTrait(existential_principal.def_id())
- } else {
- ty::ExistentialPredicate::Trait(*existential_principal.skip_binder())
- };
+ // Calling `skip_binder` is okay because the predicates are re-bound.
+ let regular_trait_predicates = existential_trait_refs.map(
+ |trait_ref| ty::ExistentialPredicate::Trait(*trait_ref.skip_binder()));
+ let auto_trait_predicates = auto_traits.into_iter().map(
+ |trait_ref| ty::ExistentialPredicate::AutoTrait(trait_ref.trait_ref().def_id()));
let mut v =
- iter::once(principal)
- .chain(auto_traits.into_iter().map(ty::ExistentialPredicate::AutoTrait))
+ regular_trait_predicates
+ .chain(auto_trait_predicates)
.chain(existential_projections
.map(|x| ty::ExistentialPredicate::Projection(*x.skip_binder())))
.collect::<SmallVec<[_; 8]>>();
} else {
self.re_infer(span, None).unwrap_or_else(|| {
span_err!(tcx.sess, span, E0228,
- "the lifetime bound for this object type cannot be deduced \
- from context; please supply an explicit bound");
+ "the lifetime bound for this object type cannot be deduced \
+ from context; please supply an explicit bound");
tcx.lifetimes.re_static
})
}
})
};
-
debug!("region_bound: {:?}", region_bound);
let ty = tcx.mk_dynamic(existential_predicates, region_bound);
}
// Search for a bound on a type parameter which includes the associated item
- // given by `assoc_name`. `ty_param_def_id` is the `DefId` for the type parameter
+ // given by `assoc_name`. `ty_param_def_id` is the `DefId` of the type parameter
// This function will fail if there are no suitable bounds or there is
// any ambiguity.
fn find_bound_for_assoc_item(&self,
}
}
-/// Divides a list of general trait bounds into two groups: auto traits (e.g., Sync and Send) and
-/// the remaining general trait bounds.
-fn split_auto_traits<'a, 'b, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_bounds: &'b [hir::PolyTraitRef])
- -> (Vec<DefId>, Vec<&'b hir::PolyTraitRef>)
-{
- let (auto_traits, trait_bounds): (Vec<_>, _) = trait_bounds.iter().partition(|bound| {
- // Checks whether `trait_did` is an auto trait and adds it to `auto_traits` if so.
- match bound.trait_ref.path.res {
- Res::Def(DefKind::Trait, trait_did) if tcx.trait_is_auto(trait_did) => {
- true
- }
- _ => false
- }
- });
-
- let auto_traits = auto_traits.into_iter().map(|tr| {
- if let Res::Def(DefKind::Trait, trait_did) = tr.trait_ref.path.res {
- trait_did
- } else {
- unreachable!()
- }
- }).collect::<Vec<_>>();
-
- (auto_traits, trait_bounds)
-}
-
// A helper struct for conveniently grouping a set of bounds which we pass to
// and return from functions in multiple places.
#[derive(PartialEq, Eq, Clone, Debug)]
pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, param_ty: Ty<'tcx>)
-> Vec<(ty::Predicate<'tcx>, Span)>
{
- // If it could be sized, and is, add the sized predicate.
+ // If it could be sized, and is, add the `Sized` predicate.
let sized_predicate = self.implicitly_sized.and_then(|span| {
tcx.lang_items().sized_trait().map(|sized| {
let trait_ref = ty::TraitRef {
}
fn assemble_inherent_candidates_from_param(&mut self, param_ty: ty::ParamTy) {
- // FIXME -- Do we want to commit to this behavior for param bounds?
+ // FIXME: do we want to commit to this behavior for param bounds?
let bounds = self.param_env
.caller_bounds
use syntax::feature_gate::{GateIssue, emit_feature_err};
use syntax::ptr::P;
use syntax::source_map::{DUMMY_SP, original_sp};
-use syntax::symbol::{Symbol, LocalInternedString, keywords, sym};
+use syntax::symbol::{Symbol, LocalInternedString, kw, sym};
use syntax::util::lev_distance::find_best_match_for_name;
use std::cell::{Cell, RefCell, Ref, RefMut};
Ok(method)
}
Err(error) => {
- if segment.ident.name != keywords::Invalid.name() {
+ if segment.ident.name != kw::Invalid {
self.report_method_error(span,
rcvr_t,
segment.ident,
}
err.emit();
field_ty
- } else if field.name == keywords::Invalid.name() {
+ } else if field.name == kw::Invalid {
self.tcx().types.err
} else if self.method_exists(field, expr_t, expr.hir_id, true) {
let mut err = type_error_struct!(self.tcx().sess, field.span, expr_t, E0615,
method::MethodError::PrivateMatch(kind, def_id, _) => Ok((kind, def_id)),
_ => Err(ErrorReported),
};
- if item_name.name != keywords::Invalid.name() {
+ if item_name.name != kw::Invalid {
self.report_method_error(
span,
ty,
.iter()
.map(|(p, _)| *p)
.collect();
- // Check elaborated bounds
+ // Check elaborated bounds.
let implied_obligations = traits::elaborate_predicates(fcx.tcx, predicates);
for pred in implied_obligations {
use syntax::attr::{InlineAttr, OptimizeAttr, list_contains_name, mark_used};
use syntax::source_map::Spanned;
use syntax::feature_gate;
-use syntax::symbol::{InternedString, keywords, Symbol, sym};
+use syntax::symbol::{InternedString, kw, Symbol, sym};
use syntax_pos::{Span, DUMMY_SP};
use rustc::hir::def::{CtorKind, Res, DefKind};
tcx.alloc_adt_def(def_id, kind, variants, repr)
}
-/// Ensures that the super-predicates of the trait with `DefId`
-/// trait_def_id are converted and stored. This also ensures that
-/// the transitive super-predicates are converted;
+/// Ensures that the super-predicates of the trait with a `DefId`
+/// of `trait_def_id` are converted and stored. This also ensures that
+/// the transitive super-predicates are converted.
fn super_predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_def_id: DefId,
let icx = ItemCtxt::new(tcx, trait_def_id);
- // Convert the bounds that follow the colon, e.g., `Bar + Zed` in `trait Foo : Bar + Zed`.
+ // Convert the bounds that follow the colon, e.g., `Bar + Zed` in `trait Foo: Bar + Zed`.
let self_param_ty = tcx.mk_self_type();
let superbounds1 = compute_bounds(&icx, self_param_ty, bounds, SizedByDefault::No, item.span);
let superbounds1 = superbounds1.predicates(tcx, self_param_ty);
- // Convert any explicit superbounds in the where clause,
- // e.g., `trait Foo where Self : Bar`.
- // In the case of trait aliases, however, we include all bounds in the where clause,
+ // Convert any explicit superbounds in the where-clause,
+ // e.g., `trait Foo where Self: Bar`.
+ // In the case of trait aliases, however, we include all bounds in the where-clause,
// so e.g., `trait Foo = where u32: PartialEq<Self>` would include `u32: PartialEq<Self>`
// as one of its "superpredicates".
let is_trait_alias = tcx.is_trait_alias(trait_def_id);
opt_self = Some(ty::GenericParamDef {
index: 0,
- name: keywords::SelfUpper.name().as_interned_str(),
+ name: kw::SelfUpper.as_interned_str(),
def_id: tcx.hir().local_def_id_from_hir_id(param_id),
pure_wrt_drop: false,
kind: ty::GenericParamDefKind::Type {
synthetic,
..
} => {
- if param.name.ident().name == keywords::SelfUpper.name() {
+ if param.name.ident().name == kw::SelfUpper {
span_bug!(
param.span,
"`Self` should not be the name of a regular parameter"
}
}
GenericParamKind::Const { .. } => {
- if param.name.ident().name == keywords::SelfUpper.name() {
+ if param.name.ident().name == kw::SelfUpper {
span_bug!(
param.span,
"`Self` should not be the name of a regular parameter",
use syntax::attr;
use syntax::source_map::dummy_spanned;
use syntax::symbol::Symbol;
- use syntax::with_globals;
+ use syntax::with_default_globals;
fn word_cfg(s: &str) -> Cfg {
Cfg::Cfg(Symbol::intern(s), None)
#[test]
fn test_cfg_not() {
- with_globals(|| {
+ with_default_globals(|| {
assert_eq!(!Cfg::False, Cfg::True);
assert_eq!(!Cfg::True, Cfg::False);
assert_eq!(!word_cfg("test"), Cfg::Not(Box::new(word_cfg("test"))));
#[test]
fn test_cfg_and() {
- with_globals(|| {
+ with_default_globals(|| {
let mut x = Cfg::False;
x &= Cfg::True;
assert_eq!(x, Cfg::False);
#[test]
fn test_cfg_or() {
- with_globals(|| {
+ with_default_globals(|| {
let mut x = Cfg::True;
x |= Cfg::False;
assert_eq!(x, Cfg::True);
#[test]
fn test_parse_ok() {
- with_globals(|| {
+ with_default_globals(|| {
let mi = dummy_meta_item_word("all");
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));
#[test]
fn test_parse_err() {
- with_globals(|| {
+ with_default_globals(|| {
let mi = attr::mk_name_value_item(
DUMMY_SP,
Ident::from_str("foo"),
#[test]
fn test_render_short_html() {
- with_globals(|| {
+ with_default_globals(|| {
assert_eq!(
word_cfg("unix").render_short_html(),
"Unix"
#[test]
fn test_render_long_html() {
- with_globals(|| {
+ with_default_globals(|| {
assert_eq!(
word_cfg("unix").render_long_html(),
"This is supported on <strong>Unix</strong> only."
use syntax::ext::base::MacroKind;
use syntax::source_map::{dummy_spanned, Spanned};
use syntax::ptr::P;
-use syntax::symbol::keywords::{self, Keyword};
-use syntax::symbol::{Symbol, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::symbol::InternedString;
use syntax_pos::{self, Pos, FileName};
use std::{mem, slice, vec};
use std::iter::{FromIterator, once};
use std::rc::Rc;
-use std::str::FromStr;
use std::cell::RefCell;
use std::sync::Arc;
use std::u32;
for attr in attrs.lists(sym::doc) {
if let Some(v) = attr.value_str() {
if attr.check_name(sym::keyword) {
- keyword = Keyword::from_str(&v.as_str()).ok()
- .map(|x| x.name().to_string());
- if keyword.is_some() {
- break
+ if v.is_doc_keyword() {
+ keyword = Some(v.to_string());
+ break;
}
// FIXME: should warn on unknown keywords?
}
let stripped_typarams = gens.params.iter().filter_map(|param| match param.kind {
ty::GenericParamDefKind::Lifetime => None,
ty::GenericParamDefKind::Type { .. } => {
- if param.name == keywords::SelfUpper.name().as_str() {
+ if param.name.as_symbol() == kw::SelfUpper {
assert_eq!(param.index, 0);
return None;
}
if i > 0 {
s.push_str("::");
}
- if seg.ident.name != keywords::PathRoot.name() {
+ if seg.ident.name != kw::PathRoot {
s.push_str(&*seg.ident.as_str());
}
}
hir::Float(float_ty) => return Primitive(float_ty.into()),
},
Res::SelfTy(..) if path.segments.len() == 1 => {
- return Generic(keywords::SelfUpper.name().to_string());
+ return Generic(kw::SelfUpper.to_string());
}
Res::Def(DefKind::TyParam, _) if path.segments.len() == 1 => {
return Generic(format!("{:#}", path));
use syntax::parse::lexer::{self, TokenAndSpan};
use syntax::parse::token;
use syntax::parse;
+use syntax::symbol::{kw, sym};
use syntax_pos::{Span, FileName};
/// Highlights `src`, returning the HTML output.
// Keywords are also included in the identifier set.
token::Ident(ident, is_raw) => {
- match &*ident.as_str() {
- "ref" | "mut" if !is_raw => Class::RefKeyWord,
+ match ident.name {
+ kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord,
- "self" | "Self" => Class::Self_,
- "false" | "true" if !is_raw => Class::Bool,
+ kw::SelfLower | kw::SelfUpper => Class::Self_,
+ kw::False | kw::True if !is_raw => Class::Bool,
- "Option" | "Result" => Class::PreludeTy,
- "Some" | "None" | "Ok" | "Err" => Class::PreludeVal,
+ sym::Option | sym::Result => Class::PreludeTy,
+ sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
- "$crate" => Class::KeyWord,
_ if tas.tok.is_reserved_ident() => Class::KeyWord,
_ => {
rustc_driver::set_sigpipe_handler();
env_logger::init();
let res = std::thread::Builder::new().stack_size(thread_stack_size).spawn(move || {
- rustc_interface::interface::default_thread_pool(move || {
- get_args().map(|args| main_args(&args)).unwrap_or(1)
- })
+ get_args().map(|args| main_args(&args)).unwrap_or(1)
}).unwrap().join().unwrap_or(rustc_driver::EXIT_FAILURE);
process::exit(res);
}
Ok(opts) => opts,
Err(code) => return code,
};
+ rustc_interface::interface::default_thread_pool(options.edition, move || {
+ main_options(options)
+ })
+}
+fn main_options(options: config::Options) -> i32 {
let diag = core::new_handler(options.error_format,
None,
options.debugging_options.treat_err_as_bug,
use rustc::session::search_paths::SearchPath;
use rustc::util::common::ErrorReported;
use syntax::ast;
+use syntax::with_globals;
use syntax::source_map::SourceMap;
use syntax::edition::Edition;
use syntax::feature_gate::UnstableFeatures;
// Uses libsyntax to parse the doctest and find if there's a main fn and the extern
// crate already is included.
- let (already_has_main, already_has_extern_crate, found_macro) = crate::syntax::with_globals(|| {
+ let (already_has_main, already_has_extern_crate, found_macro) = with_globals(edition, || {
use crate::syntax::{parse::{self, ParseSess}, source_map::FilePathMapping};
use errors::emitter::EmitterWriter;
use errors::Handler;
- syntax::ext::hygiene::set_default_edition(edition);
-
let filename = FileName::anon_source_code(s);
let source = crates + &everything_else;
match cvt(syscall::clone(0))? {
0 => {
drop(input);
- let err = self.do_exec(theirs);
+ let Err(err) = self.do_exec(theirs);
let errno = err.raw_os_error().unwrap_or(syscall::EINVAL) as u32;
let bytes = [
(errno >> 24) as u8,
}
match self.setup_io(default, true) {
- Ok((_, theirs)) => unsafe { self.do_exec(theirs) },
+ Ok((_, theirs)) => unsafe {
+ let Err(e) = self.do_exec(theirs);
+ e
+ },
Err(e) => e,
}
}
// allocation). Instead we just close it manually. This will never
// have the drop glue anyway because this code never returns (the
// child will either exec() or invoke syscall::exit)
- unsafe fn do_exec(&mut self, stdio: ChildPipes) -> io::Error {
- macro_rules! t {
- ($e:expr) => (match $e {
- Ok(e) => e,
- Err(e) => return e,
- })
- }
-
+ unsafe fn do_exec(&mut self, stdio: ChildPipes) -> Result<!, io::Error> {
if let Some(fd) = stdio.stderr.fd() {
- t!(cvt(syscall::dup2(fd, 2, &[])));
- let mut flags = t!(cvt(syscall::fcntl(2, syscall::F_GETFD, 0)));
+ cvt(syscall::dup2(fd, 2, &[]))?;
+ let mut flags = cvt(syscall::fcntl(2, syscall::F_GETFD, 0))?;
flags &= ! syscall::O_CLOEXEC;
- t!(cvt(syscall::fcntl(2, syscall::F_SETFD, flags)));
+ cvt(syscall::fcntl(2, syscall::F_SETFD, flags))?;
}
if let Some(fd) = stdio.stdout.fd() {
- t!(cvt(syscall::dup2(fd, 1, &[])));
- let mut flags = t!(cvt(syscall::fcntl(1, syscall::F_GETFD, 0)));
+ cvt(syscall::dup2(fd, 1, &[]))?;
+ let mut flags = cvt(syscall::fcntl(1, syscall::F_GETFD, 0))?;
flags &= ! syscall::O_CLOEXEC;
- t!(cvt(syscall::fcntl(1, syscall::F_SETFD, flags)));
+ cvt(syscall::fcntl(1, syscall::F_SETFD, flags))?;
}
if let Some(fd) = stdio.stdin.fd() {
- t!(cvt(syscall::dup2(fd, 0, &[])));
- let mut flags = t!(cvt(syscall::fcntl(0, syscall::F_GETFD, 0)));
+ cvt(syscall::dup2(fd, 0, &[]))?;
+ let mut flags = cvt(syscall::fcntl(0, syscall::F_GETFD, 0))?;
flags &= ! syscall::O_CLOEXEC;
- t!(cvt(syscall::fcntl(0, syscall::F_SETFD, flags)));
+ cvt(syscall::fcntl(0, syscall::F_SETFD, flags))?;
}
if let Some(g) = self.gid {
- t!(cvt(syscall::setregid(g as usize, g as usize)));
+ cvt(syscall::setregid(g as usize, g as usize))?;
}
if let Some(u) = self.uid {
- t!(cvt(syscall::setreuid(u as usize, u as usize)));
+ cvt(syscall::setreuid(u as usize, u as usize))?;
}
if let Some(ref cwd) = self.cwd {
- t!(cvt(syscall::chdir(cwd)));
+ cvt(syscall::chdir(cwd))?;
}
for callback in self.closures.iter_mut() {
- t!(callback());
+ callback()?;
}
self.env.apply();
};
let mut file = if let Some(program) = program {
- t!(File::open(program.as_os_str()))
+ File::open(program.as_os_str())?
} else {
- return io::Error::from_raw_os_error(syscall::ENOENT);
+ return Err(io::Error::from_raw_os_error(syscall::ENOENT));
};
// Push all the arguments
let mut shebang = [0; 2];
let mut read = 0;
loop {
- match t!(reader.read(&mut shebang[read..])) {
+ match reader.read(&mut shebang[read..])? {
0 => break,
n => read += n,
}
// First of all, since we'll be passing another file to
// fexec(), we need to manually check that we have permission
// to execute this file:
- let uid = t!(cvt(syscall::getuid()));
- let gid = t!(cvt(syscall::getgid()));
- let meta = t!(file.metadata());
+ let uid = cvt(syscall::getuid())?;
+ let gid = cvt(syscall::getgid())?;
+ let meta = file.metadata()?;
let mode = if uid == meta.uid() as usize {
meta.mode() >> 3*2 & 0o7
meta.mode() & 0o7
};
if mode & 1 == 0 {
- return io::Error::from_raw_os_error(syscall::EPERM);
+ return Err(io::Error::from_raw_os_error(syscall::EPERM));
}
// Second of all, we need to actually read which interpreter it wants
let mut interpreter = Vec::new();
- t!(reader.read_until(b'\n', &mut interpreter));
+ reader.read_until(b'\n', &mut interpreter)?;
// Pop one trailing newline, if any
if interpreter.ends_with(&[b'\n']) {
interpreter.pop().unwrap();
};
if let Some(ref interpreter) = interpreter {
let path: &OsStr = OsStr::from_bytes(&interpreter);
- file = t!(File::open(path));
+ file = File::open(path)?;
args.push([interpreter.as_ptr() as usize, interpreter.len()]);
} else {
- t!(file.seek(SeekFrom::Start(0)));
+ file.seek(SeekFrom::Start(0))?;
}
args.push([self.program.as_ptr() as usize, self.program.len()]);
}
if let Err(err) = syscall::fexec(file.as_raw_fd(), &args, &vars) {
- io::Error::from_raw_os_error(err.errno as i32)
+ Err(io::Error::from_raw_os_error(err.errno as i32))
} else {
panic!("return from exec without err");
}
}
-
fn setup_io(&self, default: Stdio, needs_stdin: bool)
-> io::Result<(StdioPipes, ChildPipes)> {
let null = Stdio::Null;
match result {
0 => {
drop(input);
- let err = self.do_exec(theirs, envp.as_ref());
+ let Err(err) = self.do_exec(theirs, envp.as_ref());
let errno = err.raw_os_error().unwrap_or(libc::EINVAL) as u32;
let bytes = [
(errno >> 24) as u8,
// environment lock before we try to exec.
let _lock = sys::os::env_lock();
- self.do_exec(theirs, envp.as_ref())
+ let Err(e) = self.do_exec(theirs, envp.as_ref());
+ e
}
}
Err(e) => e,
&mut self,
stdio: ChildPipes,
maybe_envp: Option<&CStringArray>
- ) -> io::Error {
+ ) -> Result<!, io::Error> {
use crate::sys::{self, cvt_r};
- macro_rules! t {
- ($e:expr) => (match $e {
- Ok(e) => e,
- Err(e) => return e,
- })
- }
-
if let Some(fd) = stdio.stdin.fd() {
- t!(cvt_r(|| libc::dup2(fd, libc::STDIN_FILENO)));
+ cvt_r(|| libc::dup2(fd, libc::STDIN_FILENO))?;
}
if let Some(fd) = stdio.stdout.fd() {
- t!(cvt_r(|| libc::dup2(fd, libc::STDOUT_FILENO)));
+ cvt_r(|| libc::dup2(fd, libc::STDOUT_FILENO))?;
}
if let Some(fd) = stdio.stderr.fd() {
- t!(cvt_r(|| libc::dup2(fd, libc::STDERR_FILENO)));
+ cvt_r(|| libc::dup2(fd, libc::STDERR_FILENO))?;
}
if cfg!(not(any(target_os = "l4re"))) {
if let Some(u) = self.get_gid() {
- t!(cvt(libc::setgid(u as gid_t)));
+ cvt(libc::setgid(u as gid_t))?;
}
if let Some(u) = self.get_uid() {
// When dropping privileges from root, the `setgroups` call
// privilege dropping function.
let _ = libc::setgroups(0, ptr::null());
- t!(cvt(libc::setuid(u as uid_t)));
+ cvt(libc::setuid(u as uid_t))?;
}
}
if let Some(ref cwd) = *self.get_cwd() {
- t!(cvt(libc::chdir(cwd.as_ptr())));
+ cvt(libc::chdir(cwd.as_ptr()))?;
}
// emscripten has no signal support.
0,
mem::size_of::<libc::sigset_t>());
} else {
- t!(cvt(libc::sigemptyset(&mut set)));
+ cvt(libc::sigemptyset(&mut set))?;
}
- t!(cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set,
- ptr::null_mut())));
+ cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set,
+ ptr::null_mut()))?;
let ret = sys::signal(libc::SIGPIPE, libc::SIG_DFL);
if ret == libc::SIG_ERR {
- return io::Error::last_os_error()
+ return Err(io::Error::last_os_error())
}
}
for callback in self.get_closures().iter_mut() {
- t!(callback());
+ callback()?;
}
// Although we're performing an exec here we may also return with an
}
libc::execvp(self.get_argv()[0], self.get_argv().as_ptr());
- io::Error::last_os_error()
+ Err(io::Error::last_os_error())
}
#[cfg(not(any(target_os = "macos", target_os = "freebsd",
use crate::print::pprust;
use crate::ptr::P;
use crate::source_map::{dummy_spanned, respan, Spanned};
-use crate::symbol::{keywords, Symbol};
+use crate::symbol::{kw, Symbol};
use crate::tokenstream::TokenStream;
use crate::ThinVec;
pub struct Path {
pub span: Span,
/// The segments in the path: the things separated by `::`.
- /// Global paths begin with `keywords::PathRoot`.
+ /// Global paths begin with `kw::PathRoot`.
pub segments: Vec<PathSegment>,
}
}
pub fn is_global(&self) -> bool {
- !self.segments.is_empty() && self.segments[0].ident.name == keywords::PathRoot.name()
+ !self.segments.is_empty() && self.segments[0].ident.name == kw::PathRoot
}
}
PathSegment { ident, id: DUMMY_NODE_ID, args: None }
}
pub fn path_root(span: Span) -> Self {
- PathSegment::from_ident(Ident::new(keywords::PathRoot.name(), span))
+ PathSegment::from_ident(Ident::new(kw::PathRoot, span))
}
}
impl Arg {
pub fn to_self(&self) -> Option<ExplicitSelf> {
if let PatKind::Ident(BindingMode::ByValue(mutbl), ident, _) = self.pat.node {
- if ident.name == keywords::SelfLower.name() {
+ if ident.name == kw::SelfLower {
return match self.ty.node {
TyKind::ImplicitSelf => Some(respan(self.pat.span, SelfKind::Value(mutbl))),
TyKind::Rptr(lt, MutTy { ref ty, mutbl }) if ty.node.is_implicit_self() => {
pub fn is_self(&self) -> bool {
if let PatKind::Ident(_, ident, _) = self.pat.node {
- ident.name == keywords::SelfLower.name()
+ ident.name == kw::SelfLower
} else {
false
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct PolyTraitRef {
- /// The `'a` in `<'a> Foo<&'a T>`
+ /// The `'a` in `<'a> Foo<&'a T>`.
pub bound_generic_params: Vec<GenericParam>,
- /// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
+ /// The `Foo<&'a T>` in `<'a> Foo<&'a T>`.
pub trait_ref: TraitRef,
pub span: Span,
use crate::parse::{self, ParseSess, PResult};
use crate::parse::token::{self, Token};
use crate::ptr::P;
-use crate::symbol::{keywords, Symbol, sym};
+use crate::symbol::{sym, Symbol};
use crate::ThinVec;
use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
use crate::GLOBALS;
self.meta_item().and_then(|meta_item| meta_item.ident())
}
pub fn name_or_empty(&self) -> Symbol {
- self.ident().unwrap_or(keywords::Invalid.ident()).name
+ self.ident().unwrap_or(Ident::invalid()).name
}
/// Gets the string value if self is a MetaItem and the MetaItem is a
}
}
pub fn name_or_empty(&self) -> Symbol {
- self.ident().unwrap_or(keywords::Invalid.ident()).name
+ self.ident().unwrap_or(Ident::invalid()).name
}
pub fn value_str(&self) -> Option<Symbol> {
}
}
pub fn name_or_empty(&self) -> Symbol {
- self.ident().unwrap_or(keywords::Invalid.ident()).name
+ self.ident().unwrap_or(Ident::invalid()).name
}
// #[attribute(name = "value")]
use crate::ext::build::AstBuilder;
use crate::parse::token;
use crate::ptr::P;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::tokenstream::{TokenTree};
use smallvec::smallvec;
(descriptions.len(), ecx.expr_vec(span, descriptions))
});
- let static_ = ecx.lifetime(span, keywords::StaticLifetime.ident());
+ let static_ = ecx.lifetime(span, Ident::with_empty_ctxt(kw::StaticLifetime));
let ty_str = ecx.ty_rptr(
span,
ecx.ty_ident(span, ecx.ident_of("str")),
use crate::source_map::{SourceMap, Spanned, respan};
use crate::edition::Edition;
use crate::ext::expand::{self, AstFragment, Invocation};
-use crate::ext::hygiene::{self, Mark, SyntaxContext, Transparency};
+use crate::ext::hygiene::{Mark, SyntaxContext, Transparency};
use crate::mut_visit::{self, MutVisitor};
use crate::parse::{self, parser, DirectoryOwnership};
use crate::parse::token;
use crate::ptr::P;
-use crate::symbol::{keywords, Ident, Symbol, sym};
+use crate::symbol::{kw, sym, Ident, Symbol};
use crate::ThinVec;
use crate::tokenstream::{self, TokenStream};
}
}
- pub fn edition(&self) -> Edition {
+ pub fn edition(&self, default_edition: Edition) -> Edition {
match *self {
SyntaxExtension::NormalTT { edition, .. } |
SyntaxExtension::DeclMacro { edition, .. } |
SyntaxExtension::IdentTT { .. } |
SyntaxExtension::MultiDecorator(..) |
SyntaxExtension::MultiModifier(..) |
- SyntaxExtension::BuiltinDerive(..) => hygiene::default_edition(),
+ SyntaxExtension::BuiltinDerive(..) => default_edition,
}
}
}
pub trait Resolver {
fn next_node_id(&mut self) -> ast::NodeId;
+
fn get_module_scope(&mut self, id: ast::NodeId) -> Mark;
fn resolve_dollar_crates(&mut self, fragment: &AstFragment);
impl Resolver for DummyResolver {
fn next_node_id(&mut self) -> ast::NodeId { ast::DUMMY_NODE_ID }
+
fn get_module_scope(&mut self, _id: ast::NodeId) -> Mark { Mark::root() }
fn resolve_dollar_crates(&mut self, _fragment: &AstFragment) {}
}
pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
let def_site = DUMMY_SP.apply_mark(self.current_expansion.mark);
- iter::once(Ident::new(keywords::DollarCrate.name(), def_site))
+ iter::once(Ident::new(kw::DollarCrate, def_site))
.chain(components.iter().map(|s| self.ident_of(s)))
.collect()
}
use crate::source_map::{dummy_spanned, respan, Spanned};
use crate::ext::base::ExtCtxt;
use crate::ptr::P;
-use crate::symbol::{Symbol, keywords};
+use crate::symbol::{Symbol, kw};
use crate::ThinVec;
use rustc_target::spec::abi::Abi;
self.expr_path(self.path_ident(span, id))
}
fn expr_self(&self, span: Span) -> P<ast::Expr> {
- self.expr_ident(span, keywords::SelfLower.ident())
+ self.expr_ident(span, Ident::with_empty_ctxt(kw::SelfLower))
}
fn expr_binary(&self, sp: Span, op: ast::BinOpKind,
vis: ast::Visibility, vp: P<ast::UseTree>) -> P<ast::Item> {
P(ast::Item {
id: ast::DUMMY_NODE_ID,
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
attrs: vec![],
node: ast::ItemKind::Use(vp),
vis,
use crate::attr::HasAttrs;
use crate::ast;
-use crate::source_map::{hygiene, ExpnInfo, ExpnFormat};
+use crate::source_map::{ExpnInfo, ExpnFormat};
use crate::ext::base::ExtCtxt;
use crate::ext::build::AstBuilder;
use crate::parse::parser::PathStyle;
].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition: cx.parse_sess.edition,
});
let span = span.with_ctxt(cx.backtrace());
use crate::config::StripUnconfigured;
use crate::ext::base::*;
use crate::ext::derive::{add_derived_markers, collect_derives};
-use crate::ext::hygiene::{self, Mark, SyntaxContext};
+use crate::ext::hygiene::{Mark, SyntaxContext};
use crate::ext::placeholders::{placeholder, PlaceholderExpander};
use crate::feature_gate::{self, Features, GateIssue, is_builtin_attr, emit_feature_err};
use crate::mut_visit::*;
use crate::parse::parser::Parser;
use crate::ptr::P;
use crate::symbol::Symbol;
-use crate::symbol::{keywords, sym};
+use crate::symbol::{kw, sym};
use crate::tokenstream::{TokenStream, TokenTree};
use crate::visit::{self, Visitor};
use crate::util::map_in_place::MapInPlace;
if i != 0 {
path_str.push_str("::");
}
- if segment.ident.name != keywords::PathRoot.name() {
+ if segment.ident.name != kw::PathRoot {
path_str.push_str(&segment.ident.as_str())
}
}
attrs: krate.attrs,
span: krate.span,
node: ast::ItemKind::Mod(krate.module),
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
id: ast::DUMMY_NODE_ID,
vis: respan(krate.span.shrink_to_lo(), ast::VisibilityKind::Public),
tokens: None,
allow_internal_unstable: None,
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: ext.edition(),
+ edition: ext.edition(self.cx.parse_sess.edition),
});
match *ext {
};
let path = &mac.node.path;
- let ident = ident.unwrap_or_else(|| keywords::Invalid.ident());
+ let ident = ident.unwrap_or_else(|| Ident::invalid());
let validate_and_set_expn_info = |this: &mut Self, // arg instead of capture
def_site_span: Option<Span>,
allow_internal_unstable,
}
}
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
let msg = format!("macro {}! expects no ident argument, given '{}'", path, ident);
this.cx.span_err(path.span, &msg);
this.cx.trace_macros_diag();
}
IdentTT { ref expander, span: tt_span, ref allow_internal_unstable } => {
- if ident.name == keywords::Invalid.name() {
+ if ident.name == kw::Invalid {
self.cx.span_err(path.span,
&format!("macro {}! expects an ident argument", path));
self.cx.trace_macros_diag();
allow_internal_unstable: allow_internal_unstable.clone(),
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition: self.cx.parse_sess.edition,
});
let input: Vec<_> = mac.node.stream().into_trees().collect();
}
SyntaxExtension::ProcMacro { ref expander, ref allow_internal_unstable, edition } => {
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
let msg =
format!("macro {}! expects no ident argument, given '{}'", path, ident);
self.cx.span_err(path.span, &msg);
allow_internal_unstable: None,
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: ext.edition(),
+ edition: ext.edition(self.cx.parse_sess.edition),
};
match *ext {
invoc.expansion_data.mark.set_expn_info(expn_info);
let span = span.with_ctxt(self.cx.backtrace());
let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this
- path: Path::from_ident(keywords::Invalid.ident()),
+ path: Path::from_ident(Ident::invalid()),
span: DUMMY_SP,
node: ast::MetaItemKind::Word,
};
})
}
ast::ItemKind::Mod(ast::Mod { inner, .. }) => {
- if item.ident == keywords::Invalid.ident() {
+ if item.ident == Ident::invalid() {
return noop_flat_map_item(item, self);
}
use crate::tokenstream::TokenStream;
use crate::mut_visit::*;
use crate::ptr::P;
-use crate::symbol::keywords;
use crate::ThinVec;
use smallvec::{smallvec, SmallVec};
})
}
- let ident = keywords::Invalid.ident();
+ let ident = ast::Ident::invalid();
let attrs = Vec::new();
let generics = ast::Generics::default();
let vis = dummy_spanned(ast::VisibilityKind::Inherited);
use crate::parse::parser::{Parser, PathStyle};
use crate::parse::token::{self, DocComment, Nonterminal, Token};
use crate::print::pprust;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::tokenstream::{DelimSpan, TokenStream};
use errors::FatalError;
TokenTree::Delimited(_, ref delim) => for next_m in &delim.tts {
n_rec(sess, next_m, res.by_ref(), ret_val)?;
},
- TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
+ TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
return Err((span, "missing fragment specifier".to_string()));
}
}
// We need to match a metavar (but the identifier is invalid)... this is an error
- TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
+ TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
return Error(span, "missing fragment specifier".to_string());
}
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
match *token {
- token::Ident(ident, is_raw) if ident.name != keywords::Underscore.name() =>
+ token::Ident(ident, is_raw) if ident.name != kw::Underscore =>
Some((ident, is_raw)),
_ => None,
}
use crate::parse::parser::Parser;
use crate::parse::token::{self, NtTT};
use crate::parse::token::Token::*;
-use crate::symbol::{Symbol, keywords, sym};
+use crate::symbol::{Symbol, kw, sym};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
use errors::FatalError;
match *tok {
TokenTree::Token(_, ref tok) => match *tok {
FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
- Ident(i, false) if i.name == keywords::If.name() ||
- i.name == keywords::In.name() => IsInFollow::Yes,
+ Ident(i, false) if i.name == kw::If ||
+ i.name == kw::In => IsInFollow::Yes,
_ => IsInFollow::No(tokens),
},
_ => IsInFollow::No(tokens),
OpenDelim(token::DelimToken::Bracket) |
Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
BinOp(token::Or) => IsInFollow::Yes,
- Ident(i, false) if i.name == keywords::As.name() ||
- i.name == keywords::Where.name() => IsInFollow::Yes,
+ Ident(i, false) if i.name == kw::As ||
+ i.name == kw::Where => IsInFollow::Yes,
_ => IsInFollow::No(tokens),
},
TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block =>
match *tok {
TokenTree::Token(_, ref tok) => match *tok {
Comma => IsInFollow::Yes,
- Ident(i, is_raw) if is_raw || i.name != keywords::Priv.name() =>
+ Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
IsInFollow::Yes,
ref tok => if tok.can_begin_type() {
IsInFollow::Yes
_ => IsInFollow::No(tokens),
}
},
- "" => IsInFollow::Yes, // keywords::Invalid
+ "" => IsInFollow::Yes, // kw::Invalid
_ => IsInFollow::Invalid(format!("invalid fragment specifier `{}`", frag),
VALID_FRAGMENT_NAMES_MSG),
}
use crate::print::pprust;
use crate::tokenstream::{self, DelimSpan};
use crate::ast;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use syntax_pos::{edition::Edition, BytePos, Span};
result.push(TokenTree::MetaVarDecl(
span,
ident,
- keywords::Invalid.ident(),
+ ast::Ident::invalid(),
));
}
Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
let (ident, is_raw) = token.ident().unwrap();
let span = ident_span.with_lo(span.lo());
- if ident.name == keywords::Crate.name() && !is_raw {
- let ident = ast::Ident::new(keywords::DollarCrate.name(), ident.span);
+ if ident.name == kw::Crate && !is_raw {
+ let ident = ast::Ident::new(kw::DollarCrate, ident.span);
TokenTree::Token(span, token::Ident(ident, is_raw))
} else {
TokenTree::MetaVar(span, ident)
pprust::token_to_string(&tok)
);
sess.span_diagnostic.span_err(span, &msg);
- TokenTree::MetaVar(span, keywords::Invalid.ident())
+ TokenTree::MetaVar(span, ast::Ident::invalid())
}
// There are no more tokens. Just return the `$` we already have.
use crate::edition::{ALL_EDITIONS, Edition};
use crate::visit::{self, FnKind, Visitor};
use crate::parse::{token, ParseSess};
-use crate::symbol::{Symbol, keywords, sym};
+use crate::symbol::{Symbol, kw, sym};
use crate::tokenstream::TokenTree;
use errors::{DiagnosticBuilder, Handler};
is just used to enable niche optimizations in libcore \
and will never be stable",
cfg_fn!(rustc_attrs))),
+ (sym::rustc_nonnull_optimization_guaranteed, Whitelisted, template!(Word),
+ Gated(Stability::Unstable,
+ sym::rustc_attrs,
+ "the `#[rustc_nonnull_optimization_guaranteed]` attribute \
+ is just used to enable niche optimizations in libcore \
+ and will never be stable",
+ cfg_fn!(rustc_attrs))),
(sym::rustc_regions, Normal, template!(Word), Gated(Stability::Unstable,
sym::rustc_attrs,
"the `#[rustc_regions]` attribute \
fn visit_item(&mut self, i: &'a ast::Item) {
match i.node {
ast::ItemKind::Const(_,_) => {
- if i.ident.name == keywords::Underscore.name() {
+ if i.ident.name == kw::Underscore {
gate_feature_post!(&self, underscore_const_names, i.span,
"naming constants with `_` is unstable");
}
use rustc_data_structures::bit_set::GrowableBitSet;
pub use rustc_data_structures::thin_vec::ThinVec;
use ast::AttrId;
+use syntax_pos::edition::Edition;
// A variant of 'try!' that panics on an Err. This is used as a crutch on the
// way towards a non-panic!-prone parser. It should be used for fatal parsing
}
impl Globals {
- fn new() -> Globals {
+ fn new(edition: Edition) -> Globals {
Globals {
            // We have no idea how many attributes there will be, so just
            // initialize the vectors with 0 bits. We'll grow them as necessary.
used_attrs: Lock::new(GrowableBitSet::new_empty()),
known_attrs: Lock::new(GrowableBitSet::new_empty()),
- syntax_pos_globals: syntax_pos::Globals::new(),
+ syntax_pos_globals: syntax_pos::Globals::new(edition),
}
}
}
-pub fn with_globals<F, R>(f: F) -> R
+pub fn with_globals<F, R>(edition: Edition, f: F) -> R
where F: FnOnce() -> R
{
- let globals = Globals::new();
+ let globals = Globals::new(edition);
GLOBALS.set(&globals, || {
syntax_pos::GLOBALS.set(&globals.syntax_pos_globals, f)
})
}
+pub fn with_default_globals<F, R>(f: F) -> R
+ where F: FnOnce() -> R
+{
+ with_globals(edition::DEFAULT_EDITION, f)
+}
+
scoped_tls::scoped_thread_local!(pub static GLOBALS: Globals);
#[macro_use]
use crate::source_map::{Spanned, respan};
use crate::parse::token::{self, Token};
use crate::ptr::P;
-use crate::symbol::keywords;
use crate::ThinVec;
use crate::tokenstream::*;
use crate::util::map_in_place::MapInPlace;
pub fn noop_visit_crate<T: MutVisitor>(krate: &mut Crate, vis: &mut T) {
visit_clobber(krate, |Crate { module, attrs, span }| {
let item = P(Item {
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
attrs,
id: DUMMY_NODE_ID,
vis: respan(span.shrink_to_lo(), VisibilityKind::Public),
use crate::util::parser_testing::{string_to_crate, matches_codepattern};
use crate::print::pprust;
use crate::mut_visit;
- use crate::with_globals;
+ use crate::with_default_globals;
use super::*;
// this version doesn't care about getting comments or docstrings in.
// make sure idents get transformed everywhere
#[test] fn ident_transformation () {
- with_globals(|| {
+ with_default_globals(|| {
let mut zz_visitor = ToZzIdentMutVisitor;
let mut krate = string_to_crate(
"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string());
// even inside macro defs....
#[test] fn ident_transformation_in_defs () {
- with_globals(|| {
+ with_default_globals(|| {
let mut zz_visitor = ToZzIdentMutVisitor;
let mut krate = string_to_crate(
"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
use crate::parse::Parser;
use crate::print::pprust;
use crate::ptr::P;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::ThinVec;
use errors::{Applicability, DiagnosticBuilder};
use syntax_pos::Span;
/// Recover from `pub` keyword in places where it seems _reasonable_ but isn't valid.
crate fn eat_bad_pub(&mut self) {
- if self.token.is_keyword(keywords::Pub) {
+ if self.token.is_keyword(kw::Pub) {
match self.parse_visibility(false) {
Ok(vis) => {
self.diagnostic()
use crate::feature_gate::UnstableFeatures;
use crate::parse::token;
use crate::diagnostics::plugin::ErrorMap;
- use crate::with_globals;
+ use crate::with_default_globals;
use std::io;
use std::path::PathBuf;
- use syntax_pos::{BytePos, Span, NO_EXPANSION};
+ use syntax_pos::{BytePos, Span, NO_EXPANSION, edition::Edition};
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_data_structures::sync::Lock;
raw_identifier_spans: Lock::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
buffered_lints: Lock::new(vec![]),
+ edition: Edition::from_session(),
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
}
}
#[test]
fn t1() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
let mut string_reader = setup(&sm,
#[test]
fn doublecolonparsing() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
check_tokenization(setup(&sm, &sh, "a b".to_string()),
#[test]
fn dcparsing_2() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
check_tokenization(setup(&sm, &sh, "a::b".to_string()),
#[test]
fn dcparsing_3() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
check_tokenization(setup(&sm, &sh, "a ::b".to_string()),
#[test]
fn dcparsing_4() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
check_tokenization(setup(&sm, &sh, "a:: b".to_string()),
#[test]
fn character_a() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
#[test]
fn character_space() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
#[test]
fn character_escaped() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
#[test]
fn lifetime_name() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
#[test]
fn raw_string() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
#[test]
fn literal_suffixes() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
macro_rules! test {
#[test]
fn nested_block_comments() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
#[test]
fn crlf_comments() {
- with_globals(|| {
+ with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
use crate::parse::token::{self, Token};
use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
use crate::print::pprust;
-use crate::symbol::{keywords, Symbol};
+use crate::symbol::{kw, Symbol};
use crate::tokenstream::{TokenStream, TokenTree};
use errors::{Applicability, Handler};
Some(match lit {
token::Bool(i) => {
- assert!(i == keywords::True.name() || i == keywords::False.name());
- LitKind::Bool(i == keywords::True.name())
+ assert!(i == kw::True || i == kw::False);
+ LitKind::Bool(i == kw::True)
}
token::Byte(i) => {
match unescape_byte(&i.as_str()) {
}
LitKind::FloatUnsuffixed(symbol) => (token::Lit::Float(symbol), None),
LitKind::Bool(value) => {
- let kw = if value { keywords::True } else { keywords::False };
- (token::Lit::Bool(kw.name()), None)
+ let kw = if value { kw::True } else { kw::False };
+ (token::Lit::Bool(kw), None)
}
LitKind::Err(val) => (token::Lit::Err(val), None),
}
diag: Option<(Span, &Handler)>,
) -> Option<Lit> {
let (token, suffix) = match *token {
- token::Ident(ident, false) if ident.name == keywords::True.name() ||
- ident.name == keywords::False.name() =>
+ token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
(token::Bool(ident.name), None),
token::Literal(token, suffix) =>
(token, suffix),
use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
use rustc_data_structures::sync::{Lrc, Lock};
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
+use syntax_pos::edition::Edition;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use std::borrow::Cow;
pub span_diagnostic: Handler,
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
+ pub edition: Edition,
pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
/// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
pub raw_identifier_spans: Lock<Vec<Span>>,
included_mod_stack: Lock::new(vec![]),
source_map,
buffered_lints: Lock::new(vec![]),
+ edition: Edition::from_session(),
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
}
}
Parser::new(sess, stream, None, true, false)
}
+/// Given a stream, the `ParseSess` and the base directory, produces a parser.
+///
+/// Use this function when you are creating a parser from the token stream
+/// and also care about the current working directory of the parser (e.g.,
+/// you are trying to resolve modules defined inside a macro invocation).
+///
+/// # Note
+///
+/// The main usage of this function is outside of rustc, for those who use
+/// libsyntax as a library. Please do not remove this function while refactoring
+/// just because it is not used in the rustc codebase!
+pub fn stream_to_parser_with_base_dir<'a>(sess: &'a ParseSess,
+ stream: TokenStream,
+ base_dir: Directory<'a>) -> Parser<'a> {
+ Parser::new(sess, stream, Some(base_dir), true, false)
+}
+
/// A sequence separator.
pub struct SeqSep {
    /// The separator token.
use crate::tokenstream::{DelimSpan, TokenTree};
use crate::util::parser_testing::string_to_stream;
use crate::util::parser_testing::{string_to_expr, string_to_item};
- use crate::with_globals;
+ use crate::with_default_globals;
use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
/// Parses an item.
#[should_panic]
#[test] fn bad_path_expr_1() {
- with_globals(|| {
+ with_default_globals(|| {
string_to_expr("::abc::def::return".to_string());
})
}
// check the token-tree-ization of macros
#[test]
fn string_to_tts_macro () {
- with_globals(|| {
+ with_default_globals(|| {
use crate::symbol::sym;
let tts: Vec<_> =
#[test]
fn string_to_tts_1() {
- with_globals(|| {
+ with_default_globals(|| {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
let expected = TokenStream::new(vec![
}
#[test] fn parse_use() {
- with_globals(|| {
+ with_default_globals(|| {
let use_s = "use foo::bar::baz;";
let vitem = string_to_item(use_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
}
#[test] fn parse_extern_crate() {
- with_globals(|| {
+ with_default_globals(|| {
let ex_s = "extern crate foo;";
let vitem = string_to_item(ex_s.to_string()).unwrap();
let vitem_s = item_to_string(&vitem);
}
#[test] fn span_of_self_arg_pat_idents_are_correct() {
- with_globals(|| {
+ with_default_globals(|| {
let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
"impl z { fn a (&mut self, &myarg: i32) {} }",
}
#[test] fn parse_exprs () {
- with_globals(|| {
+ with_default_globals(|| {
// just make sure that they parse....
string_to_expr("3 + 4".to_string());
string_to_expr("a::z.froob(b,&(987+3))".to_string());
}
#[test] fn attrs_fix_bug () {
- with_globals(|| {
+ with_default_globals(|| {
string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-> Result<Box<Writer>, String> {
#[cfg(windows)]
}
#[test] fn crlf_doc_comments() {
- with_globals(|| {
+ with_default_globals(|| {
use crate::symbol::sym;
let sess = ParseSess::new(FilePathMapping::empty());
new_parser_from_source_str(sess, name, source).parse_expr()
}
- with_globals(|| {
+ with_default_globals(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let expr = parse_expr_from_source_str(PathBuf::from("foo").into(),
"foo!( fn main() { body } )".to_string(), &sess).unwrap();
// See `recurse_into_file_modules` in the parser.
#[test]
fn out_of_line_mod() {
- with_globals(|| {
+ with_default_globals(|| {
let sess = ParseSess::new(FilePathMapping::empty());
let item = parse_item_from_source_str(
PathBuf::from("foo").into(),
use crate::parse::PResult;
use crate::ThinVec;
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
-use crate::symbol::{keywords, sym, Symbol};
+use crate::symbol::{kw, sym, Symbol};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use rustc_target::spec::abi::{self, Abi};
#[derive(Clone, PartialEq)]
crate enum TokenType {
Token(token::Token),
- Keyword(keywords::Keyword),
+ Keyword(Symbol),
Operator,
Lifetime,
Ident,
crate fn to_string(&self) -> String {
match *self {
TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
- TokenType::Keyword(kw) => format!("`{}`", kw.name()),
+ TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
TokenType::Ident => "identifier".to_string(),
/// Creates a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
- let ident = Ident::new(keywords::Invalid.name(), span);
+ let ident = Ident::new(kw::Invalid, span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
TokenType::Token(token::Semi) => true, // we expect a `;` here
_ => false,
}) && ( // a `;` would be expected before the current keyword
- self.token.is_keyword(keywords::Break) ||
- self.token.is_keyword(keywords::Continue) ||
- self.token.is_keyword(keywords::For) ||
- self.token.is_keyword(keywords::If) ||
- self.token.is_keyword(keywords::Let) ||
- self.token.is_keyword(keywords::Loop) ||
- self.token.is_keyword(keywords::Match) ||
- self.token.is_keyword(keywords::Return) ||
- self.token.is_keyword(keywords::While)
+ self.token.is_keyword(kw::Break) ||
+ self.token.is_keyword(kw::Continue) ||
+ self.token.is_keyword(kw::For) ||
+ self.token.is_keyword(kw::If) ||
+ self.token.is_keyword(kw::Let) ||
+ self.token.is_keyword(kw::Loop) ||
+ self.token.is_keyword(kw::Match) ||
+ self.token.is_keyword(kw::Return) ||
+ self.token.is_keyword(kw::While)
);
let cm = self.sess.source_map();
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
is_present
}
- fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
}
/// If the next token is the given keyword, eats it and returns
/// `true`. Otherwise, returns `false`.
- pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
if self.check_keyword(kw) {
self.bump();
true
}
}
- fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
+ fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
true
/// If the given word is not a keyword, signals an error.
/// If the next token is not the given word, signals an error.
/// Otherwise, eats it.
- fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
+ fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
if !self.eat_keyword(kw) {
self.unexpected()
} else {
/// Is the current token one of the keywords that signals a bare function type?
fn token_is_bare_fn_keyword(&mut self) -> bool {
- self.check_keyword(keywords::Fn) ||
- self.check_keyword(keywords::Unsafe) ||
- self.check_keyword(keywords::Extern)
+ self.check_keyword(kw::Fn) ||
+ self.check_keyword(kw::Unsafe) ||
+ self.check_keyword(kw::Extern)
}
/// Parses a `TyKind::BareFn` type.
*/
let unsafety = self.parse_unsafety();
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let (inputs, c_variadic) = self.parse_fn_args(false, true)?;
let ret_ty = self.parse_ret_ty(false)?;
let decl = P(FnDecl {
/// Parses asyncness: `async` or nothing.
fn parse_asyncness(&mut self) -> IsAsync {
- if self.eat_keyword(keywords::Async) {
+ if self.eat_keyword(kw::Async) {
IsAsync::Async {
closure_id: ast::DUMMY_NODE_ID,
return_impl_trait_id: ast::DUMMY_NODE_ID,
/// Parses unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self) -> Unsafety {
- if self.eat_keyword(keywords::Unsafe) {
+ if self.eat_keyword(kw::Unsafe) {
Unsafety::Unsafe
} else {
Unsafety::Normal
mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
let lo = self.span;
self.eat_bad_pub();
- let (name, node, generics) = if self.eat_keyword(keywords::Type) {
+ let (name, node, generics) = if self.eat_keyword(kw::Type) {
self.parse_trait_item_assoc_ty()?
} else if self.is_const_item() {
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
(ident, TraitItemKind::Const(ty, default), ast::Generics::default())
} else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
// trait item macro.
- (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
+ (Ident::invalid(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
} else {
let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;
// Reference
self.expect_and()?;
self.parse_borrowed_pointee()?
- } else if self.eat_keyword_noexpect(keywords::Typeof) {
+ } else if self.eat_keyword_noexpect(kw::Typeof) {
// `typeof(EXPR)`
// In order to not be ambiguous, the type must be surrounded by parens.
self.expect(&token::OpenDelim(token::Paren))?;
};
self.expect(&token::CloseDelim(token::Paren))?;
TyKind::Typeof(e)
- } else if self.eat_keyword(keywords::Underscore) {
+ } else if self.eat_keyword(kw::Underscore) {
// A type to be inferred `_`
TyKind::Infer
} else if self.token_is_bare_fn_keyword() {
// Function pointer type
self.parse_ty_bare_fn(Vec::new())?
- } else if self.check_keyword(keywords::For) {
+ } else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let parse_plus = allow_plus && self.check_plus();
self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
}
- } else if self.eat_keyword(keywords::Impl) {
+ } else if self.eat_keyword(kw::Impl) {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
- } else if self.check_keyword(keywords::Dyn) &&
+ } else if self.check_keyword(kw::Dyn) &&
(self.span.rust_2018() ||
self.look_ahead(1, |t| t.can_begin_bound() &&
!can_continue_type_after_non_fn_ident(t))) {
}
fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
- let mutbl = if self.eat_keyword(keywords::Mut) {
+ let mutbl = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
- } else if self.eat_keyword(keywords::Const) {
+ } else if self.eat_keyword(kw::Const) {
Mutability::Immutable
} else {
let span = self.prev_span;
_ => 0,
}
token::BinOp(token::And) | token::AndAnd => 1,
- _ if self.token.is_keyword(keywords::Mut) => 1,
+ _ if self.token.is_keyword(kw::Mut) => 1,
_ => 0,
};
}
match ty {
Ok(ty) => {
- let ident = Ident::new(keywords::Invalid.name(), self.prev_span);
+ let ident = Ident::new(kw::Invalid, self.prev_span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(
fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
match self.token {
- token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
+ token::Ident(ident, false) if ident.name == kw::Underscore => {
let span = self.span;
self.bump();
Ok(Ident::new(ident.name, span))
// above). `path_span` has the span of that path, or an empty
// span in the case of something like `<T>::Bar`.
let (mut path, path_span);
- if self.eat_keyword(keywords::As) {
+ if self.eat_keyword(kw::As) {
let path_lo = self.span;
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_span);
/// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
- if self.eat_keyword(keywords::Mut) {
+ if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
hi = path.span;
return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
}
- if self.span.rust_2018() && self.check_keyword(keywords::Async) {
+ if self.span.rust_2018() && self.check_keyword(kw::Async) {
return if self.is_async_block() { // check for `async {` and `async move {`
self.parse_async_block(attrs)
} else {
self.parse_lambda_expr(attrs)
};
}
- if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
+ if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
return self.parse_lambda_expr(attrs);
}
- if self.eat_keyword(keywords::If) {
+ if self.eat_keyword(kw::If) {
return self.parse_if_expr(attrs);
}
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
let lo = self.prev_span;
return self.parse_for_expr(None, lo, attrs);
}
- if self.eat_keyword(keywords::While) {
+ if self.eat_keyword(kw::While) {
let lo = self.prev_span;
return self.parse_while_expr(None, lo, attrs);
}
if let Some(label) = self.eat_label() {
let lo = label.ident.span;
self.expect(&token::Colon)?;
- if self.eat_keyword(keywords::While) {
+ if self.eat_keyword(kw::While) {
return self.parse_while_expr(Some(label), lo, attrs)
}
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
return self.parse_for_expr(Some(label), lo, attrs)
}
- if self.eat_keyword(keywords::Loop) {
+ if self.eat_keyword(kw::Loop) {
return self.parse_loop_expr(Some(label), lo, attrs)
}
if self.token == token::OpenDelim(token::Brace) {
err.span_label(self.span, msg);
return Err(err);
}
- if self.eat_keyword(keywords::Loop) {
+ if self.eat_keyword(kw::Loop) {
let lo = self.prev_span;
return self.parse_loop_expr(None, lo, attrs);
}
- if self.eat_keyword(keywords::Continue) {
+ if self.eat_keyword(kw::Continue) {
let label = self.eat_label();
let ex = ExprKind::Continue(label);
let hi = self.prev_span;
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
- if self.eat_keyword(keywords::Match) {
+ if self.eat_keyword(kw::Match) {
let match_sp = self.prev_span;
return self.parse_match_expr(attrs).map_err(|mut err| {
err.span_label(match_sp, "while parsing this match expression");
err
});
}
- if self.eat_keyword(keywords::Unsafe) {
+ if self.eat_keyword(kw::Unsafe) {
return self.parse_block_expr(
None,
lo,
}
if self.is_try_block() {
let lo = self.span;
- assert!(self.eat_keyword(keywords::Try));
+ assert!(self.eat_keyword(kw::Try));
return self.parse_try_block(lo, attrs);
}
- if self.eat_keyword(keywords::Return) {
+ if self.eat_keyword(kw::Return) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
} else {
ex = ExprKind::Ret(None);
}
- } else if self.eat_keyword(keywords::Break) {
+ } else if self.eat_keyword(kw::Break) {
let label = self.eat_label();
let e = if self.token.can_begin_expr()
&& !(self.token == token::OpenDelim(token::Brace)
};
ex = ExprKind::Break(label, e);
hi = self.prev_span;
- } else if self.eat_keyword(keywords::Yield) {
+ } else if self.eat_keyword(kw::Yield) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
} else {
ex = ExprKind::Yield(None);
}
- } else if self.token.is_keyword(keywords::Let) {
+ } else if self.token.is_keyword(kw::Let) {
// Catch this syntax error here, instead of in `parse_ident`, so
// that we can explicitly mention that let is not to be used as an expression
let mut db = self.fatal("expected expression, found statement (`let`)");
db.span_label(self.span, "expected expression");
db.note("variable declaration using `let` is a statement");
return Err(db);
- } else if self.span.rust_2018() && self.eat_keyword(keywords::Await) {
+ } else if self.span.rust_2018() && self.eat_keyword(kw::Await) {
let (await_hi, e_kind) = self.parse_await_macro_or_alt(lo, self.prev_span)?;
hi = await_hi;
ex = e_kind;
// Assuming we have just parsed `.`, continue parsing into an expression.
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
- if self.span.rust_2018() && self.eat_keyword(keywords::Await) {
+ if self.span.rust_2018() && self.eat_keyword(kw::Await) {
let span = lo.to(self.prev_span);
let await_expr = self.mk_expr(
span,
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), ExprKind::AddrOf(m, e))
}
- token::Ident(..) if self.token.is_keyword(keywords::In) => {
+ token::Ident(..) if self.token.is_keyword(kw::In) => {
self.bump();
let place = self.parse_expr_res(
Restrictions::NO_STRUCT_LITERAL,
let blk_expr = self.mk_expr(span, ExprKind::Block(blk, None), ThinVec::new());
(lo.to(span), ExprKind::ObsoleteInPlace(place, blk_expr))
}
- token::Ident(..) if self.token.is_keyword(keywords::Box) => {
+ token::Ident(..) if self.token.is_keyword(kw::Box) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
/// Parses an `if` or `if let` expression (`if` token already eaten).
fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- if self.check_keyword(keywords::Let) {
+ if self.check_keyword(kw::Let) {
return self.parse_if_let_expr(attrs);
}
let lo = self.prev_span;
// verify that the last statement is either an implicit return (no `;`) or an explicit
// return. This won't catch blocks with an explicit `return`, but that would be caught by
// the dead code lint.
- if self.eat_keyword(keywords::Else) || !cond.returns() {
+ if self.eat_keyword(kw::Else) || !cond.returns() {
let sp = self.sess.source_map().next_point(lo);
let mut err = self.diagnostic()
.struct_span_err(sp, "missing condition for `if` statemement");
})?;
let mut els: Option<P<Expr>> = None;
let mut hi = thn.span;
- if self.eat_keyword(keywords::Else) {
+ if self.eat_keyword(kw::Else) {
let elexpr = self.parse_else_expr()?;
hi = elexpr.span;
els = Some(elexpr);
fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
let lo = self.prev_span;
- self.expect_keyword(keywords::Let)?;
+ self.expect_keyword(kw::Let)?;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let thn = self.parse_block()?;
- let (hi, els) = if self.eat_keyword(keywords::Else) {
+ let (hi, els) = if self.eat_keyword(kw::Else) {
let expr = self.parse_else_expr()?;
(expr.span, Some(expr))
} else {
-> PResult<'a, P<Expr>>
{
let lo = self.span;
- let movability = if self.eat_keyword(keywords::Static) {
+ let movability = if self.eat_keyword(kw::Static) {
Movability::Static
} else {
Movability::Movable
} else {
IsAsync::NotAsync
};
- let capture_clause = if self.eat_keyword(keywords::Move) {
+ let capture_clause = if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
// `else` token already eaten
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
- if self.eat_keyword(keywords::If) {
+ if self.eat_keyword(kw::If) {
return self.parse_if_expr(ThinVec::new());
} else {
let blk = self.parse_block()?;
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
let pat = self.parse_top_level_pat()?;
- if !self.eat_keyword(keywords::In) {
+ if !self.eat_keyword(kw::In) {
let in_span = self.prev_span.between(self.span);
let mut err = self.sess.span_diagnostic
.struct_span_err(in_span, "missing `in` in `for` loop");
err.emit();
}
let in_span = self.prev_span;
- if self.eat_keyword(keywords::In) {
+ if self.eat_keyword(kw::In) {
// a common typo: `for _ in in bar {}`
let mut err = self.sess.span_diagnostic.struct_span_err(
self.prev_span,
fn parse_while_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- if self.token.is_keyword(keywords::Let) {
+ if self.token.is_keyword(kw::Let) {
return self.parse_while_let_expr(opt_label, span_lo, attrs);
}
let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- self.expect_keyword(keywords::Let)?;
+ self.expect_keyword(kw::Let)?;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
-> PResult<'a, P<Expr>>
{
let span_lo = self.span;
- self.expect_keyword(keywords::Async)?;
- let capture_clause = if self.eat_keyword(keywords::Move) {
+ self.expect_keyword(kw::Async)?;
+ let capture_clause = if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
{
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
- if self.eat_keyword(keywords::Catch) {
+ if self.eat_keyword(kw::Catch) {
let mut error = self.struct_span_err(self.prev_span,
"keyword `catch` cannot follow a `try` block");
error.help("try using `match` on the result of the `try` block instead");
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
let pats = self.parse_pats()?;
- let guard = if self.eat_keyword(keywords::If) {
+ let guard = if self.eat_keyword(kw::If) {
Some(Guard::If(self.parse_expr()?))
} else {
None
(pat, fieldname, false)
} else {
// Parsing a pattern of the form "(box) (ref) (mut) fieldname"
- let is_box = self.eat_keyword(keywords::Box);
+ let is_box = self.eat_keyword(kw::Box);
let boxed_span = self.span;
- let is_ref = self.eat_keyword(keywords::Ref);
- let is_mut = self.eat_keyword(keywords::Mut);
+ let is_ref = self.eat_keyword(kw::Ref);
+ let is_mut = self.eat_keyword(kw::Mut);
let fieldname = self.parse_ident()?;
hi = self.prev_span;
pat = PatKind::Slice(before, slice, after);
}
// At this point, token != &, &&, (, [
- _ => if self.eat_keyword(keywords::Underscore) {
+ _ => if self.eat_keyword(kw::Underscore) {
// Parse _
pat = PatKind::Wild;
- } else if self.eat_keyword(keywords::Mut) {
+ } else if self.eat_keyword(kw::Mut) {
// Parse mut ident @ pat / mut ref ident @ pat
let mutref_span = self.prev_span.to(self.span);
- let binding_mode = if self.eat_keyword(keywords::Ref) {
+ let binding_mode = if self.eat_keyword(kw::Ref) {
self.diagnostic()
.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
.span_suggestion(
BindingMode::ByValue(Mutability::Mutable)
};
pat = self.parse_pat_ident(binding_mode)?;
- } else if self.eat_keyword(keywords::Ref) {
+ } else if self.eat_keyword(kw::Ref) {
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();
pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
- } else if self.eat_keyword(keywords::Box) {
+ } else if self.eat_keyword(kw::Box) {
// Parse box pat
let subpat = self.parse_pat_with_range_pat(false, None)?;
pat = PatKind::Box(subpat);
}
fn is_async_block(&self) -> bool {
- self.token.is_keyword(keywords::Async) &&
+ self.token.is_keyword(kw::Async) &&
(
( // `async move {`
- self.look_ahead(1, |t| t.is_keyword(keywords::Move)) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Move)) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
) || ( // `async {`
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
}
fn is_async_fn(&self) -> bool {
- self.token.is_keyword(keywords::Async) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
+ self.token.is_keyword(kw::Async) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Fn))
}
fn is_do_catch_block(&self) -> bool {
- self.token.is_keyword(keywords::Do) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
+ self.token.is_keyword(kw::Do) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Catch)) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
fn is_try_block(&self) -> bool {
- self.token.is_keyword(keywords::Try) &&
+ self.token.is_keyword(kw::Try) &&
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
self.span.rust_2018() &&
// prevent `while try {} {}`, `if try {} {} else {}`, etc.
}
fn is_union_item(&self) -> bool {
- self.token.is_keyword(keywords::Union) &&
+ self.token.is_keyword(kw::Union) &&
self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
}
fn is_crate_vis(&self) -> bool {
- self.token.is_keyword(keywords::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
+ self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
}
fn is_existential_type_decl(&self) -> bool {
- self.token.is_keyword(keywords::Existential) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Type))
+ self.token.is_keyword(kw::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Type))
}
fn is_auto_trait_item(&self) -> bool {
// auto trait
- (self.token.is_keyword(keywords::Auto)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ (self.token.is_keyword(kw::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
|| // unsafe auto trait
- (self.token.is_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Auto)) &&
- self.look_ahead(2, |t| t.is_keyword(keywords::Trait)))
+ (self.token.is_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Auto)) &&
+ self.look_ahead(2, |t| t.is_keyword(kw::Trait)))
}
fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
-> PResult<'a, Option<P<Item>>> {
let token_lo = self.span;
let (ident, def) = match self.token {
- token::Ident(ident, false) if ident.name == keywords::Macro.name() => {
+ token::Ident(ident, false) if ident.name == kw::Macro => {
self.bump();
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
- Ok(Some(if self.eat_keyword(keywords::Let) {
+ Ok(Some(if self.eat_keyword(kw::Let) {
Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Local(self.parse_local(attrs.into())?),
// it's a macro invocation
let id = match self.token {
- token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier
+ token::OpenDelim(_) => Ident::invalid(), // no special identifier
_ => self.parse_ident()?,
};
_ => {
// we only expect an ident if we didn't parse one
// above.
- let ident_str = if id.name == keywords::Invalid.name() {
+ let ident_str = if id.name == kw::Invalid {
"identifier, "
} else {
""
MacStmtStyle::NoBraces
};
- if id.name == keywords::Invalid.name() {
+ if id.name == kw::Invalid {
let mac = respan(lo.to(hi), Mac_ { path: pth, tts, delim });
let node = if delim == MacDelimiter::Brace ||
self.token == token::Semi || self.token == token::Eof {
let tok = self.this_token_descr();
let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
let do_not_suggest_help =
- self.token.is_keyword(keywords::In) || self.token == token::Colon;
+ self.token.is_keyword(kw::In) || self.token == token::Colon;
if self.token.is_ident_named("and") {
e.span_suggestion_short(
let is_bound_start = self.check_path() || self.check_lifetime() ||
self.check(&token::Not) || // used for error reporting only
self.check(&token::Question) ||
- self.check_keyword(keywords::For) ||
+ self.check_keyword(kw::For) ||
self.check(&token::OpenDelim(token::Paren));
if is_bound_start {
let lo = self.span;
}
fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
bounds,
kind: ast::GenericParamKind::Lifetime,
});
- } else if self.check_keyword(keywords::Const) {
+ } else if self.check_keyword(kw::Const) {
// Parse const parameter.
params.push(self.parse_const_param(attrs)?);
} else if self.check_ident() {
span: syntax_pos::DUMMY_SP,
};
- if !self.eat_keyword(keywords::Where) {
+ if !self.eat_keyword(kw::Where) {
return Ok(where_clause);
}
let lo = self.prev_span;
_ => unreachable!()
};
let isolated_self = |this: &mut Self, n| {
- this.look_ahead(n, |t| t.is_keyword(keywords::SelfLower)) &&
+ this.look_ahead(n, |t| t.is_keyword(kw::SelfLower)) &&
this.look_ahead(n + 1, |t| t != &token::ModSep)
};
(if isolated_self(self, 1) {
self.bump();
SelfKind::Region(None, Mutability::Immutable)
- } else if self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) &&
+ } else if self.look_ahead(1, |t| t.is_keyword(kw::Mut)) &&
isolated_self(self, 2) {
self.bump();
self.bump();
let lt = self.expect_lifetime();
SelfKind::Region(Some(lt), Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_lifetime()) &&
- self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) &&
+ self.look_ahead(2, |t| t.is_keyword(kw::Mut)) &&
isolated_self(self, 3) {
self.bump();
let lt = self.expect_lifetime();
} else {
SelfKind::Value(Mutability::Immutable)
}, eself_ident, eself_hi)
- } else if self.token.is_keyword(keywords::Mut) &&
+ } else if self.token.is_keyword(kw::Mut) &&
isolated_self(self, 1) {
// mut self
// mut self: TYPE
/// Returns `true` if we are looking at `const ID`
/// (returns `false` for things like `const fn`, etc.).
fn is_const_item(&self) -> bool {
- self.token.is_keyword(keywords::Const) &&
- !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
- !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
+ self.token.is_keyword(kw::Const) &&
+ !self.look_ahead(1, |t| t.is_keyword(kw::Fn)) &&
+ !self.look_ahead(1, |t| t.is_keyword(kw::Unsafe))
}
/// Parses all the "front matter" for a `fn` declaration, up to
Abi
)>
{
- let is_const_fn = self.eat_keyword(keywords::Const);
+ let is_const_fn = self.eat_keyword(kw::Const);
let const_span = self.prev_span;
let unsafety = self.parse_unsafety();
let asyncness = self.parse_asyncness();
let (constness, unsafety, abi) = if is_const_fn {
(respan(const_span, Constness::Const), unsafety, Abi::Rust)
} else {
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
(respan(self.prev_span, Constness::NotConst), unsafety, abi)
};
- if !self.eat_keyword(keywords::Fn) {
+ if !self.eat_keyword(kw::Fn) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
} else if self.is_const_item() {
// This parses the grammar:
// ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let name = self.parse_ident()?;
self.expect(&token::Colon)?;
let typ = self.parse_ty()?;
// code copied from parse_macro_use_or_failure... abstraction!
if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
// method macro
- Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(),
+ Ok((Ident::invalid(), vec![], ast::Generics::default(),
ast::ImplItemKind::Macro(mac)))
} else {
let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;
self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
t == &token::Colon || t == &token::Eq) ||
- self.look_ahead(1, |t| t.is_keyword(keywords::Const)))
+ self.look_ahead(1, |t| t.is_keyword(kw::Const)))
}
fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
};
// Parse both types and traits as a type, then reinterpret if necessary.
- let err_path = |span| ast::Path::from_ident(Ident::new(keywords::Invalid.name(), span));
- let ty_first = if self.token.is_keyword(keywords::For) &&
+ let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span));
+ let ty_first = if self.token.is_keyword(kw::For) &&
self.look_ahead(1, |t| t != &token::Lt) {
let span = self.prev_span.between(self.span);
self.struct_span_err(span, "missing trait in a trait impl").emit();
};
// If `for` is missing we try to recover.
- let has_for = self.eat_keyword(keywords::For);
+ let has_for = self.eat_keyword(kw::For);
let missing_for_span = self.prev_span.between(self.span);
let ty_second = if self.token == token::DotDot {
}
};
- Ok((keywords::Invalid.ident(), item_kind, Some(attrs)))
+ Ok((Ident::invalid(), item_kind, Some(attrs)))
}
fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
self.expect_lt()?;
let params = self.parse_generic_params()?;
self.expect_gt()?;
// Otherwise if we look ahead and see a paren we parse a tuple-style
// struct.
- let vdata = if self.token.is_keyword(keywords::Where) {
+ let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
if self.eat(&token::Semi) {
// If we see a: `struct Foo<T> where T: Copy;` style decl.
let mut generics = self.parse_generics()?;
- let vdata = if self.token.is_keyword(keywords::Where) {
+ let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |x| x);
- self.expected_tokens.push(TokenType::Keyword(keywords::Crate));
+ self.expected_tokens.push(TokenType::Keyword(kw::Crate));
if self.is_crate_vis() {
self.bump(); // `crate`
return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
}
- if !self.eat_keyword(keywords::Pub) {
+ if !self.eat_keyword(kw::Pub) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
// keyword to grab a span from for inherited visibility; an empty span at the
// beginning of the current token would seem to be the "Schelling span".
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
// by the following tokens.
- if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) &&
+ if self.look_ahead(1, |t| t.is_keyword(kw::Crate)) &&
self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)`
{
// `pub(crate)`
VisibilityKind::Crate(CrateSugar::PubCrate),
);
return Ok(vis)
- } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) {
+ } else if self.look_ahead(1, |t| t.is_keyword(kw::In)) {
// `pub(in path)`
self.bump(); // `(`
self.bump(); // `in`
});
return Ok(vis)
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
- t.is_keyword(keywords::SelfLower))
+ self.look_ahead(1, |t| t.is_keyword(kw::Super) ||
+ t.is_keyword(kw::SelfLower))
{
// `pub(self)` or `pub(super)`
self.bump(); // `(`
/// Parses defaultness (i.e., `default` or nothing).
fn parse_defaultness(&mut self) -> Defaultness {
// `pub` is included for better error messages
- if self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl) ||
- t.is_keyword(keywords::Const) ||
- t.is_keyword(keywords::Fn) ||
- t.is_keyword(keywords::Unsafe) ||
- t.is_keyword(keywords::Extern) ||
- t.is_keyword(keywords::Type) ||
- t.is_keyword(keywords::Pub)) {
+ if self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl) ||
+ t.is_keyword(kw::Const) ||
+ t.is_keyword(kw::Fn) ||
+ t.is_keyword(kw::Unsafe) ||
+ t.is_keyword(kw::Extern) ||
+ t.is_keyword(kw::Type) ||
+ t.is_keyword(kw::Pub)) {
self.bump(); // `default`
Defaultness::Default
} else {
/// Parses a function declaration from a foreign module.
fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let (ident, mut generics) = self.parse_fn_header()?;
let decl = self.parse_fn_decl(true)?;
/// Parses a type from a foreign module.
fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
- self.expect_keyword(keywords::Type)?;
+ self.expect_keyword(kw::Type)?;
let ident = self.parse_ident()?;
let hi = self.span;
let error_msg = "crate name using dashes are not valid in `extern crate` statements";
let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
in the code";
- let mut ident = if self.token.is_keyword(keywords::SelfLower) {
+ let mut ident = if self.token.is_keyword(kw::SelfLower) {
self.parse_path_segment_ident()
} else {
self.parse_ident()
abi,
items: foreign_items
};
- let invalid = keywords::Invalid.ident();
+ let invalid = Ident::invalid();
Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
}
fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
// This parses the grammar:
// Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
- if self.check_keyword(keywords::Type) ||
- self.check_keyword(keywords::Existential) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Type)) {
- let existential = self.eat_keyword(keywords::Existential);
- assert!(self.eat_keyword(keywords::Type));
+ if self.check_keyword(kw::Type) ||
+ self.check_keyword(kw::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Type)) {
+ let existential = self.eat_keyword(kw::Existential);
+ assert!(self.eat_keyword(kw::Type));
Some(self.parse_existential_or_alias(existential))
} else {
None
}
fn is_static_global(&mut self) -> bool {
- if self.check_keyword(keywords::Static) {
+ if self.check_keyword(kw::Static) {
// Check if this could be a closure
!self.look_ahead(1, |token| {
- if token.is_keyword(keywords::Move) {
+ if token.is_keyword(kw::Move) {
return true;
}
match *token {
let visibility = self.parse_visibility(false)?;
- if self.eat_keyword(keywords::Use) {
+ if self.eat_keyword(kw::Use) {
// USE ITEM
let item_ = ItemKind::Use(P(self.parse_use_tree()?));
self.expect(&token::Semi)?;
let span = lo.to(self.prev_span);
- let item = self.mk_item(span, keywords::Invalid.ident(), item_, visibility, attrs);
+ let item =
+ self.mk_item(span, Ident::invalid(), item_, visibility, attrs);
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Extern) {
- if self.eat_keyword(keywords::Crate) {
+ if self.eat_keyword(kw::Extern) {
+ if self.eat_keyword(kw::Crate) {
return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
}
let opt_abi = self.parse_opt_abi()?;
- if self.eat_keyword(keywords::Fn) {
+ if self.eat_keyword(kw::Fn) {
// EXTERN FUNCTION ITEM
let fn_span = self.prev_span;
let abi = opt_abi.unwrap_or(Abi::C);
if self.is_static_global() {
self.bump();
// STATIC ITEM
- let m = if self.eat_keyword(keywords::Mut) {
+ let m = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Const) {
+ if self.eat_keyword(kw::Const) {
let const_span = self.prev_span;
- if self.check_keyword(keywords::Fn)
- || (self.check_keyword(keywords::Unsafe)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Fn))) {
+ if self.check_keyword(kw::Fn)
+ || (self.check_keyword(kw::Unsafe)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Fn))) {
// CONST FUNCTION ITEM
let unsafety = self.parse_unsafety();
self.bump();
}
// CONST ITEM
- if self.eat_keyword(keywords::Mut) {
+ if self.eat_keyword(kw::Mut) {
let prev_span = self.prev_span;
let mut err = self.diagnostic()
.struct_span_err(prev_span, "const globals cannot be mutable");
// `unsafe async fn` or `async fn`
if (
- self.check_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Async))
+ self.check_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Async))
) || (
- self.check_keyword(keywords::Async) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
+ self.check_keyword(kw::Async) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Fn))
)
{
// ASYNC FUNCTION ITEM
let unsafety = self.parse_unsafety();
- self.expect_keyword(keywords::Async)?;
+ self.expect_keyword(kw::Async)?;
let async_span = self.prev_span;
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(unsafety,
}
return Ok(Some(item));
}
- if self.check_keyword(keywords::Unsafe) &&
- (self.look_ahead(1, |t| t.is_keyword(keywords::Trait)) ||
- self.look_ahead(1, |t| t.is_keyword(keywords::Auto)))
+ if self.check_keyword(kw::Unsafe) &&
+ (self.look_ahead(1, |t| t.is_keyword(kw::Trait)) ||
+ self.look_ahead(1, |t| t.is_keyword(kw::Auto)))
{
// UNSAFE TRAIT ITEM
self.bump(); // `unsafe`
- let is_auto = if self.eat_keyword(keywords::Trait) {
+ let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
- self.expect_keyword(keywords::Auto)?;
- self.expect_keyword(keywords::Trait)?;
+ self.expect_keyword(kw::Auto)?;
+ self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
let (ident, item_, extra_attrs) =
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Impl) ||
- self.check_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
- self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
- self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) {
+ if self.check_keyword(kw::Impl) ||
+ self.check_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
+ self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
+ self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Unsafe)) {
// IMPL ITEM
let defaultness = self.parse_defaultness();
let unsafety = self.parse_unsafety();
- self.expect_keyword(keywords::Impl)?;
+ self.expect_keyword(kw::Impl)?;
let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
let span = lo.to(self.prev_span);
return Ok(Some(self.mk_item(span, ident, item, visibility,
maybe_append(attrs, extra_attrs))));
}
- if self.check_keyword(keywords::Fn) {
+ if self.check_keyword(kw::Fn) {
// FUNCTION ITEM
self.bump();
let fn_span = self.prev_span;
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Unsafe)
+ if self.check_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
self.bump(); // `unsafe`
// `{` is also expected after `unsafe`, in case of error, include it in the diagnostic
self.check(&token::OpenDelim(token::Brace));
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Unsafe,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Mod) {
+ if self.eat_keyword(kw::Mod) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
self.parse_item_mod(&attrs[..])?;
attrs);
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Enum) {
+ if self.eat_keyword(kw::Enum) {
// ENUM ITEM
let (ident, item_, extra_attrs) = self.parse_item_enum()?;
let prev_span = self.prev_span;
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Trait)
- || (self.check_keyword(keywords::Auto)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ if self.check_keyword(kw::Trait)
+ || (self.check_keyword(kw::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
{
- let is_auto = if self.eat_keyword(keywords::Trait) {
+ let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
- self.expect_keyword(keywords::Auto)?;
- self.expect_keyword(keywords::Trait)?;
+ self.expect_keyword(kw::Auto)?;
+ self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
// TRAIT ITEM
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Struct) {
+ if self.eat_keyword(kw::Struct) {
// STRUCT ITEM
let (ident, item_, extra_attrs) = self.parse_item_struct()?;
let prev_span = self.prev_span;
// FOREIGN STATIC ITEM
// Treat `const` as `static` for error recovery, but don't add it to expected tokens.
- if self.check_keyword(keywords::Static) || self.token.is_keyword(keywords::Const) {
- if self.token.is_keyword(keywords::Const) {
+ if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
+ if self.token.is_keyword(kw::Const) {
self.diagnostic()
.struct_span_err(self.span, "extern items cannot be `const`")
.span_suggestion(
return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
}
// FOREIGN FUNCTION ITEM
- if self.check_keyword(keywords::Fn) {
+ if self.check_keyword(kw::Fn) {
return Ok(self.parse_item_foreign_fn(visibility, lo, attrs)?);
}
// FOREIGN TYPE ITEM
- if self.check_keyword(keywords::Type) {
+ if self.check_keyword(kw::Type) {
return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?);
}
Some(mac) => {
Ok(
ForeignItem {
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
span: lo.to(self.prev_span),
id: ast::DUMMY_NODE_ID,
attrs,
let id = if self.token.is_ident() {
self.parse_ident()?
} else {
- keywords::Invalid.ident() // no special identifier
+ Ident::invalid() // no special identifier
};
// eat a matched-delimiter token tree:
let (delim, tts) = self.expect_delimited_token_tree()?;
}
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
- if self.eat_keyword(keywords::As) {
+ if self.eat_keyword(kw::As) {
self.parse_ident_or_underscore().map(Some)
} else {
Ok(None)
use crate::parse::ParseSess;
use crate::print::pprust;
use crate::ptr::P;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::syntax::parse::parse_stream_from_source_str;
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
[
- keywords::Async.name(),
+ kw::Async,
// FIXME: remove when `await!(..)` syntax is removed
// https://github.com/rust-lang/rust/issues/60610
- keywords::Await.name(),
-
- keywords::Do.name(),
- keywords::Box.name(),
- keywords::Break.name(),
- keywords::Continue.name(),
- keywords::False.name(),
- keywords::For.name(),
- keywords::If.name(),
- keywords::Loop.name(),
- keywords::Match.name(),
- keywords::Move.name(),
- keywords::Return.name(),
- keywords::True.name(),
- keywords::Unsafe.name(),
- keywords::While.name(),
- keywords::Yield.name(),
- keywords::Static.name(),
+ kw::Await,
+
+ kw::Do,
+ kw::Box,
+ kw::Break,
+ kw::Continue,
+ kw::False,
+ kw::For,
+ kw::If,
+ kw::Loop,
+ kw::Match,
+ kw::Move,
+ kw::Return,
+ kw::True,
+ kw::Unsafe,
+ kw::While,
+ kw::Yield,
+ kw::Static,
].contains(&ident.name)
}
!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
[
- keywords::Underscore.name(),
- keywords::For.name(),
- keywords::Impl.name(),
- keywords::Fn.name(),
- keywords::Unsafe.name(),
- keywords::Extern.name(),
- keywords::Typeof.name(),
- keywords::Dyn.name(),
+ kw::Underscore,
+ kw::For,
+ kw::Impl,
+ kw::Fn,
+ kw::Unsafe,
+ kw::Extern,
+ kw::Typeof,
+ kw::Dyn,
].contains(&ident.name)
}
/// Returns `true` if the token can appear at the start of a generic bound.
crate fn can_begin_bound(&self) -> bool {
- self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For) ||
+ self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
self == &Question || self == &OpenDelim(Paren)
}
match *self {
Literal(..) => true,
BinOp(Minus) => true,
- Ident(ident, false) if ident.name == keywords::True.name() => true,
- Ident(ident, false) if ident.name == keywords::False.name() => true,
+ Ident(ident, false) if ident.name == kw::True => true,
+ Ident(ident, false) if ident.name == kw::False => true,
Interpolated(ref nt) => match **nt {
NtLiteral(..) => true,
_ => false,
/// Returns `true` if the token is either the `mut` or `const` keyword.
crate fn is_mutability(&self) -> bool {
- self.is_keyword(keywords::Mut) ||
- self.is_keyword(keywords::Const)
+ self.is_keyword(kw::Mut) ||
+ self.is_keyword(kw::Const)
}
crate fn is_qpath_start(&self) -> bool {
}
/// Returns `true` if the token is a given keyword, `kw`.
- pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
- self.ident().map(|(ident, is_raw)| ident.name == kw.name() && !is_raw).unwrap_or(false)
+ pub fn is_keyword(&self, kw: Symbol) -> bool {
+ self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
}
pub fn is_path_segment_keyword(&self) -> bool {
(&Lifetime(a), &Lifetime(b)) => a.name == b.name,
(&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
- a.name == keywords::DollarCrate.name() ||
- c.name == keywords::DollarCrate.name()),
+ a.name == kw::DollarCrate ||
+ c.name == kw::DollarCrate),
(&Literal(ref a, b), &Literal(ref c, d)) => {
b == d && a.probably_equal_for_proc_macro(c)
use crate::print::pp::Breaks::{Consistent, Inconsistent};
use crate::ptr::P;
use crate::std_inject;
-use crate::symbol::{keywords, sym};
+use crate::symbol::{kw, sym};
use crate::tokenstream::{self, TokenStream, TokenTree};
use rustc_target::spec::abi::{self, Abi};
if i > 0 {
self.writer().word("::")?
}
- if segment.ident.name != keywords::PathRoot.name() {
- if segment.ident.name == keywords::DollarCrate.name() {
+ if segment.ident.name != kw::PathRoot {
+ if segment.ident.name == kw::DollarCrate {
self.print_dollar_crate(segment.ident)?;
} else {
self.writer().word(segment.ident.as_str().to_string())?;
self.s.word(";")?;
}
ast::ItemKind::Mac(ref mac) => {
- if item.ident.name == keywords::Invalid.name() {
+ if item.ident.name == kw::Invalid {
self.print_mac(mac)?;
match mac.node.delim {
MacDelimiter::Brace => {}
colons_before_params: bool)
-> io::Result<()>
{
- if segment.ident.name != keywords::PathRoot.name() {
- if segment.ident.name == keywords::DollarCrate.name() {
+ if segment.ident.name != kw::PathRoot {
+ if segment.ident.name == kw::DollarCrate {
self.print_dollar_crate(segment.ident)?;
} else {
self.print_ident(segment.ident)?;
self.print_explicit_self(&eself)?;
} else {
let invalid = if let PatKind::Ident(_, ident, _) = input.pat.node {
- ident.name == keywords::Invalid.name()
+ ident.name == kw::Invalid
} else {
false
};
use crate::ast;
use crate::source_map;
- use crate::with_globals;
+ use crate::with_default_globals;
use syntax_pos;
#[test]
fn test_fun_to_string() {
- with_globals(|| {
+ with_default_globals(|| {
let abba_ident = ast::Ident::from_str("abba");
let decl = ast::FnDecl {
#[test]
fn test_variant_to_string() {
- with_globals(|| {
+ with_default_globals(|| {
let ident = ast::Ident::from_str("principal_skinner");
let var = source_map::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
allow_internal_unstable,
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition: edition::Edition::from_session(),
});
span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
}
use crate::attr;
use crate::edition::Edition;
use crate::ext::hygiene::{Mark, SyntaxContext};
-use crate::symbol::{Ident, Symbol, keywords, sym};
-use crate::source_map::{ExpnInfo, MacroAttribute, dummy_spanned, hygiene, respan};
+use crate::symbol::{Ident, Symbol, kw, sym};
+use crate::source_map::{ExpnInfo, MacroAttribute, dummy_spanned, respan};
use crate::ptr::P;
use crate::tokenstream::TokenStream;
/// Craft a span that will be ignored by the stability lint's
/// call to source_map's `is_internal` check.
/// The expanded code uses the unstable `#[prelude_import]` attribute.
-fn ignored_span(sp: Span) -> Span {
+fn ignored_span(sp: Span, edition: Edition) -> Span {
let mark = Mark::fresh(Mark::root());
mark.set_expn_info(ExpnInfo {
call_site: DUMMY_SP,
].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition,
});
sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
}
INJECTED_CRATE_NAME.with(|opt_name| opt_name.set(Some(name)));
- let span = ignored_span(DUMMY_SP);
+ let span = ignored_span(DUMMY_SP, edition);
krate.module.items.insert(0, P(ast::Item {
attrs: vec![ast::Attribute {
style: ast::AttrStyle::Outer,
vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
node: ast::ItemKind::Use(P(ast::UseTree {
prefix: ast::Path {
- segments: iter::once(keywords::PathRoot.ident())
+ segments: iter::once(ast::Ident::with_empty_ctxt(kw::PathRoot))
.chain(
[name, "prelude", "v1"].iter().cloned()
.map(ast::Ident::from_str)
span,
})),
id: ast::DUMMY_NODE_ID,
- ident: keywords::Invalid.ident(),
+ ident: ast::Ident::invalid(),
span,
tokens: None,
}));
use crate::print::pprust;
use crate::ast::{self, Ident};
use crate::ptr::P;
-use crate::symbol::{self, Symbol, keywords, sym};
+use crate::symbol::{self, Symbol, kw, sym};
use crate::ThinVec;
struct Test {
fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
let ident = i.ident;
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
self.cx.path.push(ident);
}
debug!("current path: {}", path_name_i(&self.cx.path));
}
item.node = ast::ItemKind::Mod(module);
}
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
self.cx.path.pop();
}
smallvec![P(item)]
tests: Vec<Ident>,
tested_submods: Vec<(Ident, Ident)>)
-> (P<ast::Item>, Ident) {
- let super_ = Ident::with_empty_ctxt(keywords::Super.name());
+ let super_ = Ident::with_empty_ctxt(kw::Super);
let items = tests.into_iter().map(|r| {
cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public),
].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition: sess.edition,
});
TestHarnessGenerator {
use crate::source_map::{SourceMap, FilePathMapping};
-use crate::with_globals;
+use crate::with_default_globals;
use errors::Handler;
use errors::emitter::EmitterWriter;
}
fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
- with_globals(|| {
+ with_default_globals(|| {
let output = Arc::new(Mutex::new(Vec::new()));
let source_map = Lrc::new(SourceMap::new(FilePathMapping::empty()));
mod tests {
use super::*;
use crate::syntax::ast::Ident;
- use crate::with_globals;
+ use crate::with_default_globals;
use crate::parse::token::Token;
use crate::util::parser_testing::string_to_stream;
use syntax_pos::{Span, BytePos, NO_EXPANSION};
#[test]
fn test_concat() {
- with_globals(|| {
+ with_default_globals(|| {
let test_res = string_to_ts("foo::bar::baz");
let test_fst = string_to_ts("foo::bar");
let test_snd = string_to_ts("::baz");
#[test]
fn test_to_from_bijection() {
- with_globals(|| {
+ with_default_globals(|| {
let test_start = string_to_ts("foo::bar(baz)");
let test_end = test_start.trees().collect();
assert_eq!(test_start, test_end)
#[test]
fn test_eq_0() {
- with_globals(|| {
+ with_default_globals(|| {
let test_res = string_to_ts("foo");
let test_eqs = string_to_ts("foo");
assert_eq!(test_res, test_eqs)
#[test]
fn test_eq_1() {
- with_globals(|| {
+ with_default_globals(|| {
let test_res = string_to_ts("::bar::baz");
let test_eqs = string_to_ts("::bar::baz");
assert_eq!(test_res, test_eqs)
#[test]
fn test_eq_3() {
- with_globals(|| {
+ with_default_globals(|| {
let test_res = string_to_ts("");
let test_eqs = string_to_ts("");
assert_eq!(test_res, test_eqs)
#[test]
fn test_diseq_0() {
- with_globals(|| {
+ with_default_globals(|| {
let test_res = string_to_ts("::bar::baz");
let test_eqs = string_to_ts("bar::baz");
assert_eq!(test_res == test_eqs, false)
#[test]
fn test_diseq_1() {
- with_globals(|| {
+ with_default_globals(|| {
let test_res = string_to_ts("(bar,baz)");
let test_eqs = string_to_ts("bar,baz");
assert_eq!(test_res == test_eqs, false)
#[test]
fn test_is_empty() {
- with_globals(|| {
+ with_default_globals(|| {
let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
let test1: TokenStream =
TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into();
#[test]
fn test_dotdotdot() {
- let mut builder = TokenStreamBuilder::new();
- builder.push(TokenTree::Token(sp(0, 1), Token::Dot).joint());
- builder.push(TokenTree::Token(sp(1, 2), Token::Dot).joint());
- builder.push(TokenTree::Token(sp(2, 3), Token::Dot));
- let stream = builder.build();
- assert!(stream.eq_unspanned(&string_to_ts("...")));
- assert_eq!(stream.trees().count(), 1);
+ with_default_globals(|| {
+ let mut builder = TokenStreamBuilder::new();
+ builder.push(TokenTree::Token(sp(0, 1), Token::Dot).joint());
+ builder.push(TokenTree::Token(sp(1, 2), Token::Dot).joint());
+ builder.push(TokenTree::Token(sp(2, 3), Token::Dot));
+ let stream = builder.build();
+ assert!(stream.eq_unspanned(&string_to_ts("...")));
+ assert_eq!(stream.trees().count(), 1);
+ })
}
}
#[test]
fn test_find_best_match_for_name() {
- use crate::with_globals;
- with_globals(|| {
+ use crate::with_default_globals;
+ with_default_globals(|| {
let input = vec![Symbol::intern("aaab"), Symbol::intern("aaabc")];
assert_eq!(
find_best_match_for_name(input.iter(), "aaaa", None),
use crate::parse::token::{Token, BinOpToken};
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::ast::{self, BinOpKind};
/// Associative operator with precedence.
// DotDotDot is no longer supported, but we need some way to display the error
Token::DotDotDot => Some(DotDotEq),
Token::Colon => Some(Colon),
- _ if t.is_keyword(keywords::As) => Some(As),
+ _ if t.is_keyword(kw::As) => Some(As),
_ => None
}
}
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax_pos::Span;
pub fn expand_deriving_clone(cx: &mut ExtCtxt<'_>,
let mut stmts = Vec::new();
if is_union {
// let _: AssertParamIsCopy<Self>;
- let self_ty = cx.ty_path(cx.path_ident(trait_span, keywords::SelfUpper.ident()));
+ let self_ty =
+ cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_empty_ctxt(kw::SelfUpper)));
assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy");
} else {
match *substr.fields {
use syntax::source_map::{self, respan};
use syntax::util::map_in_place::MapInPlace;
use syntax::ptr::P;
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::parse::ParseSess;
use syntax_pos::{DUMMY_SP, Span};
};
cx.item(self.span,
- keywords::Invalid.ident(),
+ Ident::invalid(),
a,
ast::ItemKind::Impl(unsafety,
ast::ImplPolarity::Positive,
let args = {
let self_args = explicit_self.map(|explicit_self| {
- ast::Arg::from_self(explicit_self,
- keywords::SelfLower.ident().with_span_pos(trait_.span))
+ let ident = Ident::with_empty_ctxt(kw::SelfLower).with_span_pos(trait_.span);
+ ast::Arg::from_self(explicit_self, ident)
});
let nonself_args = arg_types.into_iter()
.map(|(name, ty)| cx.arg(trait_.span, name, ty));
use syntax::source_map::{respan, DUMMY_SP};
use syntax::ptr::P;
use syntax_pos::Span;
-use syntax_pos::symbol::keywords;
+use syntax_pos::symbol::kw;
/// The types of pointers
#[derive(Clone)]
PathKind::Local => cx.path_all(span, false, idents, params, Vec::new()),
PathKind::Std => {
let def_site = DUMMY_SP.apply_mark(cx.current_expansion.mark);
- idents.insert(0, Ident::new(keywords::DollarCrate.name(), def_site));
+ idents.insert(0, Ident::new(kw::DollarCrate, def_site));
cx.path_all(span, false, idents, params, Vec::new())
}
}
use syntax::ast::{self, Ident, GenericArg};
use syntax::ext::base::{self, *};
use syntax::ext::build::AstBuilder;
-use syntax::symbol::{keywords, Symbol, sym};
+use syntax::symbol::{kw, sym, Symbol};
use syntax_pos::Span;
use syntax::tokenstream;
let sp = sp.apply_mark(cx.current_expansion.mark);
let e = match env::var(&*var.as_str()) {
Err(..) => {
- let lt = cx.lifetime(sp, keywords::StaticLifetime.ident());
+ let lt = cx.lifetime(sp, Ident::with_empty_ctxt(kw::StaticLifetime));
cx.expr_path(cx.path_all(sp,
true,
cx.std_path(&["option", "Option", "None"]),
match parse_global_asm(cx, sp, tts) {
Ok(Some(global_asm)) => {
MacEager::items(smallvec![P(ast::Item {
- ident: ast::Ident::with_empty_ctxt(Symbol::intern("")),
+ ident: ast::Ident::invalid(),
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::GlobalAsm(P(global_asm)),
use rustc_data_structures::sync::Lrc;
use syntax::ast;
use syntax::ext::base::{MacroExpanderFn, NormalTT, NamedSyntaxExtension, MultiModifier};
-use syntax::ext::hygiene;
use syntax::symbol::Symbol;
+use syntax::edition::Edition;
pub fn register_builtins(resolver: &mut dyn syntax::ext::base::Resolver,
- user_exts: Vec<NamedSyntaxExtension>) {
+ user_exts: Vec<NamedSyntaxExtension>,
+ edition: Edition) {
deriving::register_builtin_derives(resolver);
let mut register = |name, ext| {
resolver.add_builtin(ast::Ident::with_empty_ctxt(name), Lrc::new(ext));
};
-
macro_rules! register {
($( $name:ident: $f:expr, )*) => { $(
register(Symbol::intern(stringify!($name)),
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
- edition: hygiene::default_edition(),
+ edition,
});
)* }
}
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
- edition: hygiene::default_edition(),
+ edition,
});
register(Symbol::intern("format_args_nl"),
NormalTT {
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
- edition: hygiene::default_edition(),
+ edition,
});
for (name, ext) in user_exts {
use syntax::ast::{self, Ident};
use syntax::attr;
-use syntax::source_map::{ExpnInfo, MacroAttribute, hygiene, respan};
+use syntax::source_map::{ExpnInfo, MacroAttribute, respan};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::ext::expand::ExpansionConfig;
use syntax::parse::ParseSess;
use syntax::ptr::P;
use syntax::symbol::Symbol;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP};
].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition: cx.parse_sess.edition,
});
let span = DUMMY_SP.apply_mark(mark);
let custom_derive = Ident::from_str("custom_derive");
let attr = Ident::from_str("attr");
let bang = Ident::from_str("bang");
- let crate_kw = Ident::with_empty_ctxt(keywords::Crate.name());
+ let crate_kw = Ident::with_empty_ctxt(kw::Crate);
let decls = {
let local_path = |sp: Span, name| {
use syntax::parse::{self, token, ParseSess};
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use syntax_pos::hygiene::{SyntaxContext, Transparency};
-use syntax_pos::symbol::{keywords, Symbol};
+use syntax_pos::symbol::{kw, Symbol};
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
trait FromInternal<T> {
Question => op!('?'),
SingleQuote => op!('\''),
- Ident(ident, false) if ident.name == keywords::DollarCrate.name() =>
+ Ident(ident, false) if ident.name == kw::DollarCrate =>
tt!(Ident::dollar_crate()),
Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)),
Lifetime(ident) => {
}
fn dollar_crate(span: Span) -> Ident {
// `$crate` is accepted as an ident only if it comes from the compiler.
- Ident { sym: keywords::DollarCrate.name(), is_raw: false, span }
+ Ident { sym: kw::DollarCrate, is_raw: false, span }
}
}
use syntax::ext::base::*;
use syntax::ext::build::AstBuilder;
-use syntax::ext::hygiene::{self, Mark, SyntaxContext};
+use syntax::ext::hygiene::{Mark, SyntaxContext};
use syntax::attr;
use syntax::ast;
use syntax::print::pprust;
].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition: cx.parse_sess.edition,
});
(item.span.with_ctxt(SyntaxContext::empty().apply_mark(mark)),
attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
use syntax::ext::base::*;
use syntax::ext::build::AstBuilder;
-use syntax::ext::hygiene::{self, Mark, SyntaxContext};
+use syntax::ext::hygiene::{Mark, SyntaxContext};
use syntax::ast;
use syntax::source_map::respan;
use syntax::symbol::{Symbol, sym};
].into()),
allow_internal_unsafe: false,
local_inner_macros: false,
- edition: hygiene::default_edition(),
+ edition: ecx.parse_sess.edition,
});
attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
};
use syntax::ext::base::{self, ExtCtxt};
use syntax::feature_gate;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::Span;
use syntax::tokenstream::TokenTree;
}
match (tt.len(), tt.first()) {
- (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::True) => {
+ (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
cx.set_trace_macros(true);
}
- (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::False) => {
+ (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
cx.set_trace_macros(false);
}
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
use crate::symbol::{Symbol, sym};
use std::fmt;
use std::str::FromStr;
+use crate::GLOBALS;
/// The edition of the compiler (RFC 2052)
#[derive(Clone, Copy, Hash, PartialEq, PartialOrd, Debug, RustcEncodable, RustcDecodable, Eq)]
}
impl Edition {
+ pub fn from_session() -> Edition {
+ GLOBALS.with(|globals| globals.edition)
+ }
+
pub fn lint_name(&self) -> &'static str {
match *self {
Edition::Edition2015 => "rust_2015_compatibility",
use crate::GLOBALS;
use crate::Span;
-use crate::edition::{Edition, DEFAULT_EDITION};
-use crate::symbol::{keywords, Symbol};
+use crate::edition::Edition;
+use crate::symbol::{kw, Symbol};
use serialize::{Encodable, Decodable, Encoder, Decoder};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
marks: Vec<MarkData>,
syntax_contexts: Vec<SyntaxContextData>,
markings: FxHashMap<(SyntaxContext, Mark, Transparency), SyntaxContext>,
- default_edition: Edition,
}
impl HygieneData {
prev_ctxt: SyntaxContext(0),
opaque: SyntaxContext(0),
opaque_and_semitransparent: SyntaxContext(0),
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
}],
markings: FxHashMap::default(),
- default_edition: DEFAULT_EDITION,
}
}
}
}
-pub fn default_edition() -> Edition {
- HygieneData::with(|data| data.default_edition)
-}
-
-pub fn set_default_edition(edition: Edition) {
- HygieneData::with(|data| data.default_edition = edition);
-}
-
pub fn clear_markings() {
HygieneData::with(|data| data.markings = FxHashMap::default());
}
prev_ctxt: SyntaxContext::empty(),
opaque: SyntaxContext::empty(),
opaque_and_semitransparent: SyntaxContext::empty(),
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
});
SyntaxContext(data.syntax_contexts.len() as u32 - 1)
})
prev_ctxt,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
});
new_opaque
});
prev_ctxt,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
});
new_opaque_and_semitransparent
});
prev_ctxt,
opaque,
opaque_and_semitransparent,
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
});
new_opaque_and_semitransparent_and_transparent
})
&mut data.syntax_contexts[self.0 as usize].dollar_crate_name, dollar_crate_name
);
assert!(dollar_crate_name == prev_dollar_crate_name ||
- prev_dollar_crate_name == keywords::DollarCrate.name(),
+ prev_dollar_crate_name == kw::DollarCrate,
"$crate name is reset for a syntax context");
})
}
extern crate serialize as rustc_serialize; // used by deriving
pub mod edition;
+use edition::Edition;
pub mod hygiene;
pub use hygiene::{Mark, SyntaxContext, ExpnInfo, ExpnFormat, CompilerDesugaringKind};
symbol_interner: Lock<symbol::Interner>,
span_interner: Lock<span_encoding::SpanInterner>,
hygiene_data: Lock<hygiene::HygieneData>,
+ edition: Edition,
}
impl Globals {
- pub fn new() -> Globals {
+ pub fn new(edition: Edition) -> Globals {
Globals {
symbol_interner: Lock::new(symbol::Interner::fresh()),
span_interner: Lock::new(span_encoding::SpanInterner::default()),
hygiene_data: Lock::new(hygiene::HygieneData::new()),
+ edition,
}
}
}
/// Edition of the crate from which this span came.
pub fn edition(self) -> edition::Edition {
- self.ctxt().outer().expn_info().map_or_else(|| hygiene::default_edition(),
- |einfo| einfo.edition)
+ self.ctxt().outer().expn_info().map_or_else(|| {
+ Edition::from_session()
+ }, |einfo| einfo.edition)
}
#[inline]
rustc_layout_scalar_valid_range_end,
rustc_layout_scalar_valid_range_start,
rustc_mir,
+ rustc_nonnull_optimization_guaranteed,
rustc_object_lifetime_default,
rustc_on_unimplemented,
rustc_outlives,
Ident::new(name, DUMMY_SP)
}
+ #[inline]
+ pub fn invalid() -> Ident {
+ Ident::with_empty_ctxt(kw::Invalid)
+ }
+
/// Maps an interned string to an identifier with an empty syntax context.
pub fn from_interned_str(string: InternedString) -> Ident {
Ident::with_empty_ctxt(string.as_symbol())
/// Transforms an underscore identifier into one with the same name, but
/// gensymed. Leaves non-underscore identifiers unchanged.
pub fn gensym_if_underscore(self) -> Ident {
- if self.name == keywords::Underscore.name() { self.gensym() } else { self }
+ if self.name == kw::Underscore { self.gensym() } else { self }
}
// WARNING: this function is deprecated and will be removed in the future.
this.strings.reserve(init.len());
// We can't allocate empty strings in the arena, so handle this here.
- assert!(keywords::Invalid.name().as_u32() == 0 && init[0].is_empty());
- this.names.insert("", keywords::Invalid.name());
+ assert!(kw::Invalid.as_u32() == 0 && init[0].is_empty());
+ this.names.insert("", kw::Invalid);
this.strings.push("");
for string in &init[1..] {
}
}
-pub mod keywords {
- use super::{Symbol, Ident};
-
- #[derive(Clone, Copy, PartialEq, Eq)]
- pub struct Keyword {
- ident: Ident,
- }
-
- impl Keyword {
- #[inline]
- pub fn ident(self) -> Ident {
- self.ident
- }
-
- #[inline]
- pub fn name(self) -> Symbol {
- self.ident.name
- }
- }
-
+// This module has a very short name because it's used a lot.
+pub mod kw {
+ use super::Symbol;
keywords!();
}
impl Symbol {
fn is_used_keyword_2018(self) -> bool {
- self == keywords::Dyn.name()
+ self == kw::Dyn
}
fn is_unused_keyword_2018(self) -> bool {
- self >= keywords::Async.name() && self <= keywords::Try.name()
+ self >= kw::Async && self <= kw::Try
+ }
+
+ /// Used for sanity checking rustdoc keyword sections.
+ pub fn is_doc_keyword(self) -> bool {
+ self <= kw::Union
}
}
// Returns `true` for reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special(self) -> bool {
- self.name <= keywords::Underscore.name()
+ self.name <= kw::Underscore
}
/// Returns `true` if the token is a keyword used in the language.
pub fn is_used_keyword(self) -> bool {
// Note: `span.edition()` is relatively expensive, don't call it unless necessary.
- self.name >= keywords::As.name() && self.name <= keywords::While.name() ||
+ self.name >= kw::As && self.name <= kw::While ||
self.name.is_used_keyword_2018() && self.span.rust_2018()
}
/// Returns `true` if the token is a keyword reserved for possible future use.
pub fn is_unused_keyword(self) -> bool {
// Note: `span.edition()` is relatively expensive, don't call it unless necessary.
- self.name >= keywords::Abstract.name() && self.name <= keywords::Yield.name() ||
+ self.name >= kw::Abstract && self.name <= kw::Yield ||
self.name.is_unused_keyword_2018() && self.span.rust_2018()
}
/// A keyword or reserved identifier that can be used as a path segment.
pub fn is_path_segment_keyword(self) -> bool {
- self.name == keywords::Super.name() ||
- self.name == keywords::SelfLower.name() ||
- self.name == keywords::SelfUpper.name() ||
- self.name == keywords::Crate.name() ||
- self.name == keywords::PathRoot.name() ||
- self.name == keywords::DollarCrate.name()
+ self.name == kw::Super ||
+ self.name == kw::SelfLower ||
+ self.name == kw::SelfUpper ||
+ self.name == kw::Crate ||
+ self.name == kw::PathRoot ||
+ self.name == kw::DollarCrate
}
/// This identifier can be a raw identifier.
pub fn can_be_raw(self) -> bool {
- self.name != keywords::Invalid.name() && self.name != keywords::Underscore.name() &&
+ self.name != kw::Invalid && self.name != kw::Underscore &&
!self.is_path_segment_keyword()
}
/// assert_ne!(Symbol::gensym("x"), Symbol::gensym("x"))
/// assert_eq!(Symbol::gensym("x").as_interned_str(), Symbol::gensym("x").as_interned_str())
/// ```
-#[derive(Clone, Copy, Eq)]
+#[derive(Clone, Copy, PartialEq, Eq)]
pub struct InternedString {
symbol: Symbol,
}
}
}
-impl<T: std::ops::Deref<Target = str>> PartialEq<T> for InternedString {
- fn eq(&self, other: &T) -> bool {
- self.with(|string| string == other.deref())
- }
-}
-
-impl PartialEq<InternedString> for InternedString {
- fn eq(&self, other: &InternedString) -> bool {
- self.symbol == other.symbol
- }
-}
-
-impl PartialEq<InternedString> for str {
- fn eq(&self, other: &InternedString) -> bool {
- other.with(|string| self == string)
- }
-}
-
-impl<'a> PartialEq<InternedString> for &'a str {
- fn eq(&self, other: &InternedString) -> bool {
- other.with(|string| *self == string)
- }
-}
-
-impl PartialEq<InternedString> for String {
- fn eq(&self, other: &InternedString) -> bool {
- other.with(|string| self == string)
- }
-}
-
-impl<'a> PartialEq<InternedString> for &'a String {
- fn eq(&self, other: &InternedString) -> bool {
- other.with(|string| *self == string)
- }
-}
-
impl fmt::Debug for InternedString {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.with(|str| fmt::Debug::fmt(&str, f))
mod tests {
use super::*;
use crate::Globals;
+ use crate::edition;
#[test]
fn interner_tests() {
#[test]
fn without_first_quote_test() {
- GLOBALS.set(&Globals::new(), || {
+ GLOBALS.set(&Globals::new(edition::DEFAULT_EDITION), || {
let i = Ident::from_str("'break");
- assert_eq!(i.without_first_quote().name, keywords::Break.name());
+ assert_eq!(i.without_first_quote().name, kw::Break);
});
}
}
// switchInt(move _5) -> [0u32: bb4, 3u32: bb7, otherwise: bb8];
// }
// bb1: {
+// StorageDead(_3);
// goto -> bb5;
// }
// bb2: {
}
fn main() {
- syntax::with_globals(|| run());
+ syntax::with_default_globals(|| run());
}
fn run() {
allow_internal_unsafe: false,
local_inner_macros: false,
unstable_feature: None,
- edition: hygiene::default_edition(),
+ edition: reg.sess.edition(),
});
}
mod gravy;
pub fn main() {
- syntax::with_globals(|| parse());
+ syntax::with_default_globals(|| parse());
assert_eq!(gravy::foo(), 10);
}
}
fn main() {
- syntax::with_globals(|| run());
+ syntax::with_default_globals(|| run());
}
fn run() {
+++ /dev/null
-#![feature(trait_alias)]
-
-use std::marker::PhantomData;
-
-trait Empty {}
-trait EmptyAlias = Empty;
-trait CloneDefault = Clone + Default;
-trait SendSyncAlias = Send + Sync;
-trait WhereSendAlias = where Self: Send;
-trait SendEqAlias<T> = Send where T: PartialEq<Self>;
-trait I32Iterator = Iterator<Item = i32>;
-
-#[allow(dead_code)]
-struct Foo<T: SendSyncAlias>(PhantomData<T>);
-#[allow(dead_code)]
-struct Bar<T>(PhantomData<T>) where T: SendSyncAlias;
-
-impl EmptyAlias {}
-
-impl<T: SendSyncAlias> Empty for T {}
-
-fn a<T: CloneDefault>() -> (T, T) {
- let one = T::default();
- let two = one.clone();
- (one, two)
-}
-
-fn b(x: &impl SendEqAlias<i32>) -> bool {
- 22_i32 == *x
-}
-
-fn c<T: I32Iterator>(x: &mut T) -> Option<i32> {
- x.next()
-}
-
-fn d<T: SendSyncAlias>() {
- is_send_and_sync::<T>();
-}
-
-fn is_send_and_sync<T: Send + Sync>() {}
-
-fn main() {
- let both = a::<i32>();
- assert_eq!(both.0, 0);
- assert_eq!(both.1, 0);
- let both: (i32, i32) = a();
- assert_eq!(both.0, 0);
- assert_eq!(both.1, 0);
-
- assert!(b(&22));
-
- assert_eq!(c(&mut vec![22].into_iter()), Some(22));
-
- d::<i32>();
-}
+++ /dev/null
-#![feature(trait_alias)]
-
-trait Foo = PartialEq<i32> + Send;
-trait Bar = Foo + Sync;
-
-trait I32Iterator = Iterator<Item = i32>;
-
-pub fn main() {
- let a: &dyn Bar = &123;
- assert!(*a == 123);
- let b = Box::new(456) as Box<dyn Foo>;
- assert!(*b == 456);
-
- let c: &mut dyn I32Iterator = &mut vec![123].into_iter();
- assert_eq!(c.next(), Some(123));
-}
+++ /dev/null
-#![feature(trait_alias)]
-
-trait SimpleAlias = Default;
-trait GenericAlias<T> = Iterator<Item = T>;
-trait Partial<T> = IntoIterator<Item = T>;
-trait SpecificAlias = GenericAlias<i32>;
-trait PartialEqRef<'a, T: 'a> = PartialEq<&'a T>;
-trait StaticAlias = 'static;
-
-trait Things<T> {}
-trait Romeo {}
-#[allow(dead_code)]
-struct The<T>(T);
-#[allow(dead_code)]
-struct Fore<T>(T);
-impl<T, U> Things<T> for The<U> {}
-impl<T> Romeo for Fore<T> {}
-
-trait WithWhere<Art, Thou> = Romeo + Romeo where Fore<(Art, Thou)>: Romeo;
-trait BareWhere<Wild, Are> = where The<Wild>: Things<Are>;
-
-fn main() {}
error: aborting due to previous error
For more information about this error, try `rustc --explain E0425`.
-thread '$DIR/failed-doctest-output.rs - OtherStruct (line 17)' panicked at 'couldn't compile the test', src/librustdoc/test.rs:319:13
+thread '$DIR/failed-doctest-output.rs - OtherStruct (line 17)' panicked at 'couldn't compile the test', src/librustdoc/test.rs:320:13
note: Run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
---- $DIR/failed-doctest-output.rs - SomeStruct (line 11) stdout ----
thread 'main' panicked at 'oh no', $DIR/failed-doctest-output.rs:3:1
note: Run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
-', src/librustdoc/test.rs:341:17
+', src/librustdoc/test.rs:342:17
failures:
error: aborting due to previous error
-thread '$DIR/unparseable-doc-test.rs - foo (line 6)' panicked at 'couldn't compile the test', src/librustdoc/test.rs:319:13
+thread '$DIR/unparseable-doc-test.rs - foo (line 6)' panicked at 'couldn't compile the test', src/librustdoc/test.rs:320:13
note: Run with `RUST_BACKTRACE=1` environment variable to display a backtrace.
--- /dev/null
+use std::marker::PhantomData;
+
+pub struct True;
+pub struct False;
+
+pub trait InterfaceType{
+ type Send;
+}
+
+
+pub struct FooInterface<T>(PhantomData<fn()->T>);
+
+impl<T> InterfaceType for FooInterface<T> {
+ type Send=False;
+}
+
+
+pub struct DynTrait<I>{
+ _interface:PhantomData<fn()->I>,
+ _unsync_unsend:PhantomData<::std::rc::Rc<()>>,
+}
+
+unsafe impl<I> Send for DynTrait<I>
+where
+ I:InterfaceType<Send=True>
+{}
+
+// @has issue_60726/struct.IntoIter.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]//code' "impl<T> !Send for \
+// IntoIter<T>"
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]//code' "impl<T> !Sync for \
+// IntoIter<T>"
+pub struct IntoIter<T>{
+ hello:DynTrait<FooInterface<T>>,
+}
|
= note: expected type `u32`
found type `i32`
- = note: required for the cast to the object type `dyn I32Iterator<Item = u32, Item = i32>`
+ = note: required for the cast to the object type `dyn std::iter::Iterator<Item = u32, Item = i32>`
error: aborting due to previous error
--> $DIR/bad-sized.rs:4:24
|
LL | let x: Vec<Trait + Sized> = Vec::new();
- | ^^^^^ non-auto additional trait
+ | ----- ^^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
error[E0277]: the size for values of type `dyn Trait` cannot be known at compilation time
--> $DIR/bad-sized.rs:4:12
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0503]: cannot use `y` because it was mutably borrowed
--> $DIR/borrowck-anon-fields-variant.rs:37:7
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0502]: cannot borrow `*block.current` as immutable because it is also borrowed as mutable
--> $DIR/borrowck-describe-lvalue.rs:227:33
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0382]: use of moved value: `x`
--> $DIR/borrowck-describe-lvalue.rs:282:22
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0510]: cannot mutably borrow `x` in match guard
--> $DIR/borrowck-mutate-in-guard.rs:15:33
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to 3 previous errors
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0381]: use of possibly uninitialized variable: `b`
--> $DIR/const_let_refutable.rs:4:9
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to 2 previous errors
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0723]: trait bounds other than `Sized` on const fn parameters are unstable
--> $DIR/min_const_fn.rs:144:41
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to 2 previous errors
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to previous error
+#![feature(trait_alias)]
+
+trait Foo = std::io::Read + std::io::Write;
+
fn main() {
- let _: Box<std::io::Read + std::io::Write>;
+ let _: Box<dyn std::io::Read + std::io::Write>;
+ //~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+ let _: Box<dyn Foo>;
//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
}
error[E0225]: only auto traits can be used as additional traits in a trait object
- --> $DIR/E0225.rs:2:32
+ --> $DIR/E0225.rs:6:36
|
-LL | let _: Box<std::io::Read + std::io::Write>;
- | ^^^^^^^^^^^^^^ non-auto additional trait
+LL | let _: Box<dyn std::io::Read + std::io::Write>;
+ | ------------- ^^^^^^^^^^^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
-error: aborting due to previous error
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/E0225.rs:8:20
+ |
+LL | trait Foo = std::io::Read + std::io::Write;
+ | ------------- -------------- additional non-auto trait
+ | |
+ | first non-auto trait
+...
+LL | let _: Box<dyn Foo>;
+ | ^^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0225`.
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/feature-gate-nll.rs:10:1
#[rustc_variance] //~ ERROR the `#[rustc_variance]` attribute is just used for rustc unit tests and will never be stable
#[rustc_error] //~ ERROR the `#[rustc_error]` attribute is just used for rustc unit tests and will never be stable
+#[rustc_nonnull_optimization_guaranteed] //~ ERROR the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable niche optimizations in libcore and will never be stable
fn main() {}
= note: for more information, see https://github.com/rust-lang/rust/issues/29642
= help: add #![feature(rustc_attrs)] to the crate attributes to enable
-error: aborting due to 2 previous errors
+error[E0658]: the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable niche optimizations in libcore and will never be stable
+ --> $DIR/feature-gate-rustc-attrs-1.rs:7:1
+ |
+LL | #[rustc_nonnull_optimization_guaranteed]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(rustc_attrs)] to the crate attributes to enable
+
+error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0658`.
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to previous error
error[E0393]: the type parameter `Rhs` must be explicitly specified
- --> $DIR/issue-22560.rs:3:13
+ --> $DIR/issue-22560.rs:6:13
|
-LL | type Test = Add +
+LL | Sub;
| ^^^ missing reference to `Rhs`
|
= note: because of the default `Self` reference, type parameters must be specified on object types
error[E0393]: the type parameter `Rhs` must be explicitly specified
- --> $DIR/issue-22560.rs:6:13
+ --> $DIR/issue-22560.rs:3:13
|
-LL | Sub;
+LL | type Test = Add +
| ^^^ missing reference to `Rhs`
|
= note: because of the default `Self` reference, type parameters must be specified on object types
error[E0225]: only auto traits can be used as additional traits in a trait object
--> $DIR/issue-22560.rs:6:13
|
+LL | type Test = Add +
+ | ---
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+...
LL | Sub;
- | ^^^ non-auto additional trait
+ | ^^^
+ | |
+ | additional non-auto trait
+ | trait alias used in trait object type (additional use)
-error[E0191]: the value of the associated type `Output` (from the trait `std::ops::Add`) must be specified
+error[E0191]: the value of the associated types `Output` (from the trait `std::ops::Add`), `Output` (from the trait `std::ops::Sub`) must be specified
--> $DIR/issue-22560.rs:3:13
|
LL | type Test = Add +
| _____________^
+ | |_____________|
+ | |
LL | |
LL | |
LL | | Sub;
- | |_______________^ associated type `Output` must be specified
+ | | ^
+ | |_______________|
+ | |_______________associated type `Output` must be specified
+ | associated type `Output` must be specified
error: aborting due to 4 previous errors
--> $DIR/issue-32963.rs:8:25
|
LL | size_of_copy::<Misc+Copy>();
- | ^^^^ non-auto additional trait
+ | ---- ^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
error[E0277]: the trait bound `dyn Misc: std::marker::Copy` is not satisfied
--> $DIR/issue-32963.rs:8:5
= note: ...therefore, they cannot allow references to captured variables to escape
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/issue-40510-1.rs:20:1
= note: ...therefore, they cannot allow references to captured variables to escape
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/issue-40510-3.rs:22:1
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0713]: borrow may still be in use when destructor runs
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:62:5
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0713]: borrow may still be in use when destructor runs
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:73:5
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:80:1
= note: ...therefore, they cannot allow references to captured variables to escape
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/issue-49824.rs:6:1
#![deny(improper_ctypes)]
#![allow(dead_code)]
+use std::num;
+
enum Z { }
enum U { A }
enum B { C, D }
#[repr(isize)]
enum Isize { A, B, C }
+#[repr(transparent)]
+struct Transparent<T>(T, std::marker::PhantomData<Z>);
+
+struct Rust<T>(T);
+
extern {
fn zf(x: Z);
fn uf(x: U); //~ ERROR enum has no representation hint
fn bf(x: B); //~ ERROR enum has no representation hint
fn tf(x: T); //~ ERROR enum has no representation hint
- fn reprc(x: ReprC);
- fn u8(x: U8);
- fn isize(x: Isize);
+ fn repr_c(x: ReprC);
+ fn repr_u8(x: U8);
+ fn repr_isize(x: Isize);
+ fn option_ref(x: Option<&'static u8>);
+ fn option_fn(x: Option<extern "C" fn()>);
+ fn nonnull(x: Option<std::ptr::NonNull<u8>>);
+ fn nonzero_u8(x: Option<num::NonZeroU8>);
+ fn nonzero_u16(x: Option<num::NonZeroU16>);
+ fn nonzero_u32(x: Option<num::NonZeroU32>);
+ fn nonzero_u64(x: Option<num::NonZeroU64>);
+ fn nonzero_u128(x: Option<num::NonZeroU128>);
+ //~^ ERROR 128-bit integers don't currently have a known stable ABI
+ fn nonzero_usize(x: Option<num::NonZeroUsize>);
+ fn nonzero_i8(x: Option<num::NonZeroI8>);
+ fn nonzero_i16(x: Option<num::NonZeroI16>);
+ fn nonzero_i32(x: Option<num::NonZeroI32>);
+ fn nonzero_i64(x: Option<num::NonZeroI64>);
+ fn nonzero_i128(x: Option<num::NonZeroI128>);
+ //~^ ERROR 128-bit integers don't currently have a known stable ABI
+ fn nonzero_isize(x: Option<num::NonZeroIsize>);
+ fn repr_transparent(x: Option<Transparent<num::NonZeroU8>>);
+ fn repr_rust(x: Option<Rust<num::NonZeroU8>>); //~ ERROR enum has no representation hint
+ fn no_result(x: Result<(), num::NonZeroI32>); //~ ERROR enum has no representation hint
}
pub fn main() { }
error: `extern` block uses type `U` which is not FFI-safe: enum has no representation hint
- --> $DIR/lint-ctypes-enum.rs:20:13
+ --> $DIR/lint-ctypes-enum.rs:27:13
|
LL | fn uf(x: U);
| ^
| ^^^^^^^^^^^^^^^
= help: consider adding a #[repr(...)] attribute to this enum
note: type defined here
- --> $DIR/lint-ctypes-enum.rs:5:1
+ --> $DIR/lint-ctypes-enum.rs:7:1
|
LL | enum U { A }
| ^^^^^^^^^^^^
error: `extern` block uses type `B` which is not FFI-safe: enum has no representation hint
- --> $DIR/lint-ctypes-enum.rs:21:13
+ --> $DIR/lint-ctypes-enum.rs:28:13
|
LL | fn bf(x: B);
| ^
|
= help: consider adding a #[repr(...)] attribute to this enum
note: type defined here
- --> $DIR/lint-ctypes-enum.rs:6:1
+ --> $DIR/lint-ctypes-enum.rs:8:1
|
LL | enum B { C, D }
| ^^^^^^^^^^^^^^^
error: `extern` block uses type `T` which is not FFI-safe: enum has no representation hint
- --> $DIR/lint-ctypes-enum.rs:22:13
+ --> $DIR/lint-ctypes-enum.rs:29:13
|
LL | fn tf(x: T);
| ^
|
= help: consider adding a #[repr(...)] attribute to this enum
note: type defined here
- --> $DIR/lint-ctypes-enum.rs:7:1
+ --> $DIR/lint-ctypes-enum.rs:9:1
|
LL | enum T { E, F, G }
| ^^^^^^^^^^^^^^^^^^
-error: aborting due to 3 previous errors
+error: `extern` block uses type `u128` which is not FFI-safe: 128-bit integers don't currently have a known stable ABI
+ --> $DIR/lint-ctypes-enum.rs:40:23
+ |
+LL | fn nonzero_u128(x: Option<num::NonZeroU128>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `extern` block uses type `i128` which is not FFI-safe: 128-bit integers don't currently have a known stable ABI
+ --> $DIR/lint-ctypes-enum.rs:47:23
+ |
+LL | fn nonzero_i128(x: Option<num::NonZeroI128>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `extern` block uses type `std::option::Option<Rust<std::num::NonZeroU8>>` which is not FFI-safe: enum has no representation hint
+ --> $DIR/lint-ctypes-enum.rs:51:20
+ |
+LL | fn repr_rust(x: Option<Rust<num::NonZeroU8>>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a #[repr(...)] attribute to this enum
+
+error: `extern` block uses type `std::result::Result<(), std::num::NonZeroI32>` which is not FFI-safe: enum has no representation hint
+ --> $DIR/lint-ctypes-enum.rs:52:20
+ |
+LL | fn no_result(x: Result<(), num::NonZeroI32>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a #[repr(...)] attribute to this enum
+
+error: aborting due to 7 previous errors
-trait Tr: ?Sized {} //~ ERROR `?Trait` is not permitted in supertraits
+trait Tr: ?Sized {}
+//~^ ERROR `?Trait` is not permitted in supertraits
-type A1 = Tr + (?Sized); //~ ERROR `?Trait` is not permitted in trait object types
-type A2 = for<'a> Tr + (?Sized); //~ ERROR `?Trait` is not permitted in trait object types
+type A1 = dyn Tr + (?Sized);
+//~^ ERROR `?Trait` is not permitted in trait object types
+type A2 = dyn for<'a> Tr + (?Sized);
+//~^ ERROR `?Trait` is not permitted in trait object types
fn main() {}
= note: traits are `?Sized` by default
error: `?Trait` is not permitted in trait object types
- --> $DIR/maybe-bounds.rs:3:16
+ --> $DIR/maybe-bounds.rs:4:20
|
-LL | type A1 = Tr + (?Sized);
- | ^^^^^^^^
+LL | type A1 = dyn Tr + (?Sized);
+ | ^^^^^^^^
error: `?Trait` is not permitted in trait object types
- --> $DIR/maybe-bounds.rs:4:24
+ --> $DIR/maybe-bounds.rs:6:28
|
-LL | type A2 = for<'a> Tr + (?Sized);
- | ^^^^^^^^
+LL | type A2 = dyn for<'a> Tr + (?Sized);
+ | ^^^^^^^^
error: aborting due to 3 previous errors
//~^ ERROR `?Trait` is not permitted in trait object types
let _: Box<(?Sized) + (for<'a> Trait<'a>) + (Copy)>;
let _: Box<(for<'a> Trait<'a>) + (Copy) + (?Sized)>;
- //~^ ERROR `?Trait` is not permitted in trait object types
- //~| ERROR use of undeclared lifetime name `'a`
+ //~^ ERROR use of undeclared lifetime name `'a`
+ //~| ERROR `?Trait` is not permitted in trait object types
}
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to previous error
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to previous error
+++ /dev/null
-// run-pass
-#![feature(trait_alias)]
-
-pub trait Foo {}
-pub trait FooAlias = Foo;
-
-fn main() {}
+++ /dev/null
-// run-pass
-
-trait Foo<'a> {
- fn xyz(self);
-}
-impl<'a, T> Foo<'a> for T where 'static: 'a {
- fn xyz(self) {}
-}
-
-trait Bar {
- fn uvw(self);
-}
-impl<T> Bar for T where for<'a> T: Foo<'a> {
- fn uvw(self) { self.xyz(); }
-}
-
-fn foo<T>(t: T) where T: Bar {
- t.uvw();
-}
-
-fn main() {
- foo(0);
-}
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0625]: thread-local statics cannot be accessed at compile-time
--> $DIR/thread-local-in-ctfe.rs:15:16
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0625]: thread-local statics cannot be accessed at compile-time
--> $DIR/thread-local-in-ctfe.rs:25:5
+++ /dev/null
-#![feature(trait_alias)]
-
-pub trait SendSync = Send + Sync;
+++ /dev/null
-// aux-build:trait_alias.rs
-
-#![feature(trait_alias)]
-
-extern crate trait_alias;
-
-use std::rc::Rc;
-use trait_alias::SendSync;
-
-fn use_alias<T: SendSync>() {}
-
-fn main() {
- use_alias::<u32>();
- use_alias::<Rc<u32>>();
- //~^ ERROR `std::rc::Rc<u32>` cannot be sent between threads safely [E0277]
- //~^^ ERROR `std::rc::Rc<u32>` cannot be shared between threads safely [E0277]
-}
+++ /dev/null
-error[E0277]: `std::rc::Rc<u32>` cannot be sent between threads safely
- --> $DIR/trait-alias-cross-crate.rs:14:5
- |
-LL | use_alias::<Rc<u32>>();
- | ^^^^^^^^^^^^^^^^^^^^ `std::rc::Rc<u32>` cannot be sent between threads safely
- |
- = help: the trait `std::marker::Send` is not implemented for `std::rc::Rc<u32>`
-note: required by `use_alias`
- --> $DIR/trait-alias-cross-crate.rs:10:1
- |
-LL | fn use_alias<T: SendSync>() {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error[E0277]: `std::rc::Rc<u32>` cannot be shared between threads safely
- --> $DIR/trait-alias-cross-crate.rs:14:5
- |
-LL | use_alias::<Rc<u32>>();
- | ^^^^^^^^^^^^^^^^^^^^ `std::rc::Rc<u32>` cannot be shared between threads safely
- |
- = help: the trait `std::marker::Sync` is not implemented for `std::rc::Rc<u32>`
-note: required by `use_alias`
- --> $DIR/trait-alias-cross-crate.rs:10:1
- |
-LL | fn use_alias<T: SendSync>() {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-error: aborting due to 2 previous errors
-
-For more information about this error, try `rustc --explain E0277`.
+++ /dev/null
-#![feature(trait_alias)]
-
-trait DefaultAlias = Default;
-
-impl DefaultAlias for () {} //~ ERROR expected trait, found trait alias
-
-fn main() {}
+++ /dev/null
-error[E0404]: expected trait, found trait alias `DefaultAlias`
- --> $DIR/trait-alias-impl.rs:5:6
- |
-LL | impl DefaultAlias for () {}
- | ^^^^^^^^^^^^ not a trait
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0404`.
+++ /dev/null
-#![feature(trait_alias)]
-
-trait EqAlias = Eq;
-trait IteratorAlias = Iterator;
-
-fn main() {
- let _: &dyn EqAlias = &123; //~ ERROR `EqAlias` cannot be made into an object
- let _: &dyn IteratorAlias = &vec![123].into_iter(); //~ ERROR must be specified
-}
+++ /dev/null
-error[E0038]: the trait `EqAlias` cannot be made into an object
- --> $DIR/trait-alias-object.rs:7:13
- |
-LL | let _: &dyn EqAlias = &123;
- | ^^^^^^^^^^^ the trait `EqAlias` cannot be made into an object
- |
- = note: the trait cannot use `Self` as a type parameter in the supertraits or where-clauses
-
-error[E0191]: the value of the associated type `Item` (from the trait `std::iter::Iterator`) must be specified
- --> $DIR/trait-alias-object.rs:8:13
- |
-LL | let _: &dyn IteratorAlias = &vec![123].into_iter();
- | ^^^^^^^^^^^^^^^^^ associated type `Item` must be specified
-
-error: aborting due to 2 previous errors
-
-Some errors have detailed explanations: E0038, E0191.
-For more information about an error, try `rustc --explain E0038`.
+++ /dev/null
-#![feature(trait_alias)]
-
-trait Foo {}
-auto trait A = Foo; //~ ERROR trait aliases cannot be `auto`
-unsafe trait B = Foo; //~ ERROR trait aliases cannot be `unsafe`
-
-fn main() {}
+++ /dev/null
-error: trait aliases cannot be `auto`
- --> $DIR/trait-alias-syntax.rs:4:19
- |
-LL | auto trait A = Foo;
- | ^ trait aliases cannot be `auto`
-
-error: trait aliases cannot be `unsafe`
- --> $DIR/trait-alias-syntax.rs:5:21
- |
-LL | unsafe trait B = Foo;
- | ^ trait aliases cannot be `unsafe`
-
-error: aborting due to 2 previous errors
-
+++ /dev/null
-#![feature(trait_alias)]
-
-trait Foo {}
-trait A<T: Foo> {}
-trait B<T> = A<T>; //~ ERROR `T: Foo` is not satisfied
-
-fn main() {}
+++ /dev/null
-error[E0277]: the trait bound `T: Foo` is not satisfied
- --> $DIR/trait-alias-wf.rs:5:1
- |
-LL | trait B<T> = A<T>;
- | ^^^^^^^^^^^^^^^^^^ the trait `Foo` is not implemented for `T`
- |
- = help: consider adding a `where T: Foo` bound
-note: required by `A`
- --> $DIR/trait-alias-wf.rs:4:1
- |
-LL | trait A<T: Foo> {}
- | ^^^^^^^^^^^^^^^
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0277`.
--- /dev/null
+#![feature(trait_alias)]
+
+pub trait SendSync = Send + Sync;
--- /dev/null
+// run-pass
+
+#![feature(trait_alias)]
+
+use std::marker::PhantomData;
+
+trait Empty {}
+trait EmptyAlias = Empty;
+trait CloneDefault = Clone + Default;
+trait SendSyncAlias = Send + Sync;
+trait WhereSendAlias = where Self: Send;
+trait SendEqAlias<T> = Send where T: PartialEq<Self>;
+trait I32Iterator = Iterator<Item = i32>;
+
+#[allow(dead_code)]
+struct Foo<T: SendSyncAlias>(PhantomData<T>);
+#[allow(dead_code)]
+struct Bar<T>(PhantomData<T>) where T: SendSyncAlias;
+
+impl EmptyAlias {}
+
+impl<T: SendSyncAlias> Empty for T {}
+
+fn a<T: CloneDefault>() -> (T, T) {
+ let one = T::default();
+ let two = one.clone();
+ (one, two)
+}
+
+fn b(x: &impl SendEqAlias<i32>) -> bool {
+ 22_i32 == *x
+}
+
+fn c<T: I32Iterator>(x: &mut T) -> Option<i32> {
+ x.next()
+}
+
+fn d<T: SendSyncAlias>() {
+ is_send_and_sync::<T>();
+}
+
+fn is_send_and_sync<T: Send + Sync>() {}
+
+fn main() {
+ let both = a::<i32>();
+ assert_eq!(both.0, 0);
+ assert_eq!(both.1, 0);
+ let both: (i32, i32) = a();
+ assert_eq!(both.0, 0);
+ assert_eq!(both.1, 0);
+
+ assert!(b(&22));
+
+ assert_eq!(c(&mut vec![22].into_iter()), Some(22));
+
+ d::<i32>();
+}
--- /dev/null
+// aux-build:trait_alias.rs
+
+#![feature(trait_alias)]
+
+extern crate trait_alias;
+
+use std::rc::Rc;
+use trait_alias::SendSync;
+
+fn use_alias<T: SendSync>() {}
+
+fn main() {
+ use_alias::<u32>();
+ use_alias::<Rc<u32>>();
+ //~^ ERROR `std::rc::Rc<u32>` cannot be sent between threads safely [E0277]
+ //~^^ ERROR `std::rc::Rc<u32>` cannot be shared between threads safely [E0277]
+}
--- /dev/null
+error[E0277]: `std::rc::Rc<u32>` cannot be sent between threads safely
+ --> $DIR/trait-alias-cross-crate.rs:14:5
+ |
+LL | use_alias::<Rc<u32>>();
+ | ^^^^^^^^^^^^^^^^^^^^ `std::rc::Rc<u32>` cannot be sent between threads safely
+ |
+ = help: the trait `std::marker::Send` is not implemented for `std::rc::Rc<u32>`
+note: required by `use_alias`
+ --> $DIR/trait-alias-cross-crate.rs:10:1
+ |
+LL | fn use_alias<T: SendSync>() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error[E0277]: `std::rc::Rc<u32>` cannot be shared between threads safely
+ --> $DIR/trait-alias-cross-crate.rs:14:5
+ |
+LL | use_alias::<Rc<u32>>();
+ | ^^^^^^^^^^^^^^^^^^^^ `std::rc::Rc<u32>` cannot be shared between threads safely
+ |
+ = help: the trait `std::marker::Sync` is not implemented for `std::rc::Rc<u32>`
+note: required by `use_alias`
+ --> $DIR/trait-alias-cross-crate.rs:10:1
+ |
+LL | fn use_alias<T: SendSync>() {}
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
--- /dev/null
+#![feature(trait_alias)]
+
+trait DefaultAlias = Default;
+
+impl DefaultAlias for () {} //~ ERROR expected trait, found trait alias
+
+fn main() {}
--- /dev/null
+error[E0404]: expected trait, found trait alias `DefaultAlias`
+ --> $DIR/trait-alias-impl.rs:5:6
+ |
+LL | impl DefaultAlias for () {}
+ | ^^^^^^^^^^^^ not a trait
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0404`.
--- /dev/null
+// compile-pass
+
+// Test that `dyn ... + ?Sized + ...` resulting from the expansion of trait aliases is okay.
+
+#![feature(trait_alias)]
+
+trait Foo {}
+
+trait S = ?Sized;
+
+// Nest a couple of levels deep:
+trait _0 = S;
+trait _1 = _0;
+
+// Straight list expansion:
+type _T0 = dyn _1 + Foo;
+
+// In second position:
+type _T1 = dyn Foo + _1;
+
+// ... and with an auto trait:
+type _T2 = dyn Foo + Send + _1;
+
+// Twice:
+trait _2 = _1 + _1;
+
+type _T3 = dyn _2 + Foo;
+
+fn main() {}
--- /dev/null
+// The purpose of this test is to demonstrate that duplicating object safe traits
+// that are not auto traits is rejected with trait aliases even though one could
+// reasonably accept this.
+
+#![feature(trait_alias)]
+
+use std::marker::Unpin;
+
+// Some arbitrary object-safe trait:
+trait Obj {}
+
+// Nest a few levels deep:
+trait _0 = Obj;
+trait _1 = _0;
+
+type _T00 = dyn _0 + _0;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T01 = dyn _1 + _0;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T02 = dyn _1 + _1;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T03 = dyn Obj + _1;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T04 = dyn _1 + Obj;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Nest some more and in weird ways:
+
+trait _2 = _0 + _1;
+trait _3 = Obj;
+trait _4 = _3;
+
+type _T10 = dyn _2 + _3;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T11 = dyn _3 + _2;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T12 = dyn Obj + _2;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T13 = dyn _2 + Obj;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T14 = dyn _1 + _3;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T15 = dyn _3 + _1;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T16 = dyn _1 + _4;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T17 = dyn _4 + _1;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Include auto traits:
+
+trait _5 = Obj + Send;
+
+type _T20 = dyn _5 + _5;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T21 = dyn Obj + _5;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T22 = dyn _5 + Obj;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T23 = dyn _5 + Send + Sync + Obj;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Also nest:
+
+trait _6 = _5 + _5; // ==> Obj + Send + Obj + Send
+
+type _T30 = dyn _6;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T31 = dyn _6 + Send;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T32 = dyn Send + _6;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Nest some more:
+
+trait _7 = _5 + Sync;
+trait _8 = Unpin + _7;
+
+type _T40 = dyn _8 + Obj;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T41 = dyn Obj + _8;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T42 = dyn _8 + _4;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T43 = dyn _4 + _8;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T44 = dyn _4 + Send + Sync + _8;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Take higher ranked types into account.
+
+// Note that `'a` and `'b` are intentionally different to make sure we consider
+// them semantically the same.
+trait ObjL<'l> {}
+trait _9 = for<'a> ObjL<'a>;
+trait _10 = for<'b> ObjL<'b>;
+type _T50 = _9 + _10;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+trait ObjT<T> {}
+trait _11 = ObjT<for<'a> fn(&'a u8)>;
+trait _12 = ObjT<for<'b> fn(&'b u8)>;
+type _T60 = _11 + _12;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+fn main() {}
--- /dev/null
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:16:22
+ |
+LL | trait _0 = Obj;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+...
+LL | type _T00 = dyn _0 + _0;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:19:22
+ |
+LL | trait _0 = Obj;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | type _T01 = dyn _1 + _0;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:22:22
+ |
+LL | trait _0 = Obj;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL | trait _1 = _0;
+ | --
+ | |
+ | referenced here (additional use)
+ | referenced here (first use)
+...
+LL | type _T02 = dyn _1 + _1;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:25:23
+ |
+LL | trait _0 = Obj;
+ | --- additional non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | type _T03 = dyn Obj + _1;
+ | --- ^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:28:22
+ |
+LL | trait _0 = Obj;
+ | --- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | type _T04 = dyn _1 + Obj;
+ | -- ^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:37:17
+ |
+LL | trait _0 = Obj;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | trait _2 = _0 + _1;
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+...
+LL | type _T10 = dyn _2 + _3;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:40:22
+ |
+LL | trait _0 = Obj;
+ | --- additional non-auto trait
+...
+LL | trait _2 = _0 + _1;
+ | -- referenced here (additional use)
+LL | trait _3 = Obj;
+ | --- first non-auto trait
+...
+LL | type _T11 = dyn _3 + _2;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:43:23
+ |
+LL | trait _0 = Obj;
+ | --- additional non-auto trait
+...
+LL | trait _2 = _0 + _1;
+ | -- referenced here (additional use)
+...
+LL | type _T12 = dyn Obj + _2;
+ | --- ^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:46:17
+ |
+LL | trait _0 = Obj;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | trait _2 = _0 + _1;
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+...
+LL | type _T13 = dyn _2 + Obj;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:49:22
+ |
+LL | trait _0 = Obj;
+ | --- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _3 = Obj;
+ | --- additional non-auto trait
+...
+LL | type _T14 = dyn _1 + _3;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:52:22
+ |
+LL | trait _0 = Obj;
+ | --- additional non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | trait _3 = Obj;
+ | --- first non-auto trait
+...
+LL | type _T15 = dyn _3 + _1;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:55:22
+ |
+LL | trait _0 = Obj;
+ | --- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _3 = Obj;
+ | --- additional non-auto trait
+LL | trait _4 = _3;
+ | -- referenced here (additional use)
+...
+LL | type _T16 = dyn _1 + _4;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:58:22
+ |
+LL | trait _0 = Obj;
+ | --- additional non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | trait _3 = Obj;
+ | --- first non-auto trait
+LL | trait _4 = _3;
+ | -- referenced here (first use)
+...
+LL | type _T17 = dyn _4 + _1;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:65:22
+ |
+LL | trait _5 = Obj + Send;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL |
+LL | type _T20 = dyn _5 + _5;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:68:23
+ |
+LL | trait _5 = Obj + Send;
+ | --- additional non-auto trait
+...
+LL | type _T21 = dyn Obj + _5;
+ | --- ^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:71:22
+ |
+LL | trait _5 = Obj + Send;
+ | --- first non-auto trait
+...
+LL | type _T22 = dyn _5 + Obj;
+ | -- ^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:74:36
+ |
+LL | trait _5 = Obj + Send;
+ | --- first non-auto trait
+...
+LL | type _T23 = dyn _5 + Send + Sync + Obj;
+ | -- ^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:81:17
+ |
+LL | trait _5 = Obj + Send;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+...
+LL | trait _6 = _5 + _5; // ==> Obj + Send + Obj + Send
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+LL |
+LL | type _T30 = dyn _6;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:84:17
+ |
+LL | trait _5 = Obj + Send;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+...
+LL | trait _6 = _5 + _5; // ==> Obj + Send + Obj + Send
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+...
+LL | type _T31 = dyn _6 + Send;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:87:24
+ |
+LL | trait _5 = Obj + Send;
+ | ---
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+...
+LL | trait _6 = _5 + _5; // ==> Obj + Send + Obj + Send
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+...
+LL | type _T32 = dyn Send + _6;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:95:22
+ |
+LL | trait _5 = Obj + Send;
+ | --- first non-auto trait
+...
+LL | trait _7 = _5 + Sync;
+ | -- referenced here (first use)
+LL | trait _8 = Unpin + _7;
+ | -- referenced here (first use)
+LL |
+LL | type _T40 = dyn _8 + Obj;
+ | -- ^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:98:23
+ |
+LL | trait _5 = Obj + Send;
+ | --- additional non-auto trait
+...
+LL | trait _7 = _5 + Sync;
+ | -- referenced here (additional use)
+LL | trait _8 = Unpin + _7;
+ | -- referenced here (additional use)
+...
+LL | type _T41 = dyn Obj + _8;
+ | --- ^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:101:22
+ |
+LL | trait _3 = Obj;
+ | --- additional non-auto trait
+LL | trait _4 = _3;
+ | -- referenced here (additional use)
+...
+LL | trait _5 = Obj + Send;
+ | --- first non-auto trait
+...
+LL | trait _7 = _5 + Sync;
+ | -- referenced here (first use)
+LL | trait _8 = Unpin + _7;
+ | -- referenced here (first use)
+...
+LL | type _T42 = dyn _8 + _4;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:104:22
+ |
+LL | trait _3 = Obj;
+ | --- first non-auto trait
+LL | trait _4 = _3;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Obj + Send;
+ | --- additional non-auto trait
+...
+LL | trait _7 = _5 + Sync;
+ | -- referenced here (additional use)
+LL | trait _8 = Unpin + _7;
+ | -- referenced here (additional use)
+...
+LL | type _T43 = dyn _4 + _8;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:107:36
+ |
+LL | trait _3 = Obj;
+ | --- first non-auto trait
+LL | trait _4 = _3;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Obj + Send;
+ | --- additional non-auto trait
+...
+LL | trait _7 = _5 + Sync;
+ | -- referenced here (additional use)
+LL | trait _8 = Unpin + _7;
+ | -- referenced here (additional use)
+...
+LL | type _T44 = dyn _4 + Send + Sync + _8;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:117:18
+ |
+LL | trait _9 = for<'a> ObjL<'a>;
+ | ---------------- first non-auto trait
+LL | trait _10 = for<'b> ObjL<'b>;
+ | ---------------- additional non-auto trait
+LL | type _T50 = _9 + _10;
+ | -- ^^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-duplicates.rs:123:19
+ |
+LL | trait _11 = ObjT<for<'a> fn(&'a u8)>;
+ | ------------------------ first non-auto trait
+LL | trait _12 = ObjT<for<'b> fn(&'b u8)>;
+ | ------------------------ additional non-auto trait
+LL | type _T60 = _11 + _12;
+ | --- ^^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error: aborting due to 27 previous errors
+
+For more information about this error, try `rustc --explain E0225`.
--- /dev/null
+// The purpose of this test is to demonstrate that trait alias expansion
+// preserves the rule that `dyn Trait` may only reference one non-auto trait.
+
+#![feature(trait_alias)]
+
+use std::marker::Unpin;
+
+// Some arbitrary object-safe traits:
+trait ObjA {}
+trait ObjB {}
+
+// Nest a few levels deep:
+trait _0 = ObjA;
+trait _1 = _0;
+
+type _T00 = dyn _0 + ObjB;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T01 = dyn ObjB + _0;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T02 = dyn ObjB + _1;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T03 = dyn _1 + ObjB;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Nest some more and in weird ways:
+
+trait _2 = ObjB;
+trait _3 = _2;
+trait _4 = _3;
+
+type _T10 = dyn _2 + _3;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T11 = dyn _3 + _2;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T12 = dyn _2 + _4;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T13 = dyn _4 + _2;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Include auto traits:
+
+trait _5 = Sync + ObjB + Send;
+
+type _T20 = dyn _5 + _1;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T21 = dyn _1 + _5;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T22 = dyn _5 + ObjA;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T23 = dyn ObjA + _5;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T24 = dyn Send + _5 + _1 + Sync;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T25 = dyn _1 + Sync + _5 + Send;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T26 = dyn Sync + Send + _5 + ObjA;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T27 = dyn Send + Sync + ObjA + _5;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Also nest:
+
+trait _6 = _1 + _5;
+trait _7 = _6;
+trait _8 = _7;
+
+type _T30 = dyn _6;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T31 = dyn _6 + Send;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T32 = dyn Send + _6;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T33 = dyn _8;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T34 = dyn _8 + Send;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T35 = dyn Send + _8;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Nest some more:
+
+trait _9 = _5 + Sync;
+trait _10 = Unpin + _9;
+
+type _T40 = dyn _10 + ObjA;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T41 = dyn ObjA + _10;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T42 = dyn _10 + _1;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T43 = dyn Send + _10 + Sync + ObjA;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T44 = dyn ObjA + _10 + Send + Sync;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _T45 = dyn Sync + Send + _10 + _1;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+fn main() {}
--- /dev/null
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:16:22
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+...
+LL | type _T00 = dyn _0 + ObjB;
+ | -- ^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:19:24
+ |
+LL | trait _0 = ObjA;
+ | ---- additional non-auto trait
+...
+LL | type _T01 = dyn ObjB + _0;
+ | ---- ^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:22:24
+ |
+LL | trait _0 = ObjA;
+ | ---- additional non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | type _T02 = dyn ObjB + _1;
+ | ---- ^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:25:22
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | type _T03 = dyn _1 + ObjB;
+ | -- ^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:34:22
+ |
+LL | trait _2 = ObjB;
+ | ----
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL | trait _3 = _2;
+ | -- referenced here (additional use)
+...
+LL | type _T10 = dyn _2 + _3;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:37:22
+ |
+LL | trait _2 = ObjB;
+ | ----
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL | trait _3 = _2;
+ | -- referenced here (first use)
+...
+LL | type _T11 = dyn _3 + _2;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:40:22
+ |
+LL | trait _2 = ObjB;
+ | ----
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL | trait _3 = _2;
+ | -- referenced here (additional use)
+LL | trait _4 = _3;
+ | -- referenced here (additional use)
+...
+LL | type _T12 = dyn _2 + _4;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:43:22
+ |
+LL | trait _2 = ObjB;
+ | ----
+ | |
+ | additional non-auto trait
+ | first non-auto trait
+LL | trait _3 = _2;
+ | -- referenced here (first use)
+LL | trait _4 = _3;
+ | -- referenced here (first use)
+...
+LL | type _T13 = dyn _4 + _2;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:50:22
+ |
+LL | trait _0 = ObjA;
+ | ---- additional non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- first non-auto trait
+LL |
+LL | type _T20 = dyn _5 + _1;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:53:22
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | type _T21 = dyn _1 + _5;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:56:22
+ |
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- first non-auto trait
+...
+LL | type _T22 = dyn _5 + ObjA;
+ | -- ^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:59:24
+ |
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | type _T23 = dyn ObjA + _5;
+ | ---- ^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:62:29
+ |
+LL | trait _0 = ObjA;
+ | ---- additional non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- first non-auto trait
+...
+LL | type _T24 = dyn Send + _5 + _1 + Sync;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:65:29
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | type _T25 = dyn _1 + Sync + _5 + Send;
+ | -- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:68:36
+ |
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- first non-auto trait
+...
+LL | type _T26 = dyn Sync + Send + _5 + ObjA;
+ | -- ^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:71:38
+ |
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | type _T27 = dyn Send + Sync + ObjA + _5;
+ | ---- ^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:80:17
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | trait _6 = _1 + _5;
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+...
+LL | type _T30 = dyn _6;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:83:17
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | trait _6 = _1 + _5;
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+...
+LL | type _T31 = dyn _6 + Send;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:86:24
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | trait _6 = _1 + _5;
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+...
+LL | type _T32 = dyn Send + _6;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:89:17
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | trait _6 = _1 + _5;
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+LL | trait _7 = _6;
+ | --
+ | |
+ | referenced here (additional use)
+ | referenced here (first use)
+LL | trait _8 = _7;
+ | --
+ | |
+ | referenced here (additional use)
+ | referenced here (first use)
+...
+LL | type _T33 = dyn _8;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:92:17
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | trait _6 = _1 + _5;
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+LL | trait _7 = _6;
+ | --
+ | |
+ | referenced here (additional use)
+ | referenced here (first use)
+LL | trait _8 = _7;
+ | --
+ | |
+ | referenced here (additional use)
+ | referenced here (first use)
+...
+LL | type _T34 = dyn _8 + Send;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:95:24
+ |
+LL | trait _0 = ObjA;
+ | ---- first non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (first use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | trait _6 = _1 + _5;
+ | -- -- referenced here (additional use)
+ | |
+ | referenced here (first use)
+LL | trait _7 = _6;
+ | --
+ | |
+ | referenced here (additional use)
+ | referenced here (first use)
+LL | trait _8 = _7;
+ | --
+ | |
+ | referenced here (additional use)
+ | referenced here (first use)
+...
+LL | type _T35 = dyn Send + _8;
+ | ^^
+ | |
+ | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:103:23
+ |
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- first non-auto trait
+...
+LL | trait _9 = _5 + Sync;
+ | -- referenced here (first use)
+LL | trait _10 = Unpin + _9;
+ | -- referenced here (first use)
+LL |
+LL | type _T40 = dyn _10 + ObjA;
+ | --- ^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:106:24
+ |
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | trait _9 = _5 + Sync;
+ | -- referenced here (additional use)
+LL | trait _10 = Unpin + _9;
+ | -- referenced here (additional use)
+...
+LL | type _T41 = dyn ObjA + _10;
+ | ---- ^^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:109:23
+ |
+LL | trait _0 = ObjA;
+ | ---- additional non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- first non-auto trait
+...
+LL | trait _9 = _5 + Sync;
+ | -- referenced here (first use)
+LL | trait _10 = Unpin + _9;
+ | -- referenced here (first use)
+...
+LL | type _T42 = dyn _10 + _1;
+ | --- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:112:37
+ |
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- first non-auto trait
+...
+LL | trait _9 = _5 + Sync;
+ | -- referenced here (first use)
+LL | trait _10 = Unpin + _9;
+ | -- referenced here (first use)
+...
+LL | type _T43 = dyn Send + _10 + Sync + ObjA;
+ | --- ^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:115:24
+ |
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- additional non-auto trait
+...
+LL | trait _9 = _5 + Sync;
+ | -- referenced here (additional use)
+LL | trait _10 = Unpin + _9;
+ | -- referenced here (additional use)
+...
+LL | type _T44 = dyn ObjA + _10 + Send + Sync;
+ | ---- ^^^ trait alias used in trait object type (additional use)
+ | |
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/trait-alias-no-extra-traits.rs:118:37
+ |
+LL | trait _0 = ObjA;
+ | ---- additional non-auto trait
+LL | trait _1 = _0;
+ | -- referenced here (additional use)
+...
+LL | trait _5 = Sync + ObjB + Send;
+ | ---- first non-auto trait
+...
+LL | trait _9 = _5 + Sync;
+ | -- referenced here (first use)
+LL | trait _10 = Unpin + _9;
+ | -- referenced here (first use)
+...
+LL | type _T45 = dyn Sync + Send + _10 + _1;
+ | --- ^^ trait alias used in trait object type (additional use)
+ | |
+ | trait alias used in trait object type (first use)
+
+error: aborting due to 28 previous errors
+
+For more information about this error, try `rustc --explain E0225`.
--- /dev/null
+#![feature(trait_alias)]
+
+trait EqAlias = Eq;
+trait IteratorAlias = Iterator;
+
+fn main() {
+ let _: &dyn EqAlias = &123;
+ //~^ ERROR the trait `std::cmp::Eq` cannot be made into an object [E0038]
+ let _: &dyn IteratorAlias = &vec![123].into_iter();
+ //~^ ERROR must be specified
+}
--- /dev/null
+error[E0038]: the trait `std::cmp::Eq` cannot be made into an object
+ --> $DIR/trait-alias-object-fail.rs:7:13
+ |
+LL | let _: &dyn EqAlias = &123;
+ | ^^^^^^^^^^^ the trait `std::cmp::Eq` cannot be made into an object
+ |
+ = note: the trait cannot use `Self` as a type parameter in the supertraits or where-clauses
+
+error[E0191]: the value of the associated type `Item` (from the trait `std::iter::Iterator`) must be specified
+ --> $DIR/trait-alias-object-fail.rs:9:13
+ |
+LL | let _: &dyn IteratorAlias = &vec![123].into_iter();
+ | ^^^^^^^^^^^^^^^^^ associated type `Item` must be specified
+
+error: aborting due to 2 previous errors
+
+Some errors have detailed explanations: E0038, E0191.
+For more information about an error, try `rustc --explain E0038`.
--- /dev/null
+// run-pass
+
+// This test checks that trait objects involving trait aliases are well-formed.
+
+#![feature(trait_alias)]
+
+trait Obj {}
+
+trait _0 = Send + Sync;
+
+// Just auto traits:
+
+trait _1 = _0 + Send + Sync;
+
+use std::marker::Unpin;
+
+fn _f0() {
+ let _: Box<dyn _0>;
+ let _: Box<dyn _1>;
+ let _: Box<dyn Unpin + _1 + Send + Sync>;
+}
+
+// Include object safe traits:
+
+fn _f1() {
+ let _: Box<dyn Obj + _0>;
+ let _: Box<dyn Obj + _1>;
+ let _: Box<dyn Obj + _1 + _0>;
+}
+
+// And when the object safe trait is in a trait alias:
+
+trait _2 = Obj;
+
+fn _f2() {
+ let _: Box<dyn _2 + _0>;
+ let _: Box<dyn _2 + _1>;
+ let _: Box<dyn _2 + _1 + _0>;
+}
+
+// And it should also work when that trait has auto traits to the right of it.
+
+trait _3 = Obj + Unpin;
+
+fn _f3() {
+ let _: Box<dyn _3 + _0>;
+ let _: Box<dyn _3 + _1>;
+ let _: Box<dyn _3 + _1 + _0>;
+}
+
+// Nest the trait deeply:
+
+trait _4 = _3;
+trait _5 = _4 + Sync + _0 + Send;
+trait _6 = _5 + Send + _1 + Sync;
+
+fn _f4() {
+ let _: Box<dyn _6 + _0>;
+ let _: Box<dyn _6 + _1>;
+ let _: Box<dyn _6 + _1 + _0>;
+}
+
+// Just nest the trait alone:
+
+trait _7 = _2;
+trait _8 = _7;
+trait _9 = _8;
+
+fn _f5() {
+ let _: Box<dyn _9>;
+}
+
+// First bound is auto trait:
+
+trait _10 = Send + Obj;
+trait _11 = Obj + Send;
+trait _12 = Sync + _11;
+trait _13 = Send + _12;
+
+fn f6() {
+ let _: Box<dyn _10>;
+ let _: Box<dyn _13>;
+}
+
+fn main() {}
--- /dev/null
+// run-pass
+
+#![feature(trait_alias)]
+
+trait Foo = PartialEq<i32> + Send;
+trait Bar = Foo + Sync;
+
+trait I32Iterator = Iterator<Item = i32>;
+
+pub fn main() {
+ let a: &dyn Bar = &123;
+ assert!(*a == 123);
+ let b = Box::new(456) as Box<dyn Foo>;
+ assert!(*b == 456);
+
+ let c: &mut dyn I32Iterator = &mut vec![123].into_iter();
+ assert_eq!(c.next(), Some(123));
+}
--- /dev/null
+// Test that `dyn ?Sized` (i.e., a trait object with only a maybe bound) is not allowed, when just
+// `?Sized` results from trait alias expansion.
+
+#![feature(trait_alias)]
+
+trait S = ?Sized;
+
+// Nest a couple of levels deep:
+trait _0 = S;
+trait _1 = _0;
+
+// Straight list expansion:
+type _T0 = dyn _1;
+//~^ ERROR at least one non-builtin trait is required for an object type [E0224]
+
+// Twice:
+trait _2 = _1 + _1;
+
+type _T1 = dyn _2;
+//~^ ERROR at least one non-builtin trait is required for an object type [E0224]
+
+fn main() {}
--- /dev/null
+error[E0224]: at least one non-builtin trait is required for an object type
+ --> $DIR/trait-alias-only-maybe-bound.rs:13:12
+ |
+LL | type _T0 = dyn _1;
+ | ^^^^^^
+
+error[E0224]: at least one non-builtin trait is required for an object type
+ --> $DIR/trait-alias-only-maybe-bound.rs:19:12
+ |
+LL | type _T1 = dyn _2;
+ | ^^^^^^
+
+error: aborting due to 2 previous errors
+
--- /dev/null
+#![feature(trait_alias)]
+
+trait Foo {}
+auto trait A = Foo; //~ ERROR trait aliases cannot be `auto`
+unsafe trait B = Foo; //~ ERROR trait aliases cannot be `unsafe`
+
+fn main() {}
--- /dev/null
+error: trait aliases cannot be `auto`
+ --> $DIR/trait-alias-syntax-fail.rs:4:19
+ |
+LL | auto trait A = Foo;
+ | ^ trait aliases cannot be `auto`
+
+error: trait aliases cannot be `unsafe`
+ --> $DIR/trait-alias-syntax-fail.rs:5:21
+ |
+LL | unsafe trait B = Foo;
+ | ^ trait aliases cannot be `unsafe`
+
+error: aborting due to 2 previous errors
+
--- /dev/null
+// run-pass
+
+#![feature(trait_alias)]
+
+trait SimpleAlias = Default;
+trait GenericAlias<T> = Iterator<Item = T>;
+trait Partial<T> = IntoIterator<Item = T>;
+trait SpecificAlias = GenericAlias<i32>;
+trait PartialEqRef<'a, T: 'a> = PartialEq<&'a T>;
+trait StaticAlias = 'static;
+
+trait Things<T> {}
+trait Romeo {}
+#[allow(dead_code)]
+struct The<T>(T);
+#[allow(dead_code)]
+struct Fore<T>(T);
+impl<T, U> Things<T> for The<U> {}
+impl<T> Romeo for Fore<T> {}
+
+trait WithWhere<Art, Thou> = Romeo + Romeo where Fore<(Art, Thou)>: Romeo;
+trait BareWhere<Wild, Are> = where The<Wild>: Things<Are>;
+
+fn main() {}
--- /dev/null
+#![feature(trait_alias)]
+
+trait Foo {}
+trait A<T: Foo> {}
+trait B<T> = A<T>; //~ ERROR `T: Foo` is not satisfied
+
+fn main() {}
--- /dev/null
+error[E0277]: the trait bound `T: Foo` is not satisfied
+ --> $DIR/trait-alias-wf.rs:5:1
+ |
+LL | trait B<T> = A<T>;
+ | ^^^^^^^^^^^^^^^^^^ the trait `Foo` is not implemented for `T`
+ |
+ = help: consider adding a `where T: Foo` bound
+note: required by `A`
+ --> $DIR/trait-alias-wf.rs:4:1
+ |
+LL | trait A<T: Foo> {}
+ | ^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
--- /dev/null
+// run-pass
+
+#![feature(trait_alias)]
+
+pub trait Foo {}
+pub trait FooAlias = Foo;
+
+fn main() {}
--- /dev/null
+// run-pass
+
+trait Foo<'a> {
+ fn xyz(self);
+}
+impl<'a, T> Foo<'a> for T where 'static: 'a {
+ fn xyz(self) {}
+}
+
+trait Bar {
+ fn uvw(self);
+}
+impl<T> Bar for T where for<'a> T: Foo<'a> {
+ fn uvw(self) { self.xyz(); }
+}
+
+fn foo<T>(t: T) where T: Bar {
+ t.uvw();
+}
+
+fn main() {
+ foo(0);
+}
--- /dev/null
+// compile-fail
+
+// Test that `dyn ... + ?Sized + ...` is okay (though `?Sized` has no effect in trait objects).
+
+trait Foo {}
+
+type _0 = dyn ?Sized + Foo;
+//~^ ERROR `?Trait` is not permitted in trait object types
+
+type _1 = dyn Foo + ?Sized;
+//~^ ERROR `?Trait` is not permitted in trait object types
+
+type _2 = dyn Foo + ?Sized + ?Sized;
+//~^ ERROR `?Trait` is not permitted in trait object types
+//~| ERROR `?Trait` is not permitted in trait object types
+
+type _3 = dyn ?Sized + Foo;
+//~^ ERROR `?Trait` is not permitted in trait object types
+
+fn main() {}
--- /dev/null
+error: `?Trait` is not permitted in trait object types
+ --> $DIR/wf-trait-object-maybe-bound.rs:7:15
+ |
+LL | type _0 = dyn ?Sized + Foo;
+ | ^^^^^^
+
+error: `?Trait` is not permitted in trait object types
+ --> $DIR/wf-trait-object-maybe-bound.rs:10:21
+ |
+LL | type _1 = dyn Foo + ?Sized;
+ | ^^^^^^
+
+error: `?Trait` is not permitted in trait object types
+ --> $DIR/wf-trait-object-maybe-bound.rs:13:21
+ |
+LL | type _2 = dyn Foo + ?Sized + ?Sized;
+ | ^^^^^^
+
+error: `?Trait` is not permitted in trait object types
+ --> $DIR/wf-trait-object-maybe-bound.rs:13:30
+ |
+LL | type _2 = dyn Foo + ?Sized + ?Sized;
+ | ^^^^^^
+
+error: `?Trait` is not permitted in trait object types
+ --> $DIR/wf-trait-object-maybe-bound.rs:17:15
+ |
+LL | type _3 = dyn ?Sized + Foo;
+ | ^^^^^^
+
+error: aborting due to 5 previous errors
+
--- /dev/null
+// The purpose of this test is to demonstrate that duplicating object safe traits
+// that are not auto-traits is rejected even though one could reasonably accept this.
+
+// Some arbitrary object-safe trait:
+trait Obj {}
+
+// Demonstrate that recursive expansion of trait aliases doesn't affect stable behavior:
+type _0 = dyn Obj + Obj;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+// Some variations:
+
+type _1 = dyn Send + Obj + Obj;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _2 = dyn Obj + Send + Obj;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+type _3 = dyn Obj + Send + Send; // But it is OK to duplicate auto traits.
+
+// Take higher ranked types into account.
+
+// Note that `'a` and `'b` are intentionally named differently to check that
+// they are still considered semantically the same.
+trait ObjL<'l> {}
+type _4 = dyn for<'a> ObjL<'a> + for<'b> ObjL<'b>;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+trait ObjT<T> {}
+type _5 = dyn ObjT<for<'a> fn(&'a u8)> + ObjT<for<'b> fn(&'b u8)>;
+//~^ ERROR only auto traits can be used as additional traits in a trait object [E0225]
+
+fn main() {}
--- /dev/null
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/wf-trait-object-no-duplicates.rs:8:21
+ |
+LL | type _0 = dyn Obj + Obj;
+ | --- ^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/wf-trait-object-no-duplicates.rs:13:28
+ |
+LL | type _1 = dyn Send + Obj + Obj;
+ | --- ^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/wf-trait-object-no-duplicates.rs:16:28
+ |
+LL | type _2 = dyn Obj + Send + Obj;
+ | --- ^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/wf-trait-object-no-duplicates.rs:26:34
+ |
+LL | type _4 = dyn for<'a> ObjL<'a> + for<'b> ObjL<'b>;
+ | ---------------- ^^^^^^^^^^^^^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error[E0225]: only auto traits can be used as additional traits in a trait object
+ --> $DIR/wf-trait-object-no-duplicates.rs:30:42
+ |
+LL | type _5 = dyn ObjT<for<'a> fn(&'a u8)> + ObjT<for<'b> fn(&'b u8)>;
+ | ------------------------ ^^^^^^^^^^^^^^^^^^^^^^^^
+ | | |
+ | | additional non-auto trait
+ | | trait alias used in trait object type (additional use)
+ | first non-auto trait
+ | trait alias used in trait object type (first use)
+
+error: aborting due to 5 previous errors
+
+For more information about this error, try `rustc --explain E0225`.
--- /dev/null
+// Test that `dyn ?Sized` (i.e., a trait object with only a maybe bound) is not allowed.
+
+type _0 = dyn ?Sized;
+//~^ ERROR at least one non-builtin trait is required for an object type [E0224]
+//~| ERROR `?Trait` is not permitted in trait object types
+
+fn main() {}
--- /dev/null
+error: `?Trait` is not permitted in trait object types
+ --> $DIR/wf-trait-object-only-maybe-bound.rs:3:15
+ |
+LL | type _0 = dyn ?Sized;
+ | ^^^^^^
+
+error[E0224]: at least one non-builtin trait is required for an object type
+ --> $DIR/wf-trait-object-only-maybe-bound.rs:3:11
+ |
+LL | type _0 = dyn ?Sized;
+ | ^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
--- /dev/null
+// run-pass
+
+// Ensure that `dyn $($AutoTrait)+ ObjSafe` is well-formed.
+
+use std::marker::Unpin;
+
+// Some arbitrary object-safe trait:
+trait Obj {}
+
+type _0 = dyn Unpin;
+type _1 = dyn Send + Obj;
+type _2 = dyn Send + Unpin + Obj;
+type _3 = dyn Send + Unpin + Sync + Obj;
+
+fn main() {}
-Subproject commit 60a609acaed3bf2b3ec6ab995bccf0f03bc26060
+Subproject commit a8eeb7cdb135da1cd582c6093c1739732727a4a2
*slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/")));
});
let (format, dst) = parse_args();
- let result = syntax::with_globals(move || {
+ let result = syntax::with_default_globals(move || {
main_with_result(format, &dst)
});
if let Err(e) = result {