"rustc_errors 0.0.0",
"rustc_target 0.0.0",
"serialize 0.0.0",
+ "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
"stable_deref_trait 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_ext 0.0.0",
}
}
+ /// Writes successive items from `iter` into the uninitialized buffer `mem`,
+ /// stopping after `len` items or when the iterator runs dry, and returns the
+ /// initialized prefix as a mutable slice.
+ ///
+ /// Safety: the caller must guarantee that `mem` is valid for writes of `len`
+ /// values of `T` (the arena allocation at the call site provides this).
+ /// NOTE(review): `iter.next()` is called once more even when `i == len`, so
+ /// one element past `len` may be consumed from the iterator — confirm this
+ /// is acceptable for all callers.
+ #[inline]
+ unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
+ &self,
+ mut iter: I,
+ len: usize,
+ mem: *mut T,
+ ) -> &mut [T] {
+ let mut i = 0;
+ // Use a manual loop since LLVM manages to optimize it better for
+ // slice iterators
+ loop {
+ let value = iter.next();
+ if i >= len || value.is_none() {
+ // We only return as many items as the iterator gave us, even
+ // though it was supposed to give us `len`
+ return slice::from_raw_parts_mut(mem, i);
+ }
+ ptr::write(mem.offset(i as isize), value.unwrap());
+ i += 1;
+ }
+ }
+
#[inline]
pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
- let mut iter = iter.into_iter();
+ let iter = iter.into_iter();
assert!(mem::size_of::<T>() != 0);
assert!(!mem::needs_drop::<T>());
let size = len.checked_mul(mem::size_of::<T>()).unwrap();
let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
unsafe {
- for i in 0..len {
- ptr::write(mem.offset(i as isize), iter.next().unwrap())
- }
- slice::from_raw_parts_mut(mem, len)
+ self.write_from_iter(iter, len, mem)
}
}
(_, _) => {
use std::marker::PhantomData;
use smallvec::SmallVec;
+/// This declares a list of types which can be allocated by `Arena`.
+///
+/// The `few` modifier will cause allocation to use the shared arena and record the destructor.
+/// This is faster and more memory efficient if there are only a few allocations of the type.
+/// Leaving `few` out will cause the type to get its own dedicated `TypedArena`, which is
+/// faster and more memory efficient if there are many allocations of the type.
+///
+/// Specifying the `decode` modifier will add decode impls for `&T` and `&[T]`, where `T` is the
+/// listed type. These impls will appear in the `implement_ty_decoder!` macro.
#[macro_export]
macro_rules! arena_types {
($macro:path, $args:tt, $tcx:lifetime) => (
rustc::hir::def_id::DefId,
rustc::ty::subst::SubstsRef<$tcx>
)>,
- [few] mir_keys: rustc::util::nodemap::DefIdSet,
+ [few, decode] mir_keys: rustc::util::nodemap::DefIdSet,
[decode] specialization_graph: rustc::traits::specialization_graph::Graph,
[] region_scope_tree: rustc::middle::region::ScopeTree,
[] item_local_set: rustc::util::nodemap::ItemLocalSet,
rustc::infer::canonical::Canonical<'tcx,
rustc::infer::canonical::QueryResponse<'tcx, rustc::ty::Ty<'tcx>>
>,
+ [few] crate_inherent_impls: rustc::ty::CrateInherentImpls,
+ [decode] borrowck: rustc::middle::borrowck::BorrowCheckResult,
+ [few] upstream_monomorphizations:
+ rustc::util::nodemap::DefIdMap<
+ rustc_data_structures::fx::FxHashMap<
+ rustc::ty::subst::SubstsRef<'tcx>,
+ rustc::hir::def_id::CrateNum
+ >
+ >,
+ [few] resolve_lifetimes: rustc::middle::resolve_lifetime::ResolveLifetimes,
+ [decode] generic_predicates: rustc::ty::GenericPredicates<'tcx>,
+ [few] lint_levels: rustc::lint::LintLevelMap,
+ [few] stability_index: rustc::middle::stability::Index<'tcx>,
+ [few] features: syntax::feature_gate::Features,
+ [few] all_traits: Vec<rustc::hir::def_id::DefId>,
+ [few] privacy_access_levels: rustc::middle::privacy::AccessLevels,
+ [few] target_features_whitelist: rustc_data_structures::fx::FxHashMap<
+ String,
+ Option<syntax::symbol::Symbol>
+ >,
+ [few] wasm_import_module_map: rustc_data_structures::fx::FxHashMap<
+ rustc::hir::def_id::DefId,
+ String
+ >,
+ [few] get_lib_features: rustc::middle::lib_features::LibFeatures,
+ [few] defined_lib_features: rustc::middle::lang_items::LanguageItems,
+ [few] visible_parent_map: rustc::util::nodemap::DefIdMap<rustc::hir::def_id::DefId>,
+ [few] foreign_module: rustc::middle::cstore::ForeignModule,
+ [few] foreign_modules: Vec<rustc::middle::cstore::ForeignModule>,
+ [few] reachable_non_generics: rustc::util::nodemap::DefIdMap<
+ rustc::middle::exported_symbols::SymbolExportLevel
+ >,
+ [few] crate_variances: rustc::ty::CrateVariancesMap<'tcx>,
+ [few] inferred_outlives_crate: rustc::ty::CratePredicatesMap<'tcx>,
], $tcx);
)
}
impl<T: Copy> ArenaAllocatable for T {}
-pub unsafe trait ArenaField<'tcx>: Sized {
+unsafe trait ArenaField<'tcx>: Sized {
/// Returns a specific arena to allocate from.
/// If None is returned, the DropArena will be used.
fn arena<'a>(arena: &'a Arena<'tcx>) -> Option<&'a TypedArena<Self>>;
fn print_literal(&mut self, lit: &hir::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo())?;
- let (token, suffix) = lit.node.to_lit_token();
- self.writer().word(pprust::literal_to_string(token, suffix))
+ // `to_lit_token` now yields a single `token::Lit` that carries the suffix
+ // itself, so `literal_to_string` takes just one argument.
+ self.writer().word(pprust::literal_to_string(lit.node.to_lit_token()))
}
pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
impl_stable_hash_for!(struct ::syntax::ast::Lit {
node,
token,
- suffix,
span
});
}
}
-impl_stable_hash_for!(enum token::Lit {
- Bool(val),
- Byte(val),
- Char(val),
- Err(val),
- Integer(val),
- Float(val),
- Str_(val),
- ByteStr(val),
- StrRaw(val, n),
- ByteStrRaw(val, n)
+// Stable hashing for the split literal-token representation: `LitKind`
+// contributes only the variant (plus the `#` count `n` for the raw-string
+// variants)...
+impl_stable_hash_for!(enum token::LitKind {
+ Bool,
+ Byte,
+ Char,
+ Integer,
+ Float,
+ Str,
+ ByteStr,
+ StrRaw(n),
+ ByteStrRaw(n),
+ Err
+});
+
+// ...while the `symbol` and optional `suffix` fields are hashed via the
+// `token::Lit` struct itself.
+impl_stable_hash_for!(struct token::Lit {
+ kind,
+ symbol,
+ suffix
});
fn hash_token<'a, 'gcx, W: StableHasherResult>(
token::Token::CloseDelim(delim_token) => {
std_hash::Hash::hash(&delim_token, hasher);
}
- token::Token::Literal(lit, opt_name) => {
- lit.hash_stable(hcx, hasher);
- opt_name.hash_stable(hcx, hasher);
- }
+ token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
token::Token::Ident(ident, is_raw) => {
ident.name.hash_stable(hcx, hasher);
pub use self::Level::*;
pub use self::LintSource::*;
-use rustc_data_structures::sync::{self, Lrc};
+use rustc_data_structures::sync;
use crate::hir::def_id::{CrateNum, LOCAL_CRATE};
use crate::hir::intravisit;
}
fn lint_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cnum: CrateNum)
- -> Lrc<LintLevelMap>
+ -> &'tcx LintLevelMap
{
assert_eq!(cnum, LOCAL_CRATE);
let mut builder = LintLevelMapBuilder {
intravisit::walk_crate(&mut builder, krate);
builder.levels.pop(push);
- Lrc::new(builder.levels.build_map())
+ tcx.arena.alloc(builder.levels.build_map())
}
struct LintLevelMapBuilder<'a, 'tcx: 'a> {
Some((cnum, path))
})
.collect::<Vec<_>>();
- let mut ordering = tcx.postorder_cnums(LOCAL_CRATE);
- Lrc::make_mut(&mut ordering).reverse();
+ let mut ordering = tcx.postorder_cnums(LOCAL_CRATE).to_owned();
+ ordering.reverse();
libs.sort_by_cached_key(|&(a, _)| {
ordering.iter().position(|x| *x == a)
});
use crate::session::Session;
use crate::util::nodemap::{DefIdMap, FxHashMap, FxHashSet, HirIdMap, HirIdSet};
use errors::{Applicability, DiagnosticBuilder};
-use rustc_data_structures::sync::Lrc;
use rustc_macros::HashStable;
use std::borrow::Cow;
use std::cell::Cell;
/// See [`NamedRegionMap`].
#[derive(Default)]
pub struct ResolveLifetimes {
- defs: FxHashMap<LocalDefId, Lrc<FxHashMap<ItemLocalId, Region>>>,
- late_bound: FxHashMap<LocalDefId, Lrc<FxHashSet<ItemLocalId>>>,
+ defs: FxHashMap<LocalDefId, FxHashMap<ItemLocalId, Region>>,
+ late_bound: FxHashMap<LocalDefId, FxHashSet<ItemLocalId>>,
object_lifetime_defaults:
- FxHashMap<LocalDefId, Lrc<FxHashMap<ItemLocalId, Lrc<Vec<ObjectLifetimeDefault>>>>>,
+ FxHashMap<LocalDefId, FxHashMap<ItemLocalId, Vec<ObjectLifetimeDefault>>>,
}
impl_stable_hash_for!(struct crate::middle::resolve_lifetime::ResolveLifetimes {
named_region_map: |tcx, id| {
let id = LocalDefId::from_def_id(DefId::local(id)); // (*)
- tcx.resolve_lifetimes(LOCAL_CRATE).defs.get(&id).cloned()
+ tcx.resolve_lifetimes(LOCAL_CRATE).defs.get(&id)
},
is_late_bound_map: |tcx, id| {
tcx.resolve_lifetimes(LOCAL_CRATE)
.late_bound
.get(&id)
- .cloned()
},
object_lifetime_defaults_map: |tcx, id| {
tcx.resolve_lifetimes(LOCAL_CRATE)
.object_lifetime_defaults
.get(&id)
- .cloned()
},
..*providers
fn resolve_lifetimes<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
for_krate: CrateNum,
-) -> Lrc<ResolveLifetimes> {
+) -> &'tcx ResolveLifetimes {
assert_eq!(for_krate, LOCAL_CRATE);
let named_region_map = krate(tcx);
for (hir_id, v) in named_region_map.defs {
let map = rl.defs.entry(hir_id.owner_local_def_id()).or_default();
- Lrc::get_mut(map).unwrap().insert(hir_id.local_id, v);
+ map.insert(hir_id.local_id, v);
}
for hir_id in named_region_map.late_bound {
let map = rl.late_bound
.entry(hir_id.owner_local_def_id())
.or_default();
- Lrc::get_mut(map).unwrap().insert(hir_id.local_id);
+ map.insert(hir_id.local_id);
}
for (hir_id, v) in named_region_map.object_lifetime_defaults {
let map = rl.object_lifetime_defaults
.entry(hir_id.owner_local_def_id())
.or_default();
- Lrc::get_mut(map)
- .unwrap()
- .insert(hir_id.local_id, Lrc::new(v));
+ map.insert(hir_id.local_id, v);
}
- Lrc::new(rl)
+ tcx.arena.alloc(rl)
}
fn krate<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> NamedRegionMap {
remaining_lib_features.remove(&Symbol::intern("test"));
let check_features =
- |remaining_lib_features: &mut FxHashMap<_, _>, defined_features: &Vec<_>| {
+ |remaining_lib_features: &mut FxHashMap<_, _>, defined_features: &[_]| {
for &(feature, since) in defined_features {
if let Some(since) = since {
if let Some(span) = remaining_lib_features.get(&feature) {
if remaining_lib_features.is_empty() {
break;
}
- check_features(&mut remaining_lib_features, &tcx.defined_lib_features(cnum));
+ check_features(&mut remaining_lib_features, tcx.defined_lib_features(cnum));
}
}
/// predicate gets in the way of some checks, which are intended
/// to operate over only the actual where-clauses written by the
/// user.)
- query predicates_of(_: DefId) -> Lrc<ty::GenericPredicates<'tcx>> {}
+ query predicates_of(_: DefId) -> &'tcx ty::GenericPredicates<'tcx> {}
query native_libraries(_: CrateNum) -> Lrc<Vec<NativeLibrary>> {
desc { "looking up the native libraries of a linked crate" }
}
- query lint_levels(_: CrateNum) -> Lrc<lint::LintLevelMap> {
+ query lint_levels(_: CrateNum) -> &'tcx lint::LintLevelMap {
eval_always
desc { "computing the lint levels for items in this crate" }
}
}
Linking {
- query wasm_import_module_map(_: CrateNum) -> Lrc<FxHashMap<DefId, String>> {
+ query wasm_import_module_map(_: CrateNum) -> &'tcx FxHashMap<DefId, String> {
desc { "wasm import module map" }
}
}
/// equal to the `explicit_predicates_of` predicates plus the
/// `inferred_outlives_of` predicates.
query predicates_defined_on(_: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>> {}
+ -> &'tcx ty::GenericPredicates<'tcx> {}
/// Returns the predicates written explicitly by the user.
query explicit_predicates_of(_: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>> {}
+ -> &'tcx ty::GenericPredicates<'tcx> {}
/// Returns the inferred outlives predicates (e.g., for `struct
/// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`).
/// evaluate them even during type conversion, often before the
/// full predicates are available (note that supertraits have
/// additional acyclicity requirements).
- query super_predicates_of(key: DefId) -> Lrc<ty::GenericPredicates<'tcx>> {
+ query super_predicates_of(key: DefId) -> &'tcx ty::GenericPredicates<'tcx> {
desc { |tcx| "computing the supertraits of `{}`", tcx.def_path_str(key) }
}
/// To avoid cycles within the predicates of a single item we compute
/// per-type-parameter predicates for resolving `T::AssocTy`.
query type_param_predicates(key: (DefId, DefId))
- -> Lrc<ty::GenericPredicates<'tcx>> {
+ -> &'tcx ty::GenericPredicates<'tcx> {
no_force
desc { |tcx| "computing the bounds for type parameter `{}`", {
let id = tcx.hir().as_local_hir_id(key.1).unwrap();
query static_mutability(_: DefId) -> Option<hir::Mutability> {}
/// Gets a map with the variance of every item; use `item_variance` instead.
- query crate_variances(_: CrateNum) -> Lrc<ty::CrateVariancesMap<'tcx>> {
+ query crate_variances(_: CrateNum) -> &'tcx ty::CrateVariancesMap<'tcx> {
desc { "computing the variances for items in this crate" }
}
TypeChecking {
/// Maps from the `DefId` of a type to its (inferred) outlives predicates.
query inferred_outlives_crate(_: CrateNum)
- -> Lrc<ty::CratePredicatesMap<'tcx>> {
+ -> &'tcx ty::CratePredicatesMap<'tcx> {
desc { "computing the inferred outlives predicates for items in this crate" }
}
}
Other {
/// Maps from an impl/trait `DefId` to a list of the `DefId`s of its items.
- query associated_item_def_ids(_: DefId) -> Lrc<Vec<DefId>> {}
+ query associated_item_def_ids(_: DefId) -> &'tcx [DefId] {}
/// Maps from a trait item to the trait item "descriptor".
query associated_item(_: DefId) -> ty::AssociatedItem {}
/// Maps a `DefId` of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
- query inherent_impls(_: DefId) -> Lrc<Vec<DefId>> {
+ query inherent_impls(_: DefId) -> &'tcx [DefId] {
eval_always
}
}
}
Other {
- query used_trait_imports(_: DefId) -> Lrc<DefIdSet> {}
+ query used_trait_imports(_: DefId) -> &'tcx DefIdSet {}
}
TypeChecking {
}
BorrowChecking {
- query borrowck(_: DefId) -> Lrc<BorrowCheckResult> {}
+ query borrowck(_: DefId) -> &'tcx BorrowCheckResult {}
/// Borrow-checks the function body. If this is a closure, returns
/// additional requirements that the closure's creator must verify.
/// Not meant to be used directly outside of coherence.
/// (Defined only for `LOCAL_CRATE`.)
query crate_inherent_impls(k: CrateNum)
- -> Lrc<CrateInherentImpls> {
+ -> &'tcx CrateInherentImpls {
eval_always
desc { "all inherent impls defined in crate `{:?}`", k }
}
query check_match(_: DefId) -> () {}
/// Performs part of the privacy check and computes "access levels".
- query privacy_access_levels(_: CrateNum) -> Lrc<AccessLevels> {
+ query privacy_access_levels(_: CrateNum) -> &'tcx AccessLevels {
eval_always
desc { "privacy access levels" }
}
Other {
query dylib_dependency_formats(_: CrateNum)
- -> Lrc<Vec<(CrateNum, LinkagePreference)>> {
+ -> &'tcx [(CrateNum, LinkagePreference)] {
desc { "dylib dependency formats of crate" }
}
}
desc { "test whether a crate has #![no_builtins]" }
}
- query extern_crate(_: DefId) -> Lrc<Option<ExternCrate>> {
+ query extern_crate(_: DefId) -> Option<&'tcx ExternCrate> {
eval_always
desc { "getting crate's ExternCrateData" }
}
desc { "computing whether impls specialize one another" }
}
query in_scope_traits_map(_: DefIndex)
- -> Option<Lrc<FxHashMap<ItemLocalId, Lrc<StableVec<TraitCandidate>>>>> {
+ -> Option<&'tcx FxHashMap<ItemLocalId, StableVec<TraitCandidate>>> {
eval_always
desc { "traits in scope at a block" }
}
}
Other {
- query module_exports(_: DefId) -> Option<Lrc<Vec<Export<hir::HirId>>>> {
+ query module_exports(_: DefId) -> Option<&'tcx [Export<hir::HirId>]> {
eval_always
}
}
// Does not include external symbols that don't have a corresponding DefId,
// like the compiler-generated `main` function and so on.
query reachable_non_generics(_: CrateNum)
- -> Lrc<DefIdMap<SymbolExportLevel>> {
+ -> &'tcx DefIdMap<SymbolExportLevel> {
desc { "looking up the exported symbols of a crate" }
}
query is_reachable_non_generic(_: DefId) -> bool {}
Codegen {
query upstream_monomorphizations(
k: CrateNum
- ) -> Lrc<DefIdMap<Lrc<FxHashMap<SubstsRef<'tcx>, CrateNum>>>> {
+ ) -> &'tcx DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>> {
desc { "collecting available upstream monomorphizations `{:?}`", k }
}
query upstream_monomorphizations_for(_: DefId)
- -> Option<Lrc<FxHashMap<SubstsRef<'tcx>, CrateNum>>> {}
+ -> Option<&'tcx FxHashMap<SubstsRef<'tcx>, CrateNum>> {}
}
Other {
- query foreign_modules(_: CrateNum) -> Lrc<Vec<ForeignModule>> {
+ query foreign_modules(_: CrateNum) -> &'tcx [ForeignModule] {
desc { "looking up the foreign modules of a linked crate" }
}
TypeChecking {
query implementations_of_trait(_: (CrateNum, DefId))
- -> Lrc<Vec<DefId>> {
+ -> &'tcx [DefId] {
no_force
desc { "looking up implementations of a trait in a crate" }
}
query all_trait_implementations(_: CrateNum)
- -> Lrc<Vec<DefId>> {
+ -> &'tcx [DefId] {
desc { "looking up all (?) trait implementations" }
}
}
Other {
query dllimport_foreign_items(_: CrateNum)
- -> Lrc<FxHashSet<DefId>> {
+ -> &'tcx FxHashSet<DefId> {
desc { "dllimport_foreign_items" }
}
query is_dllimport_foreign_item(_: DefId) -> bool {}
BorrowChecking {
// Lifetime resolution. See `middle::resolve_lifetimes`.
- query resolve_lifetimes(_: CrateNum) -> Lrc<ResolveLifetimes> {
+ query resolve_lifetimes(_: CrateNum) -> &'tcx ResolveLifetimes {
desc { "resolving lifetimes" }
}
query named_region_map(_: DefIndex) ->
- Option<Lrc<FxHashMap<ItemLocalId, Region>>> {
+ Option<&'tcx FxHashMap<ItemLocalId, Region>> {
desc { "looking up a named region" }
}
query is_late_bound_map(_: DefIndex) ->
- Option<Lrc<FxHashSet<ItemLocalId>>> {
+ Option<&'tcx FxHashSet<ItemLocalId>> {
desc { "testing if a region is late bound" }
}
query object_lifetime_defaults_map(_: DefIndex)
- -> Option<Lrc<FxHashMap<ItemLocalId, Lrc<Vec<ObjectLifetimeDefault>>>>> {
+ -> Option<&'tcx FxHashMap<ItemLocalId, Vec<ObjectLifetimeDefault>>> {
desc { "looking up lifetime defaults for a region" }
}
}
eval_always
desc { "fetching what a crate is named" }
}
- query item_children(_: DefId) -> Lrc<Vec<Export<hir::HirId>>> {}
+ query item_children(_: DefId) -> &'tcx [Export<hir::HirId>] {}
query extern_mod_stmt_cnum(_: DefId) -> Option<CrateNum> {}
- query get_lib_features(_: CrateNum) -> Lrc<LibFeatures> {
+ query get_lib_features(_: CrateNum) -> &'tcx LibFeatures {
eval_always
desc { "calculating the lib features map" }
}
query defined_lib_features(_: CrateNum)
- -> Lrc<Vec<(Symbol, Option<Symbol>)>> {
+ -> &'tcx [(Symbol, Option<Symbol>)] {
desc { "calculating the lib features defined in a crate" }
}
- query get_lang_items(_: CrateNum) -> Lrc<LanguageItems> {
+ query get_lang_items(_: CrateNum) -> &'tcx LanguageItems {
eval_always
desc { "calculating the lang items map" }
}
- query defined_lang_items(_: CrateNum) -> Lrc<Vec<(DefId, usize)>> {
+ query defined_lang_items(_: CrateNum) -> &'tcx [(DefId, usize)] {
desc { "calculating the lang items defined in a crate" }
}
- query missing_lang_items(_: CrateNum) -> Lrc<Vec<LangItem>> {
+ query missing_lang_items(_: CrateNum) -> &'tcx [LangItem] {
desc { "calculating the missing lang items in a crate" }
}
query visible_parent_map(_: CrateNum)
- -> Lrc<DefIdMap<DefId>> {
+ -> &'tcx DefIdMap<DefId> {
desc { "calculating the visible parent map" }
}
query missing_extern_crate_item(_: CrateNum) -> bool {
eval_always
desc { "looking at the source for a crate" }
}
- query postorder_cnums(_: CrateNum) -> Lrc<Vec<CrateNum>> {
+ query postorder_cnums(_: CrateNum) -> &'tcx [CrateNum] {
eval_always
desc { "generating a postorder list of CrateNums" }
}
- query upvars(_: DefId) -> Option<Lrc<Vec<hir::Upvar>>> {
+ query upvars(_: DefId) -> Option<&'tcx [hir::Upvar]> {
eval_always
}
query maybe_unused_trait_import(_: DefId) -> bool {
eval_always
}
query maybe_unused_extern_crates(_: CrateNum)
- -> Lrc<Vec<(DefId, Span)>> {
+ -> &'tcx [(DefId, Span)] {
eval_always
desc { "looking up all possibly unused extern crates" }
}
eval_always
}
- query stability_index(_: CrateNum) -> Lrc<stability::Index<'tcx>> {
+ query stability_index(_: CrateNum) -> &'tcx stability::Index<'tcx> {
eval_always
desc { "calculating the stability index for the local crate" }
}
- query all_crate_nums(_: CrateNum) -> Lrc<Vec<CrateNum>> {
+ query all_crate_nums(_: CrateNum) -> &'tcx [CrateNum] {
eval_always
desc { "fetching all foreign CrateNum instances" }
}
/// A vector of every trait accessible in the whole crate
/// (i.e., including those from subcrates). This is used only for
/// error reporting.
- query all_traits(_: CrateNum) -> Lrc<Vec<DefId>> {
+ query all_traits(_: CrateNum) -> &'tcx [DefId] {
desc { "fetching all foreign and local traits" }
}
}
}
Other {
- query target_features_whitelist(_: CrateNum) -> Lrc<FxHashMap<String, Option<Symbol>>> {
+ query target_features_whitelist(_: CrateNum) -> &'tcx FxHashMap<String, Option<Symbol>> {
eval_always
desc { "looking up the whitelist of target features" }
}
desc { |tcx| "estimating size for `{}`", tcx.def_path_str(def.def_id()) }
}
- query features_query(_: CrateNum) -> Lrc<feature_gate::Features> {
+ query features_query(_: CrateNum) -> &'tcx feature_gate::Features {
eval_always
desc { "looking up enabled feature gates" }
}
}
}
+/// Commonly used objects, built once and stored in the global context.
+pub struct Common<'tcx> {
+ /// A `GenericPredicates` value with `parent: None` and an empty predicate
+ /// list — presumably shared to avoid rebuilding empty predicate sets;
+ /// confirm at the use sites.
+ pub empty_predicates: ty::GenericPredicates<'tcx>,
+}
+
pub struct CommonTypes<'tcx> {
pub unit: Ty<'tcx>,
pub bool: Ty<'tcx>,
pub dep_graph: DepGraph,
+ /// Common objects.
+ pub common: Common<'tcx>,
+
/// Common types, pre-interned for your convenience.
pub types: CommonTypes<'tcx>,
/// Map indicating what traits are in scope for places where this
/// is relevant; generated by resolve.
trait_map: FxHashMap<DefIndex,
- Lrc<FxHashMap<ItemLocalId,
- Lrc<StableVec<TraitCandidate>>>>>,
+ FxHashMap<ItemLocalId,
+ StableVec<TraitCandidate>>>,
/// Export map produced by name resolution.
- export_map: FxHashMap<DefId, Lrc<Vec<Export<hir::HirId>>>>,
+ export_map: FxHashMap<DefId, Vec<Export<hir::HirId>>>,
hir_map: hir_map::Map<'tcx>,
// Records the captured variables referenced by every closure
// expression. Do not track deps for this, just recompute it from
// scratch every time.
- upvars: FxHashMap<DefId, Lrc<Vec<hir::Upvar>>>,
+ upvars: FxHashMap<DefId, Vec<hir::Upvar>>,
maybe_unused_trait_imports: FxHashSet<DefId>,
maybe_unused_extern_crates: Vec<(DefId, Span)>,
s.fatal(&err);
});
let interners = CtxtInterners::new(&arenas.interner);
+ let common = Common {
+ empty_predicates: ty::GenericPredicates {
+ parent: None,
+ predicates: vec![],
+ },
+ };
let common_types = CommonTypes::new(&interners);
let common_lifetimes = CommonLifetimes::new(&interners);
let common_consts = CommonConsts::new(&interners, &common_types);
None
};
- let mut trait_map: FxHashMap<_, Lrc<FxHashMap<_, _>>> = FxHashMap::default();
+ let mut trait_map: FxHashMap<_, FxHashMap<_, _>> = FxHashMap::default();
for (k, v) in resolutions.trait_map {
let hir_id = hir.node_to_hir_id(k);
let map = trait_map.entry(hir_id.owner).or_default();
- Lrc::get_mut(map).unwrap()
- .insert(hir_id.local_id,
- Lrc::new(StableVec::new(v)));
+ map.insert(hir_id.local_id, StableVec::new(v));
}
GlobalCtxt {
global_arenas: &arenas.global,
global_interners: interners,
dep_graph,
+ common,
types: common_types,
lifetimes: common_lifetimes,
consts: common_consts,
let exports: Vec<_> = v.into_iter().map(|e| {
e.map_id(|id| hir.node_to_hir_id(id))
}).collect();
- (k, Lrc::new(exports))
+ (k, exports)
}).collect(),
upvars: resolutions.upvars.into_iter().map(|(k, v)| {
let vars: Vec<_> = v.into_iter().map(|e| {
e.map_id(|id| hir.node_to_hir_id(id))
}).collect();
- (hir.local_def_id(k), Lrc::new(vars))
+ (hir.local_def_id(k), vars)
}).collect(),
maybe_unused_trait_imports:
resolutions.maybe_unused_trait_imports
self.sess.consider_optimizing(&cname, msg)
}
- pub fn lib_features(self) -> Lrc<middle::lib_features::LibFeatures> {
+ pub fn lib_features(self) -> &'gcx middle::lib_features::LibFeatures {
self.get_lib_features(LOCAL_CRATE)
}
- pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
+ pub fn lang_items(self) -> &'gcx middle::lang_items::LanguageItems {
self.get_lang_items(LOCAL_CRATE)
}
else { None }
}
- pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
+ pub fn stability(self) -> &'gcx stability::Index<'gcx> {
self.stability_index(LOCAL_CRATE)
}
- pub fn crates(self) -> Lrc<Vec<CrateNum>> {
+ pub fn crates(self) -> &'gcx [CrateNum] {
self.all_crate_nums(LOCAL_CRATE)
}
- pub fn features(self) -> Lrc<feature_gate::Features> {
+ pub fn features(self) -> &'gcx feature_gate::Features {
self.features_query(LOCAL_CRATE)
}
lint::struct_lint_level(self.sess, lint, level, src, None, msg)
}
- pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
+ pub fn in_scope_traits(self, id: HirId) -> Option<&'gcx StableVec<TraitCandidate>> {
self.in_scope_traits_map(id.owner)
- .and_then(|map| map.get(&id.local_id).cloned())
+ .and_then(|map| map.get(&id.local_id))
}
pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
}
pub fn object_lifetime_defaults(self, id: HirId)
- -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
+ -> Option<&'gcx [ObjectLifetimeDefault]>
{
self.object_lifetime_defaults_map(id.owner)
- .and_then(|map| map.get(&id.local_id).cloned())
+ .and_then(|map| map.get(&id.local_id).map(|v| &**v))
}
}
}
pub fn provide(providers: &mut ty::query::Providers<'_>) {
- providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
- providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
+ providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id);
+ providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).map(|v| &v[..]);
providers.crate_name = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
tcx.crate_name
};
providers.get_lib_features = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
- Lrc::new(middle::lib_features::collect(tcx))
+ tcx.arena.alloc(middle::lib_features::collect(tcx))
};
providers.get_lang_items = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
- Lrc::new(middle::lang_items::collect(tcx))
+ tcx.arena.alloc(middle::lang_items::collect(tcx))
};
- providers.upvars = |tcx, id| tcx.gcx.upvars.get(&id).cloned();
+ providers.upvars = |tcx, id| tcx.gcx.upvars.get(&id).map(|v| &v[..]);
providers.maybe_unused_trait_import = |tcx, id| {
tcx.maybe_unused_trait_imports.contains(&id)
};
providers.maybe_unused_extern_crates = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(tcx.maybe_unused_extern_crates.clone())
+ &tcx.maybe_unused_extern_crates[..]
};
providers.names_imported_by_glob_use = |tcx, id| {
assert_eq!(id.krate, LOCAL_CRATE);
providers.stability_index = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(stability::Index::new(tcx))
+ tcx.arena.alloc(stability::Index::new(tcx))
};
providers.lookup_stability = |tcx, id| {
assert_eq!(id.krate, LOCAL_CRATE);
};
providers.all_crate_nums = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(tcx.cstore.crates_untracked())
+ tcx.arena.alloc_slice(&tcx.cstore.crates_untracked())
};
providers.postorder_cnums = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(tcx.cstore.postorder_cnums_untracked())
+ tcx.arena.alloc_slice(&tcx.cstore.postorder_cnums_untracked())
};
providers.output_filenames = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
};
providers.features_query = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(tcx.sess.features_untracked().clone())
+ tcx.arena.alloc(tcx.sess.features_untracked().clone())
};
providers.is_panic_runtime = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
}
#[inline]
- pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Lrc<GenericPredicates<'gcx>> {
+ pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx GenericPredicates<'gcx> {
tcx.predicates_of(self.did)
}
pub struct AssociatedItemsIterator<'a, 'gcx: 'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>,
- def_ids: Lrc<Vec<DefId>>,
+ def_ids: &'gcx [DefId],
next_index: usize,
}
fn associated_item_def_ids<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
- -> Lrc<Vec<DefId>> {
+ -> &'tcx [DefId] {
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
let item = tcx.hir().expect_item_by_hir_id(id);
- let vec: Vec<_> = match item.node {
+ match item.node {
hir::ItemKind::Trait(.., ref trait_item_refs) => {
- trait_item_refs.iter()
- .map(|trait_item_ref| trait_item_ref.id)
- .map(|id| tcx.hir().local_def_id_from_hir_id(id.hir_id))
- .collect()
+ tcx.arena.alloc_from_iter(
+ trait_item_refs.iter()
+ .map(|trait_item_ref| trait_item_ref.id)
+ .map(|id| tcx.hir().local_def_id_from_hir_id(id.hir_id))
+ )
}
hir::ItemKind::Impl(.., ref impl_item_refs) => {
- impl_item_refs.iter()
- .map(|impl_item_ref| impl_item_ref.id)
- .map(|id| tcx.hir().local_def_id_from_hir_id(id.hir_id))
- .collect()
+ tcx.arena.alloc_from_iter(
+ impl_item_refs.iter()
+ .map(|impl_item_ref| impl_item_ref.id)
+ .map(|id| tcx.hir().local_def_id_from_hir_id(id.hir_id))
+ )
}
- hir::ItemKind::TraitAlias(..) => vec![],
+ hir::ItemKind::TraitAlias(..) => &[],
_ => span_bug!(item.span, "associated_item_def_ids: not impl or trait")
- };
- Lrc::new(vec)
+ }
}
fn def_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Span {
/// (constructing this map requires touching the entire crate).
#[derive(Clone, Debug, Default, HashStable)]
pub struct CrateInherentImpls {
- pub inherent_impls: DefIdMap<Lrc<Vec<DefId>>>,
+ // Stored as a plain `Vec` now that the `crate_inherent_impls` query result
+ // is arena-allocated (returned as `&'tcx`) rather than shared via `Lrc`.
+ pub inherent_impls: DefIdMap<Vec<DefId>>,
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable)]
// 2. for an extern inferred from a path or an indirect crate,
// where there is no explicit `extern crate`, we just prepend
// the crate name.
- match *self.tcx().extern_crate(def_id) {
- Some(ExternCrate {
+ match self.tcx().extern_crate(def_id) {
+ Some(&ExternCrate {
src: ExternCrateSource::Extern(def_id),
direct: true,
span,
}
}
-impl<'tcx, T: Default> Value<'tcx> for T {
- default fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> T {
- T::default()
- }
-}
-
impl<'tcx> Value<'tcx> for Ty<'tcx> {
fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
tcx.types.err
use std::cell::{Cell, RefCell};
use std::fmt;
use std::rc::Rc;
-use rustc_data_structures::sync::Lrc;
use std::hash::{Hash, Hasher};
use syntax::source_map::CompilerDesugaringKind;
use syntax_pos::{MultiSpan, Span};
}
fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId)
- -> Lrc<BorrowCheckResult>
+ -> &'tcx BorrowCheckResult
{
assert!(tcx.use_ast_borrowck() || tcx.migrate_borrowck());
// those things (notably the synthesized constructors from
// tuple structs/variants) do not have an associated body
// and do not need borrowchecking.
- return Lrc::new(BorrowCheckResult {
+ return tcx.arena.alloc(BorrowCheckResult {
used_mut_nodes: Default::default(),
signalled_any_error: SignalledError::NoErrorsSeen,
})
check_loans::check_loans(&mut bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
}
- Lrc::new(BorrowCheckResult {
+ tcx.arena.alloc(BorrowCheckResult {
used_mut_nodes: bccx.used_mut_nodes.into_inner(),
signalled_any_error: bccx.signalled_any_error.into_inner(),
})
use rustc::ty::layout::HasTyCtxt;
use rustc::ty::query::Providers;
use rustc_data_structures::small_c_str::SmallCStr;
-use rustc_data_structures::sync::Lrc;
use rustc_data_structures::fx::FxHashMap;
use rustc_target::spec::PanicStrategy;
use rustc_codegen_ssa::traits::*;
if tcx.sess.opts.actually_rustdoc {
// rustdoc needs to be able to document functions that use all the features, so
// whitelist them all
- Lrc::new(llvm_util::all_known_features()
+ tcx.arena.alloc(llvm_util::all_known_features()
.map(|(a, b)| (a.to_string(), b))
.collect())
} else {
- Lrc::new(llvm_util::target_feature_whitelist(tcx.sess)
+ tcx.arena.alloc(llvm_util::target_feature_whitelist(tcx.sess)
.iter()
.map(|&(a, b)| (a.to_string(), b))
.collect())
}));
}
- Lrc::new(ret)
+ tcx.arena.alloc(ret)
};
}
-use rustc_data_structures::sync::Lrc;
use std::sync::Arc;
use rustc::ty::Instance;
fn reachable_non_generics_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
cnum: CrateNum)
- -> Lrc<DefIdMap<SymbolExportLevel>>
+ -> &'tcx DefIdMap<SymbolExportLevel>
{
assert_eq!(cnum, LOCAL_CRATE);
if !tcx.sess.opts.output_types.should_codegen() {
- return Default::default();
+ return tcx.arena.alloc(Default::default());
}
// Check to see if this crate is a "special runtime crate". These
reachable_non_generics.insert(id, SymbolExportLevel::C);
}
- Lrc::new(reachable_non_generics)
+ tcx.arena.alloc(reachable_non_generics)
}
fn is_reachable_non_generic_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
fn upstream_monomorphizations_provider<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
cnum: CrateNum)
- -> Lrc<DefIdMap<Lrc<FxHashMap<SubstsRef<'tcx>, CrateNum>>>>
+ -> &'tcx DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>>
{
debug_assert!(cnum == LOCAL_CRATE);
}
}
- Lrc::new(instances.into_iter()
- .map(|(key, value)| (key, Lrc::new(value)))
- .collect())
+ tcx.arena.alloc(instances)
}
fn upstream_monomorphizations_for_provider<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
- -> Option<Lrc<FxHashMap<SubstsRef<'tcx>, CrateNum>>>
+ -> Option<&'tcx FxHashMap<SubstsRef<'tcx>, CrateNum>>
{
debug_assert!(!def_id.is_local());
- tcx.upstream_monomorphizations(LOCAL_CRATE)
- .get(&def_id)
- .cloned()
+ tcx.upstream_monomorphizations(LOCAL_CRATE).get(&def_id)
}
fn is_unreachable_local_definition_provider(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> bool {
use rustc_mir::monomorphize::partitioning::{CodegenUnit, CodegenUnitExt};
use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::indexed_vec::Idx;
-use rustc_data_structures::sync::Lrc;
use rustc_codegen_utils::{symbol_names_test, check_for_rustc_errors_attr};
use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
use crate::mir::place::PlaceRef;
.map(|id| &module_map[&id])
.flat_map(|module| module.foreign_items.iter().cloned())
.collect();
- Lrc::new(dllimports)
+ tcx.arena.alloc(dllimports)
};
providers.is_dllimport_foreign_item = |tcx, def_id| {
flate2 = "1.0"
log = "0.4"
memmap = "0.6"
+smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
errors = { path = "../librustc_errors", package = "rustc_errors" }
use rustc::util::nodemap::DefIdMap;
use rustc_data_structures::svh::Svh;
+use smallvec::SmallVec;
use std::any::Any;
use rustc_data_structures::sync::Lrc;
use std::sync::Arc;
generics_of => {
tcx.alloc_generics(cdata.get_generics(def_id.index, tcx.sess))
}
- predicates_of => { Lrc::new(cdata.get_predicates(def_id.index, tcx)) }
- predicates_defined_on => { Lrc::new(cdata.get_predicates_defined_on(def_id.index, tcx)) }
- super_predicates_of => { Lrc::new(cdata.get_super_predicates(def_id.index, tcx)) }
+ predicates_of => { tcx.arena.alloc(cdata.get_predicates(def_id.index, tcx)) }
+ predicates_defined_on => {
+ tcx.arena.alloc(cdata.get_predicates_defined_on(def_id.index, tcx))
+ }
+ super_predicates_of => { tcx.arena.alloc(cdata.get_super_predicates(def_id.index, tcx)) }
trait_def => {
tcx.alloc_trait_def(cdata.get_trait_def(def_id.index, tcx.sess))
}
}
variances_of => { tcx.arena.alloc_from_iter(cdata.get_item_variances(def_id.index)) }
associated_item_def_ids => {
- let mut result = vec![];
+ let mut result = SmallVec::<[_; 8]>::new();
cdata.each_child_of_item(def_id.index,
|child| result.push(child.res.def_id()), tcx.sess);
- Lrc::new(result)
+ tcx.arena.alloc_slice(&result)
}
associated_item => { cdata.get_associated_item(def_id.index) }
impl_trait_ref => { cdata.get_impl_trait(def_id.index, tcx) }
(cdata.mir_const_qualif(def_id.index), tcx.arena.alloc(BitSet::new_empty(0)))
}
fn_sig => { cdata.fn_sig(def_id.index, tcx) }
- inherent_impls => { Lrc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
+ inherent_impls => { cdata.get_inherent_implementations_for_type(tcx, def_id.index) }
is_const_fn_raw => { cdata.is_const_fn_raw(def_id.index) }
is_foreign_item => { cdata.is_foreign_item(def_id.index) }
static_mutability => { cdata.static_mutability(def_id.index) }
}
is_mir_available => { cdata.is_item_mir_available(def_id.index) }
- dylib_dependency_formats => { Lrc::new(cdata.get_dylib_dependency_formats()) }
+ dylib_dependency_formats => { cdata.get_dylib_dependency_formats(tcx) }
is_panic_runtime => { cdata.root.panic_runtime }
is_compiler_builtins => { cdata.root.compiler_builtins }
has_global_allocator => { cdata.root.has_global_allocator }
is_profiler_runtime => { cdata.root.profiler_runtime }
panic_strategy => { cdata.root.panic_strategy }
extern_crate => {
- let r = Lrc::new(*cdata.extern_crate.lock());
- r
+ let r = *cdata.extern_crate.lock();
+ r.map(|c| &*tcx.arena.alloc(c))
}
is_no_builtins => { cdata.root.no_builtins }
impl_defaultness => { cdata.get_impl_defaultness(def_id.index) }
})
.collect();
- Lrc::new(reachable_non_generics)
+ tcx.arena.alloc(reachable_non_generics)
}
native_libraries => { Lrc::new(cdata.get_native_libraries(tcx.sess)) }
- foreign_modules => { Lrc::new(cdata.get_foreign_modules(tcx.sess)) }
+ foreign_modules => { cdata.get_foreign_modules(tcx) }
plugin_registrar_fn => {
cdata.root.plugin_registrar_fn.map(|index| {
DefId { krate: def_id.krate, index }
extra_filename => { cdata.root.extra_filename.clone() }
-
implementations_of_trait => {
- let mut result = vec![];
- let filter = Some(other);
- cdata.get_implementations_for_trait(filter, &mut result);
- Lrc::new(result)
+ cdata.get_implementations_for_trait(tcx, Some(other))
}
all_trait_implementations => {
- let mut result = vec![];
- cdata.get_implementations_for_trait(None, &mut result);
- Lrc::new(result)
+ cdata.get_implementations_for_trait(tcx, None)
}
visibility => { cdata.get_visibility(def_id.index) }
}
crate_name => { cdata.name }
item_children => {
- let mut result = vec![];
+ let mut result = SmallVec::<[_; 8]>::new();
cdata.each_child_of_item(def_id.index, |child| result.push(child), tcx.sess);
- Lrc::new(result)
+ tcx.arena.alloc_slice(&result)
}
- defined_lib_features => { Lrc::new(cdata.get_lib_features()) }
- defined_lang_items => { Lrc::new(cdata.get_lang_items()) }
- missing_lang_items => { Lrc::new(cdata.get_missing_lang_items()) }
+ defined_lib_features => { cdata.get_lib_features(tcx) }
+ defined_lang_items => { cdata.get_lang_items(tcx) }
+ missing_lang_items => { cdata.get_missing_lang_items(tcx) }
missing_extern_crate_item => {
let r = match *cdata.extern_crate.borrow() {
},
foreign_modules: |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(foreign_modules::collect(tcx))
+ &tcx.arena.alloc(foreign_modules::collect(tcx))[..]
},
link_args: |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
// which is to say, its not deterministic in general. But
// we believe that libstd is consistently assigned crate
// num 1, so it should be enough to resolve #46112.
- let mut crates: Vec<CrateNum> = (*tcx.crates()).clone();
+ let mut crates: Vec<CrateNum> = (*tcx.crates()).to_owned();
crates.sort();
for &cnum in crates.iter() {
}
}
- Lrc::new(visible_parent_map)
+ tcx.arena.alloc(visible_parent_map)
},
..*providers
}
/// Iterates over all the stability attributes in the given crate.
- pub fn get_lib_features(&self) -> Vec<(ast::Name, Option<ast::Name>)> {
+ pub fn get_lib_features(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [(ast::Name, Option<ast::Name>)] {
// FIXME: For a proc macro crate, not sure whether we should return the "host"
// features or an empty Vec. Both don't cause ICEs.
- self.root
+ tcx.arena.alloc_from_iter(self.root
.lib_features
- .decode(self)
- .collect()
+ .decode(self))
}
/// Iterates over the language items in the given crate.
- pub fn get_lang_items(&self) -> Vec<(DefId, usize)> {
+ pub fn get_lang_items(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [(DefId, usize)] {
if self.proc_macros.is_some() {
// Proc macro crates do not export any lang-items to the target.
- vec![]
+ &[]
} else {
- self.root
+ tcx.arena.alloc_from_iter(self.root
.lang_items
.decode(self)
- .map(|(def_index, index)| (self.local_def_id(def_index), index))
- .collect()
+ .map(|(def_index, index)| (self.local_def_id(def_index), index)))
}
}
None
}
- pub fn get_inherent_implementations_for_type(&self, id: DefIndex) -> Vec<DefId> {
- self.entry(id)
- .inherent_impls
- .decode(self)
- .map(|index| self.local_def_id(index))
- .collect()
+ pub fn get_inherent_implementations_for_type(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ id: DefIndex
+ ) -> &'tcx [DefId] {
+ tcx.arena.alloc_from_iter(self.entry(id)
+ .inherent_impls
+ .decode(self)
+ .map(|index| self.local_def_id(index)))
}
- pub fn get_implementations_for_trait(&self,
- filter: Option<DefId>,
- result: &mut Vec<DefId>) {
+ pub fn get_implementations_for_trait(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ filter: Option<DefId>,
+ ) -> &'tcx [DefId] {
if self.proc_macros.is_some() {
// proc-macro crates export no trait impls.
- return
+ return &[]
}
// Do a reverse lookup beforehand to avoid touching the crate_num
// hash map in the loop below.
let filter = match filter.map(|def_id| self.reverse_translate_def_id(def_id)) {
Some(Some(def_id)) => Some((def_id.krate.as_u32(), def_id.index)),
- Some(None) => return,
+ Some(None) => return &[],
None => None,
};
if let Some(filter) = filter {
- if let Some(impls) = self.trait_impls
- .get(&filter) {
- result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
+ if let Some(impls) = self.trait_impls.get(&filter) {
+ tcx.arena.alloc_from_iter(impls.decode(self).map(|idx| self.local_def_id(idx)))
+ } else {
+ &[]
}
} else {
- for impls in self.trait_impls.values() {
- result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
- }
+ tcx.arena.alloc_from_iter(self.trait_impls.values().flat_map(|impls| {
+ impls.decode(self).map(|idx| self.local_def_id(idx))
+ }))
}
}
}
}
- pub fn get_foreign_modules(&self, sess: &Session) -> Vec<ForeignModule> {
+ pub fn get_foreign_modules(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [ForeignModule] {
if self.proc_macros.is_some() {
// Proc macro crates do not have any *target* foreign modules.
- vec![]
+ &[]
} else {
- self.root.foreign_modules.decode((self, sess)).collect()
+ tcx.arena.alloc_from_iter(self.root.foreign_modules.decode((self, tcx.sess)))
}
}
- pub fn get_dylib_dependency_formats(&self) -> Vec<(CrateNum, LinkagePreference)> {
- self.root
+ pub fn get_dylib_dependency_formats(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [(CrateNum, LinkagePreference)] {
+ tcx.arena.alloc_from_iter(self.root
.dylib_dependency_formats
.decode(self)
.enumerate()
.flat_map(|(i, link)| {
let cnum = CrateNum::new(i + 1);
link.map(|link| (self.cnum_map[cnum], link))
- })
- .collect()
+ }))
}
- pub fn get_missing_lang_items(&self) -> Vec<lang_items::LangItem> {
+ pub fn get_missing_lang_items(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [lang_items::LangItem] {
if self.proc_macros.is_some() {
// Proc macro crates do not depend on any target weak lang-items.
- vec![]
+ &[]
} else {
- self.root
+ tcx.arena.alloc_from_iter(self.root
.lang_items_missing
- .decode(self)
- .collect()
+ .decode(self))
}
}
let data = ModData {
reexports: match tcx.module_exports(def_id) {
- Some(ref exports) => self.lazy_seq_ref(&exports[..]),
+ Some(exports) => self.lazy_seq_ref(exports),
_ => LazySeq::empty(),
},
};
use rustc::ty::subst::InternalSubsts;
use rustc::util::nodemap::HirIdSet;
use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::sync::Lrc;
use syntax::ast::Ident;
use syntax::attr;
use syntax::symbol::{kw, sym};
fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> bool {
self.skeleton().visit_trait(trait_ref)
}
- fn visit_predicates(&mut self, predicates: Lrc<ty::GenericPredicates<'tcx>>) -> bool {
+ fn visit_predicates(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> bool {
self.skeleton().visit_predicates(predicates)
}
}
(!self.def_id_visitor.shallow() && substs.visit_with(self))
}
- fn visit_predicates(&mut self, predicates: Lrc<ty::GenericPredicates<'tcx>>) -> bool {
- let ty::GenericPredicates { parent: _, predicates } = &*predicates;
+ fn visit_predicates(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> bool {
+ let ty::GenericPredicates { parent: _, predicates } = predicates;
for (predicate, _span) in predicates {
match predicate {
ty::Predicate::Trait(poly_predicate) => {
fn privacy_access_levels<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
krate: CrateNum,
-) -> Lrc<AccessLevels> {
+) -> &'tcx AccessLevels {
assert_eq!(krate, LOCAL_CRATE);
// Build up a set of all exported items in the AST. This is a set of all
}
visitor.update(hir::CRATE_HIR_ID, Some(AccessLevel::Public));
- Lrc::new(visitor.access_levels)
+ tcx.arena.alloc(visitor.access_levels)
}
fn check_private_in_public<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, krate: CrateNum) {
use rustc::ty::{self, DefIdTree, TyCtxt};
use rustc::{bug, span_bug};
use rustc_codegen_utils::link::{filename_for_metadata, out_filename};
-use rustc_data_structures::sync::Lrc;
use std::cell::Cell;
use std::default::Default;
let mut result = Vec::with_capacity(self.tcx.crates().len());
for &n in self.tcx.crates().iter() {
- let span = match *self.tcx.extern_crate(n.as_def_id()) {
- Some(ExternCrate { span, .. }) => span,
+ let span = match self.tcx.extern_crate(n.as_def_id()) {
+ Some(&ExternCrate { span, .. }) => span,
None => {
debug!("Skipping crate {}, no data", n);
continue;
// fallback in case the access levels couldn't have been correctly computed.
let access_levels = match tcx.sess.compile_status() {
Ok(..) => tcx.privacy_access_levels(LOCAL_CRATE),
- Err(..) => Lrc::new(AccessLevels::default()),
+ Err(..) => tcx.arena.alloc(AccessLevels::default()),
};
let save_ctxt = SaveContext {
use rustc::ty::subst::{Kind, Subst, InternalSubsts, SubstsRef};
use rustc::ty::wf::object_region_bounds;
use rustc::mir::interpret::ConstValue;
-use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi;
use crate::require_c_abi_if_c_variadic;
use smallvec::SmallVec;
/// Returns the set of bounds in scope for the type parameter with
/// the given id.
fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>>;
+ -> &'tcx ty::GenericPredicates<'tcx>;
/// What lifetime should we use when a lifetime is omitted (and not elided)?
fn re_infer(&self, span: Span, _def: Option<&ty::GenericParamDef>)
/// In addition of this check, it also checks between references mutability state. If the
/// expected is mutable but the provided isn't, maybe we could just say "Hey, try with
/// `&mut`!".
- pub fn check_ref(&self,
- expr: &hir::Expr,
- checked_ty: Ty<'tcx>,
- expected: Ty<'tcx>)
- -> Option<(Span, &'static str, String)> {
+ pub fn check_ref(
+ &self,
+ expr: &hir::Expr,
+ checked_ty: Ty<'tcx>,
+ expected: Ty<'tcx>,
+ ) -> Option<(Span, &'static str, String)> {
let cm = self.sess().source_map();
let sp = expr.span;
if !cm.span_to_filename(sp).is_real() {
} else {
String::new()
};
+ if let Some(hir::Node::Expr(hir::Expr {
+ node: hir::ExprKind::Assign(left_expr, _),
+ ..
+ })) = self.tcx.hir().find_by_hir_id(
+ self.tcx.hir().get_parent_node_by_hir_id(expr.hir_id),
+ ) {
+ if mutability == hir::Mutability::MutMutable {
+ // Found the following case:
+ // fn foo(opt: &mut Option<String>){ opt = None }
+ // --- ^^^^
+ // | |
+ // consider dereferencing here: `*opt` |
+ // expected mutable reference, found enum `Option`
+ if let Ok(src) = cm.span_to_snippet(left_expr.span) {
+ return Some((
+ left_expr.span,
+ "consider dereferencing here to assign to the mutable \
+ borrowed piece of memory",
+ format!("*{}", src),
+ ));
+ }
+ }
+ }
return Some(match mutability {
hir::Mutability::MutMutable => (
sp,
use crate::namespace::Namespace;
use crate::util::nodemap::FxHashSet;
use errors::{Applicability, DiagnosticBuilder};
-use rustc_data_structures::sync::Lrc;
use rustc::hir::{self, ExprKind, Node, QPath};
use rustc::hir::def::{Res, DefKind};
use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, DefId};
pub fn provide(providers: &mut ty::query::Providers<'_>) {
providers.all_traits = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(compute_all_traits(tcx))
+ &tcx.arena.alloc(compute_all_traits(tcx))[..]
}
}
use rustc::infer::{self, InferCtxt, InferOk, InferResult};
use rustc::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse};
use rustc_data_structures::indexed_vec::Idx;
-use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi::Abi;
use rustc::infer::opaque_types::OpaqueTypeDecl;
use rustc::infer::type_variable::{TypeVariableOrigin};
fn used_trait_imports<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
- -> Lrc<DefIdSet> {
- tcx.typeck_tables_of(def_id).used_trait_imports.clone()
+ -> &'tcx DefIdSet {
+ &*tcx.typeck_tables_of(def_id).used_trait_imports
}
fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
fn get_type_parameter_bounds(&self, _: Span, def_id: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>>
+ -> &'tcx ty::GenericPredicates<'tcx>
{
let tcx = self.tcx;
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
let item_def_id = tcx.hir().local_def_id_from_hir_id(item_id);
let generics = tcx.generics_of(item_def_id);
let index = generics.param_def_id_to_index[&def_id];
- Lrc::new(ty::GenericPredicates {
+ tcx.arena.alloc(ty::GenericPredicates {
parent: None,
predicates: self.param_env.caller_bounds.iter().filter_map(|&predicate| {
match predicate {
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ty::{self, CrateInherentImpls, TyCtxt};
-use rustc_data_structures::sync::Lrc;
use syntax::ast;
use syntax_pos::Span;
/// On-demand query: yields a map containing all types mapped to their inherent impls.
pub fn crate_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
crate_num: CrateNum)
- -> Lrc<CrateInherentImpls> {
+ -> &'tcx CrateInherentImpls {
assert_eq!(crate_num, LOCAL_CRATE);
let krate = tcx.hir().krate();
impls_map: Default::default(),
};
krate.visit_all_item_likes(&mut collect);
- Lrc::new(collect.impls_map)
+ tcx.arena.alloc(collect.impls_map)
}
/// On-demand query: yields a vector of the inherent impls for a specific type.
pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty_def_id: DefId)
- -> Lrc<Vec<DefId>> {
+ -> &'tcx [DefId] {
assert!(ty_def_id.is_local());
// NB. Until we adopt the red-green dep-tracking algorithm (see
//
// [the plan]: https://github.com/rust-lang/rust-roadmap/issues/4
- thread_local! {
- static EMPTY_DEF_ID_VEC: Lrc<Vec<DefId>> = Lrc::new(vec![])
- }
-
let result = tcx.dep_graph.with_ignore(|| {
let crate_map = tcx.crate_inherent_impls(ty_def_id.krate);
match crate_map.inherent_impls.get(&ty_def_id) {
- Some(v) => v.clone(),
- None => EMPTY_DEF_ID_VEC.with(|v| v.clone())
+ Some(v) => &v[..],
+ None => &[],
}
});
// type def ID, if there is a base type for this implementation and
// the implementation does not have any associated traits.
let impl_def_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
- let mut rc_vec = self.impls_map.inherent_impls
- .entry(def_id)
- .or_default();
-
- // At this point, there should not be any clones of the
- // `Lrc`, so we can still safely push into it in place:
- Lrc::get_mut(&mut rc_vec).unwrap().push(impl_def_id);
+ let vec = self.impls_map.inherent_impls.entry(def_id).or_default();
+ vec.push(impl_def_id);
} else {
struct_span_err!(self.tcx.sess,
item.span,
use rustc::ty::{ReprOptions, ToPredicate};
use rustc::util::captures::Captures;
use rustc::util::nodemap::FxHashMap;
-use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi;
use syntax::ast;
}
fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>> {
+ -> &'tcx ty::GenericPredicates<'tcx> {
self.tcx
.at(span)
.type_param_predicates((self.item_def_id, def_id))
fn type_param_predicates<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
(item_def_id, def_id): (DefId, DefId),
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
use rustc::hir::*;
// In the AST, bounds can derive from two places. Either
tcx.generics_of(item_def_id).parent
};
- let mut result = parent.map_or_else(
- || Lrc::new(ty::GenericPredicates {
- parent: None,
- predicates: vec![],
- }),
- |parent| {
- let icx = ItemCtxt::new(tcx, parent);
- icx.get_type_parameter_bounds(DUMMY_SP, def_id)
- },
- );
+ let result = parent.map_or(&tcx.common.empty_predicates, |parent| {
+ let icx = ItemCtxt::new(tcx, parent);
+ icx.get_type_parameter_bounds(DUMMY_SP, def_id)
+ });
+ let mut extend = None;
let item_hir_id = tcx.hir().as_local_hir_id(item_def_id).unwrap();
let ast_generics = match tcx.hir().get_by_hir_id(item_hir_id) {
// Implied `Self: Trait` and supertrait bounds.
if param_id == item_hir_id {
let identity_trait_ref = ty::TraitRef::identity(tcx, item_def_id);
- Lrc::make_mut(&mut result)
- .predicates
- .push((identity_trait_ref.to_predicate(), item.span));
+ extend = Some((identity_trait_ref.to_predicate(), item.span));
}
generics
}
};
let icx = ItemCtxt::new(tcx, item_def_id);
- Lrc::make_mut(&mut result)
- .predicates
- .extend(icx.type_parameter_bounds_in_generics(ast_generics, param_id, ty,
- OnlySelfBounds(true)));
- result
+ let mut result = (*result).clone();
+ result.predicates.extend(extend.into_iter());
+ result.predicates
+ .extend(icx.type_parameter_bounds_in_generics(ast_generics, param_id, ty,
+ OnlySelfBounds(true)));
+ tcx.arena.alloc(result)
}
impl<'a, 'tcx> ItemCtxt<'a, 'tcx> {
fn super_predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_def_id: DefId,
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
debug!("super_predicates(trait_def_id={:?})", trait_def_id);
let trait_hir_id = tcx.hir().as_local_hir_id(trait_def_id).unwrap();
}
}
- Lrc::new(ty::GenericPredicates {
+ tcx.arena.alloc(ty::GenericPredicates {
parent: None,
predicates: superbounds,
})
fn predicates_defined_on<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
debug!("predicates_defined_on({:?})", def_id);
let mut result = tcx.explicit_predicates_of(def_id);
debug!(
def_id,
inferred_outlives,
);
- Lrc::make_mut(&mut result)
- .predicates
- .extend(inferred_outlives.iter().map(|&p| (p, span)));
+ let mut predicates = (*result).clone();
+ predicates.predicates.extend(inferred_outlives.iter().map(|&p| (p, span)));
+ result = tcx.arena.alloc(predicates);
}
debug!("predicates_defined_on({:?}) = {:?}", def_id, result);
result
fn predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
let mut result = tcx.predicates_defined_on(def_id);
if tcx.is_trait(def_id) {
// used, and adding the predicate into this list ensures
// that this is done.
let span = tcx.def_span(def_id);
- Lrc::make_mut(&mut result)
- .predicates
- .push((ty::TraitRef::identity(tcx, def_id).to_predicate(), span));
+ let mut predicates = (*result).clone();
+ predicates.predicates.push((ty::TraitRef::identity(tcx, def_id).to_predicate(), span));
+ result = tcx.arena.alloc(predicates);
}
debug!("predicates_of(def_id={:?}) = {:?}", def_id, result);
result
fn explicit_predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
use rustc::hir::*;
use rustc_data_structures::fx::FxHashSet;
if impl_trait_fn.is_some() {
// impl Trait
- return Lrc::new(ty::GenericPredicates {
+ return tcx.arena.alloc(ty::GenericPredicates {
parent: None,
predicates: bounds.predicates(tcx, opaque_ty),
});
);
}
- let result = Lrc::new(ty::GenericPredicates {
+ let result = tcx.arena.alloc(ty::GenericPredicates {
parent: generics.parent,
predicates,
});
use rustc::ty::query::Providers;
use rustc::ty::subst::UnpackedKind;
use rustc::ty::{self, CratePredicatesMap, TyCtxt};
-use rustc_data_structures::sync::Lrc;
use syntax::symbol::sym;
mod explicit;
fn inferred_outlives_crate<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
crate_num: CrateNum,
-) -> Lrc<CratePredicatesMap<'tcx>> {
+) -> &'tcx CratePredicatesMap<'tcx> {
assert_eq!(crate_num, LOCAL_CRATE);
// Compute a map from each struct/enum/union S to the **explicit**
(def_id, &*predicates)
}).collect();
- Lrc::new(ty::CratePredicatesMap {
+ tcx.arena.alloc(ty::CratePredicatesMap {
predicates,
})
}
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::ty::{self, CrateVariancesMap, TyCtxt};
use rustc::ty::query::Providers;
-use rustc_data_structures::sync::Lrc;
/// Defines the `TermsContext` basically houses an arena where we can
/// allocate terms.
}
fn crate_variances<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum)
- -> Lrc<CrateVariancesMap<'tcx>> {
+ -> &'tcx CrateVariancesMap<'tcx> {
assert_eq!(crate_num, LOCAL_CRATE);
let mut arena = arena::TypedArena::default();
let terms_cx = terms::determine_parameters_to_be_inferred(tcx, &mut arena);
let constraints_cx = constraints::add_constraints_from_crate(terms_cx);
- Lrc::new(solve::solve_constraints(constraints_cx))
+ tcx.arena.alloc(solve::solve_constraints(constraints_cx))
}
fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
// Instead, we generate `impl !Send for Foo<T>`, which better
// expresses the fact that `Foo<T>` never implements `Send`,
// regardless of the choice of `T`.
- let params = (self.cx.tcx.generics_of(param_env_def_id), &Default::default())
- .clean(self.cx).params;
+ let params = (
+ self.cx.tcx.generics_of(param_env_def_id),
+ &&self.cx.tcx.common.empty_predicates,
+ ).clean(self.cx).params;
Generics {
params,
mod blanket_impl;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi::Abi;
use rustc_typeck::hir_ty_to_ty;
use rustc::infer::region_constraints::{RegionConstraintData, Constraint};
}
impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics,
- &'a Lrc<ty::GenericPredicates<'tcx>>) {
+ &'a &'tcx ty::GenericPredicates<'tcx>) {
fn clean(&self, cx: &DocContext<'_>) -> Generics {
use self::WherePredicate as WP;
loop {
let segment = path_it.next()?;
- for item in mem::replace(&mut items, Lrc::new(vec![])).iter() {
+ for item in mem::replace(&mut items, &[]).iter() {
if item.ident.name == *segment {
if path_it.peek().is_none() {
return match item.res {
}
}
- token::Literal(lit, _suf) => {
- match lit {
+ token::Literal(lit) => {
+ match lit.kind {
// Text literals.
- token::Byte(..) | token::Char(..) | token::Err(..) |
- token::ByteStr(..) | token::ByteStrRaw(..) |
- token::Str_(..) | token::StrRaw(..) => Class::String,
+ token::Byte | token::Char | token::Err |
+ token::ByteStr | token::ByteStrRaw(..) |
+ token::Str | token::StrRaw(..) => Class::String,
// Number literals.
- token::Integer(..) | token::Float(..) => Class::Number,
+ token::Integer | token::Float => Class::Number,
- token::Bool(..) => panic!("literal token contains `Lit::Bool`"),
+ token::Bool => panic!("literal token contains `Lit::Bool`"),
}
}
pub struct Lit {
/// The original literal token as written in source code.
pub token: token::Lit,
- /// The original literal suffix as written in source code.
- pub suffix: Option<Symbol>,
/// The "semantic" representation of the literal lowered from the original tokens.
/// Strings are unescaped, hexadecimal forms are eliminated, etc.
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
Some(TokenTree::Token(_, token::Eq)) => {
tokens.next();
return if let Some(TokenTree::Token(span, token)) = tokens.next() {
- Lit::from_token(&token, span, None).map(MetaItemKind::NameValue)
+ Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue)
} else {
None
};
where I: Iterator<Item = TokenTree>,
{
if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
- if let Some(lit) = Lit::from_token(&token, span, None) {
+ if let Ok(lit) = Lit::from_token(&token, span) {
tokens.next();
return Some(NestedMetaItem::Literal(lit));
}
},
(3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
Some(&TokenTree::Token(_, token::Comma)),
- Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
- (code, Some(description))
+ Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => {
+ (code, Some(symbol))
}
_ => unreachable!()
};
use crate::ast::{self, Ident};
-use crate::parse::{token, ParseSess};
+use crate::parse::ParseSess;
+use crate::parse::token::{self, Token};
use crate::symbol::Symbol;
use crate::parse::unescape;
use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
#[derive(Clone, Debug)]
pub struct TokenAndSpan {
- pub tok: token::Token,
+ pub tok: Token,
pub sp: Span,
}
/// Stop reading src at this index.
crate end_src_index: usize,
// cached:
- peek_tok: token::Token,
+ peek_tok: Token,
peek_span: Span,
peek_span_src_raw: Span,
fatal_errs: Vec<DiagnosticBuilder<'a>>,
}
/// Lex a LIT_INTEGER or a LIT_FLOAT
- fn scan_number(&mut self, c: char) -> token::Lit {
+ fn scan_number(&mut self, c: char) -> (token::LitKind, Symbol) {
let mut base = 10;
let start_bpos = self.pos;
self.bump();
}
_ => {
// just a 0
- return token::Integer(self.name_from(start_bpos));
+ return (token::Integer, self.name_from(start_bpos));
}
}
} else if c.is_digit(10) {
if num_digits == 0 {
self.err_span_(start_bpos, self.pos, "no valid digits found for number");
- return token::Integer(Symbol::intern("0"));
+ return (token::Integer, Symbol::intern("0"));
}
// might be a float, but don't be greedy if this is actually an
let pos = self.pos;
self.check_float_base(start_bpos, pos, base);
- token::Float(self.name_from(start_bpos))
+ (token::Float, self.name_from(start_bpos))
} else {
// it might be a float if it has an exponent
if self.ch_is('e') || self.ch_is('E') {
self.scan_float_exponent();
let pos = self.pos;
self.check_float_base(start_bpos, pos, base);
- return token::Float(self.name_from(start_bpos));
+ return (token::Float, self.name_from(start_bpos));
}
// but we certainly have an integer!
- token::Integer(self.name_from(start_bpos))
+ (token::Integer, self.name_from(start_bpos))
}
}
}
}
- fn binop(&mut self, op: token::BinOpToken) -> token::Token {
+ fn binop(&mut self, op: token::BinOpToken) -> Token {
self.bump();
if self.ch_is('=') {
self.bump();
/// Returns the next token from the string, advances the input past that
/// token, and updates the interner
- fn next_token_inner(&mut self) -> Result<token::Token, ()> {
+ fn next_token_inner(&mut self) -> Result<Token, ()> {
let c = self.ch;
if ident_start(c) {
}
if is_dec_digit(c) {
- let num = self.scan_number(c.unwrap());
+ let (kind, symbol) = self.scan_number(c.unwrap());
let suffix = self.scan_optional_raw_name();
- debug!("next_token_inner: scanned number {:?}, {:?}", num, suffix);
- return Ok(token::Literal(num, suffix));
+ debug!("next_token_inner: scanned number {:?}, {:?}, {:?}", kind, symbol, suffix);
+ return Ok(Token::lit(kind, symbol, suffix));
}
match c.expect("next_token_inner called at EOF") {
// lifetimes shouldn't end with a single quote
// if we find one, then this is an invalid character literal
if self.ch_is('\'') {
- let id = self.name_from(start);
+ let symbol = self.name_from(start);
self.bump();
self.validate_char_escape(start_with_quote);
- return Ok(token::Literal(token::Char(id), None))
+ return Ok(Token::lit(token::Char, symbol, None));
}
// Include the leading `'` in the real identifier, for macro
return Ok(token::Lifetime(ident));
}
let msg = "unterminated character literal";
- let id = self.scan_single_quoted_string(start_with_quote, msg);
+ let symbol = self.scan_single_quoted_string(start_with_quote, msg);
self.validate_char_escape(start_with_quote);
let suffix = self.scan_optional_raw_name();
- Ok(token::Literal(token::Char(id), suffix))
+ Ok(Token::lit(token::Char, symbol, suffix))
}
'b' => {
self.bump();
- let lit = match self.ch {
+ let (kind, symbol) = match self.ch {
Some('\'') => {
let start_with_quote = self.pos;
self.bump();
let msg = "unterminated byte constant";
- let id = self.scan_single_quoted_string(start_with_quote, msg);
+ let symbol = self.scan_single_quoted_string(start_with_quote, msg);
self.validate_byte_escape(start_with_quote);
- token::Byte(id)
+ (token::Byte, symbol)
},
Some('"') => {
let start_with_quote = self.pos;
let msg = "unterminated double quote byte string";
- let id = self.scan_double_quoted_string(msg);
+ let symbol = self.scan_double_quoted_string(msg);
self.validate_byte_str_escape(start_with_quote);
- token::ByteStr(id)
+ (token::ByteStr, symbol)
},
Some('r') => self.scan_raw_byte_string(),
_ => unreachable!(), // Should have been a token::Ident above.
};
let suffix = self.scan_optional_raw_name();
- Ok(token::Literal(lit, suffix))
+ Ok(Token::lit(kind, symbol, suffix))
}
'"' => {
let start_with_quote = self.pos;
let msg = "unterminated double quote string";
- let id = self.scan_double_quoted_string(msg);
+ let symbol = self.scan_double_quoted_string(msg);
self.validate_str_escape(start_with_quote);
let suffix = self.scan_optional_raw_name();
- Ok(token::Literal(token::Str_(id), suffix))
+ Ok(Token::lit(token::Str, symbol, suffix))
}
'r' => {
let start_bpos = self.pos;
}
self.bump();
- let id = if valid {
+ let symbol = if valid {
self.name_from_to(content_start_bpos, content_end_bpos)
} else {
Symbol::intern("??")
};
let suffix = self.scan_optional_raw_name();
- Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
+ Ok(Token::lit(token::StrRaw(hash_count), symbol, suffix))
}
'-' => {
if self.nextch_is('>') {
id
}
- fn scan_raw_byte_string(&mut self) -> token::Lit {
+ fn scan_raw_byte_string(&mut self) -> (token::LitKind, Symbol) {
let start_bpos = self.pos;
self.bump();
let mut hash_count = 0;
self.bump();
- token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos), hash_count)
+ (token::ByteStrRaw(hash_count), self.name_from_to(content_start_bpos, content_end_bpos))
}
fn validate_char_escape(&self, start_with_quote: BytePos) {
// check that the given reader produces the desired stream
// of tokens (stop checking after exhausting the expected vec)
- fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<token::Token>) {
+ fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<Token>) {
for expected_tok in &expected {
assert_eq!(&string_reader.next_token().tok, expected_tok);
}
}
// make the identifier by looking up the string in the interner
- fn mk_ident(id: &str) -> token::Token {
- token::Token::from_ast_ident(Ident::from_str(id))
+ fn mk_ident(id: &str) -> Token {
+ Token::from_ast_ident(Ident::from_str(id))
+ }
+
+ fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> Token {
+ Token::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
}
#[test]
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
- token::Literal(token::Char(Symbol::intern("a")), None));
+ mk_lit(token::Char, "a", None));
})
}
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
- token::Literal(token::Char(Symbol::intern(" ")), None));
+ mk_lit(token::Char, " ", None));
})
}
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
- token::Literal(token::Char(Symbol::intern("\\n")), None));
+ mk_lit(token::Char, "\\n", None));
})
}
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
- token::Lifetime(Ident::from_str("'abc")));
+ token::Lifetime(Ident::from_str("'abc")));
})
}
with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
- assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
- .next_token()
- .tok,
- token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
+ assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().tok,
+ mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
})
}
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok,
- token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
- Some(Symbol::intern("suffix"))));
+ mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
// with a whitespace separator:
assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok,
- token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
- None));
+ mk_lit(token::$tok_type, $tok_contents, None));
}}
}
test!("'a'", Char, "a");
test!("b'a'", Byte, "a");
- test!("\"a\"", Str_, "a");
+ test!("\"a\"", Str, "a");
test!("b\"a\"", ByteStr, "a");
test!("1234", Integer, "1234");
test!("0b101", Integer, "0b101");
test!("1.0e10", Float, "1.0e10");
assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok,
- token::Literal(token::Integer(Symbol::intern("2")),
- Some(Symbol::intern("us"))));
+ mk_lit(token::Integer, "2", Some("us")));
assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
- token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
- Some(Symbol::intern("suffix"))));
+ mk_lit(token::StrRaw(3), "raw", Some("suffix")));
assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
- token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
- Some(Symbol::intern("suffix"))));
+ mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
})
}
token::Comment => {}
_ => panic!("expected a comment!"),
}
- assert_eq!(lexer.next_token().tok,
- token::Literal(token::Char(Symbol::intern("a")), None));
+ assert_eq!(lexer.next_token().tok, mk_lit(token::Char, "a", None));
})
}
use crate::parse::token::{self, Token};
use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
use crate::print::pprust;
-use crate::symbol::{kw, Symbol};
+use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{TokenStream, TokenTree};
use errors::{Applicability, Handler};
use std::ascii;
-macro_rules! err {
- ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
- match $opt_diag {
- Some(($span, $diag)) => { $($body)* }
- None => return None,
+crate enum LitError {
+ NotLiteral,
+ LexerError,
+ InvalidSuffix,
+ InvalidIntSuffix,
+ InvalidFloatSuffix,
+ NonDecimalFloat(u32),
+ IntTooLarge,
+}
+
+impl LitError {
+ fn report(&self, diag: &Handler, lit: token::Lit, span: Span) {
+ let token::Lit { kind, suffix, .. } = lit;
+ match *self {
+ // `NotLiteral` is not an error by itself, so we don't report
+        // it and give the parser an opportunity to try something else.
+ LitError::NotLiteral => {}
+ // `LexerError` *is* an error, but it was already reported
+ // by lexer, so here we don't report it the second time.
+ LitError::LexerError => {}
+ LitError::InvalidSuffix => {
+ expect_no_suffix(
+ diag, span, &format!("{} {} literal", kind.article(), kind.descr()), suffix
+ );
+ }
+ LitError::InvalidIntSuffix => {
+ let suf = suffix.expect("suffix error with no suffix").as_str();
+ if looks_like_width_suffix(&['i', 'u'], &suf) {
+ // If it looks like a width, try to be helpful.
+ let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+ diag.struct_span_err(span, &msg)
+ .help("valid widths are 8, 16, 32, 64 and 128")
+ .emit();
+ } else {
+ let msg = format!("invalid suffix `{}` for integer literal", suf);
+ diag.struct_span_err(span, &msg)
+ .span_label(span, format!("invalid suffix `{}`", suf))
+ .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
+ .emit();
+ }
+ }
+ LitError::InvalidFloatSuffix => {
+ let suf = suffix.expect("suffix error with no suffix").as_str();
+ if looks_like_width_suffix(&['f'], &suf) {
+ // If it looks like a width, try to be helpful.
+ let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+ diag.struct_span_err(span, &msg)
+ .help("valid widths are 32 and 64")
+ .emit();
+ } else {
+ let msg = format!("invalid suffix `{}` for float literal", suf);
+ diag.struct_span_err(span, &msg)
+ .span_label(span, format!("invalid suffix `{}`", suf))
+ .help("valid suffixes are `f32` and `f64`")
+ .emit();
+ }
+ }
+ LitError::NonDecimalFloat(base) => {
+ let descr = match base {
+ 16 => "hexadecimal",
+ 8 => "octal",
+ 2 => "binary",
+ _ => unreachable!(),
+ };
+ diag.struct_span_err(span, &format!("{} float literal is not supported", descr))
+ .span_label(span, "not supported")
+ .emit();
+ }
+ LitError::IntTooLarge => {
+ diag.struct_span_err(span, "integer literal is too large")
+ .emit();
+ }
}
}
}
impl LitKind {
- /// Converts literal token with a suffix into a semantic literal.
- /// Works speculatively and may return `None` if diagnostic handler is not passed.
- /// If diagnostic handler is passed, always returns `Some`,
- /// possibly after reporting non-fatal errors and recovery.
- fn from_lit_token(
- lit: token::Lit,
- suf: Option<Symbol>,
- diag: Option<(Span, &Handler)>
- ) -> Option<LitKind> {
- if suf.is_some() && !lit.may_have_suffix() {
- err!(diag, |span, diag| {
- expect_no_suffix(span, diag, &format!("a {}", lit.literal_name()), suf)
- });
+ /// Converts literal token into a semantic literal.
+ fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
+ let token::Lit { kind, symbol, suffix } = lit;
+ if suffix.is_some() && !kind.may_have_suffix() {
+ return Err(LitError::InvalidSuffix);
}
- Some(match lit {
- token::Bool(i) => {
- assert!(i == kw::True || i == kw::False);
- LitKind::Bool(i == kw::True)
+ Ok(match kind {
+ token::Bool => {
+ assert!(symbol == kw::True || symbol == kw::False);
+ LitKind::Bool(symbol == kw::True)
}
- token::Byte(i) => {
- match unescape_byte(&i.as_str()) {
- Ok(c) => LitKind::Byte(c),
- Err(_) => LitKind::Err(i),
- }
- },
- token::Char(i) => {
- match unescape_char(&i.as_str()) {
- Ok(c) => LitKind::Char(c),
- Err(_) => LitKind::Err(i),
- }
- },
- token::Err(i) => LitKind::Err(i),
+ token::Byte => return unescape_byte(&symbol.as_str())
+ .map(LitKind::Byte).map_err(|_| LitError::LexerError),
+ token::Char => return unescape_char(&symbol.as_str())
+ .map(LitKind::Char).map_err(|_| LitError::LexerError),
// There are some valid suffixes for integer and float literals,
// so all the handling is done internally.
- token::Integer(s) => return integer_lit(&s.as_str(), suf, diag),
- token::Float(s) => return float_lit(&s.as_str(), suf, diag),
+ token::Integer => return integer_lit(symbol, suffix),
+ token::Float => return float_lit(symbol, suffix),
- token::Str_(mut sym) => {
+ token::Str => {
// If there are no characters requiring special treatment we can
- // reuse the symbol from the Token. Otherwise, we must generate a
+ // reuse the symbol from the token. Otherwise, we must generate a
// new symbol because the string in the LitKind is different to the
- // string in the Token.
- let mut has_error = false;
- let s = &sym.as_str();
- if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
+ // string in the token.
+ let s = symbol.as_str();
+ let symbol = if s.contains(&['\\', '\r'][..]) {
let mut buf = String::with_capacity(s.len());
- unescape_str(s, &mut |_, unescaped_char| {
+ let mut error = Ok(());
+ unescape_str(&s, &mut |_, unescaped_char| {
match unescaped_char {
Ok(c) => buf.push(c),
- Err(_) => has_error = true,
+ Err(_) => error = Err(LitError::LexerError),
}
});
- if has_error {
- return Some(LitKind::Err(sym));
- }
- sym = Symbol::intern(&buf)
- }
-
- LitKind::Str(sym, ast::StrStyle::Cooked)
+ error?;
+ Symbol::intern(&buf)
+ } else {
+ symbol
+ };
+ LitKind::Str(symbol, ast::StrStyle::Cooked)
}
- token::StrRaw(mut sym, n) => {
+ token::StrRaw(n) => {
// Ditto.
- let s = &sym.as_str();
- if s.contains('\r') {
- sym = Symbol::intern(&raw_str_lit(s));
- }
- LitKind::Str(sym, ast::StrStyle::Raw(n))
+ let s = symbol.as_str();
+ let symbol = if s.contains('\r') {
+ Symbol::intern(&raw_str_lit(&s))
+ } else {
+ symbol
+ };
+ LitKind::Str(symbol, ast::StrStyle::Raw(n))
}
- token::ByteStr(i) => {
- let s = &i.as_str();
+ token::ByteStr => {
+ let s = symbol.as_str();
let mut buf = Vec::with_capacity(s.len());
- let mut has_error = false;
- unescape_byte_str(s, &mut |_, unescaped_byte| {
+ let mut error = Ok(());
+ unescape_byte_str(&s, &mut |_, unescaped_byte| {
match unescaped_byte {
Ok(c) => buf.push(c),
- Err(_) => has_error = true,
+ Err(_) => error = Err(LitError::LexerError),
}
});
- if has_error {
- return Some(LitKind::Err(i));
- }
+ error?;
buf.shrink_to_fit();
LitKind::ByteStr(Lrc::new(buf))
}
- token::ByteStrRaw(i, _) => {
- LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))
- }
+ token::ByteStrRaw(_) => LitKind::ByteStr(Lrc::new(symbol.to_string().into_bytes())),
+ token::Err => LitKind::Err(symbol),
})
}
/// Attempts to recover a token from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
- pub fn to_lit_token(&self) -> (token::Lit, Option<Symbol>) {
- match *self {
+ pub fn to_lit_token(&self) -> token::Lit {
+ let (kind, symbol, suffix) = match *self {
LitKind::Str(string, ast::StrStyle::Cooked) => {
let escaped = string.as_str().escape_default().to_string();
- (token::Lit::Str_(Symbol::intern(&escaped)), None)
+ (token::Str, Symbol::intern(&escaped), None)
}
LitKind::Str(string, ast::StrStyle::Raw(n)) => {
- (token::Lit::StrRaw(string, n), None)
+ (token::StrRaw(n), string, None)
}
LitKind::ByteStr(ref bytes) => {
let string = bytes.iter().cloned().flat_map(ascii::escape_default)
.map(Into::<char>::into).collect::<String>();
- (token::Lit::ByteStr(Symbol::intern(&string)), None)
+ (token::ByteStr, Symbol::intern(&string), None)
}
LitKind::Byte(byte) => {
let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
- (token::Lit::Byte(Symbol::intern(&string)), None)
+ (token::Byte, Symbol::intern(&string), None)
}
LitKind::Char(ch) => {
let string: String = ch.escape_default().map(Into::<char>::into).collect();
- (token::Lit::Char(Symbol::intern(&string)), None)
+ (token::Char, Symbol::intern(&string), None)
}
LitKind::Int(n, ty) => {
let suffix = match ty {
ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
ast::LitIntType::Unsuffixed => None,
};
- (token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
+ (token::Integer, Symbol::intern(&n.to_string()), suffix)
}
LitKind::Float(symbol, ty) => {
- (token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
+ (token::Float, symbol, Some(Symbol::intern(ty.ty_to_string())))
+ }
+ LitKind::FloatUnsuffixed(symbol) => {
+ (token::Float, symbol, None)
}
- LitKind::FloatUnsuffixed(symbol) => (token::Lit::Float(symbol), None),
LitKind::Bool(value) => {
- let kw = if value { kw::True } else { kw::False };
- (token::Lit::Bool(kw), None)
+ let symbol = if value { kw::True } else { kw::False };
+ (token::Bool, symbol, None)
}
- LitKind::Err(val) => (token::Lit::Err(val), None),
- }
+ LitKind::Err(symbol) => {
+ (token::Err, symbol, None)
+ }
+ };
+
+ token::Lit::new(kind, symbol, suffix)
}
}
impl Lit {
- /// Converts literal token with a suffix into an AST literal.
- /// Works speculatively and may return `None` if diagnostic handler is not passed.
- /// If diagnostic handler is passed, may return `Some`,
- /// possibly after reporting non-fatal errors and recovery, or `None` for irrecoverable errors.
- crate fn from_token(
- token: &token::Token,
- span: Span,
- diag: Option<(Span, &Handler)>,
- ) -> Option<Lit> {
- let (token, suffix) = match *token {
+ /// Converts literal token into an AST literal.
+ fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
+ Ok(Lit { token, node: LitKind::from_lit_token(token)?, span })
+ }
+
+ /// Converts arbitrary token into an AST literal.
+ crate fn from_token(token: &Token, span: Span) -> Result<Lit, LitError> {
+ let lit = match *token {
token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
- (token::Bool(ident.name), None),
- token::Literal(token, suffix) =>
- (token, suffix),
+ token::Lit::new(token::Bool, ident.name, None),
+ token::Literal(lit) =>
+ lit,
token::Interpolated(ref nt) => {
if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
if let ast::ExprKind::Lit(lit) = &expr.node {
- return Some(lit.clone());
+ return Ok(lit.clone());
}
}
- return None;
+ return Err(LitError::NotLiteral);
}
- _ => return None,
+ _ => return Err(LitError::NotLiteral)
};
- let node = LitKind::from_lit_token(token, suffix, diag)?;
- Some(Lit { node, token, suffix, span })
+ Lit::from_lit_token(lit, span)
}
/// Attempts to recover an AST literal from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
- let (token, suffix) = node.to_lit_token();
- Lit { node, token, suffix, span }
+ Lit { token: node.to_lit_token(), node, span }
}
/// Losslessly convert an AST literal into a token stream.
crate fn tokens(&self) -> TokenStream {
- let token = match self.token {
- token::Bool(symbol) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
- token => Token::Literal(token, self.suffix),
+ let token = match self.token.kind {
+ token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
+ _ => token::Literal(self.token),
};
TokenTree::Token(self.span, token).into()
}
impl<'a> Parser<'a> {
/// Matches `lit = true | false | token_lit`.
crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
- let diag = Some((self.span, &self.sess.span_diagnostic));
- if let Some(lit) = Lit::from_token(&self.token, self.span, diag) {
- self.bump();
- return Ok(lit);
- } else if self.token == token::Dot {
- // Recover `.4` as `0.4`.
- let recovered = self.look_ahead(1, |t| {
- if let token::Literal(token::Integer(val), suf) = *t {
+ let mut recovered = None;
+ if self.token == token::Dot {
+ // Attempt to recover `.4` as `0.4`.
+ recovered = self.look_ahead(1, |t| {
+ if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = *t {
let next_span = self.look_ahead_span(1);
if self.span.hi() == next_span.lo() {
- let sym = String::from("0.") + &val.as_str();
- let token = token::Literal(token::Float(Symbol::intern(&sym)), suf);
+ let s = String::from("0.") + &symbol.as_str();
+ let token = Token::lit(token::Float, Symbol::intern(&s), suffix);
return Some((token, self.span.to(next_span)));
}
}
None
});
- if let Some((token, span)) = recovered {
+ if let Some((ref token, span)) = recovered {
+ self.bump();
self.diagnostic()
.struct_span_err(span, "float literals must have an integer part")
.span_suggestion(
Applicability::MachineApplicable,
)
.emit();
- let diag = Some((span, &self.sess.span_diagnostic));
- if let Some(lit) = Lit::from_token(&token, span, diag) {
- self.bump();
- self.bump();
- return Ok(lit);
- }
}
}
- Err(self.span_fatal(self.span, &format!("unexpected token: {}", self.this_token_descr())))
- }
-}
+ let (token, span) = recovered.as_ref().map_or((&self.token, self.span),
+ |(token, span)| (token, *span));
-crate fn expect_no_suffix(sp: Span, diag: &Handler, kind: &str, suffix: Option<ast::Name>) {
- match suffix {
- None => {/* everything ok */}
- Some(suf) => {
- let text = suf.as_str();
- if text.is_empty() {
- diag.span_bug(sp, "found empty literal suffix in Some")
+ match Lit::from_token(token, span) {
+ Ok(lit) => {
+ self.bump();
+ Ok(lit)
+ }
+ Err(LitError::NotLiteral) => {
+ let msg = format!("unexpected token: {}", self.this_token_descr());
+ Err(self.span_fatal(span, &msg))
+ }
+ Err(err) => {
+ let lit = token.expect_lit();
+ self.bump();
+ err.report(&self.sess.span_diagnostic, lit, span);
+ let lit = token::Lit::new(token::Err, lit.symbol, lit.suffix);
+ Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
}
- let mut err = if kind == "a tuple index" &&
- ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
- {
- // #59553: warn instead of reject out of hand to allow the fix to percolate
- // through the ecosystem when people fix their macros
- let mut err = diag.struct_span_warn(
- sp,
- &format!("suffixes on {} are invalid", kind),
- );
- err.note(&format!(
- "`{}` is *temporarily* accepted on tuple index fields as it was \
- incorrectly accepted on stable for a few releases",
- text,
- ));
- err.help(
- "on proc macros, you'll want to use `syn::Index::from` or \
- `proc_macro::Literal::*_unsuffixed` for code that will desugar \
- to tuple field access",
- );
- err.note(
- "for more context, see https://github.com/rust-lang/rust/issues/60210",
- );
- err
- } else {
- diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
- };
- err.span_label(sp, format!("invalid suffix `{}`", text));
- err.emit();
}
}
}
+crate fn expect_no_suffix(diag: &Handler, sp: Span, kind: &str, suffix: Option<Symbol>) {
+ if let Some(suf) = suffix {
+ let mut err = if kind == "a tuple index" &&
+ [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) {
+ // #59553: warn instead of reject out of hand to allow the fix to percolate
+ // through the ecosystem when people fix their macros
+ let mut err = diag.struct_span_warn(
+ sp,
+ &format!("suffixes on {} are invalid", kind),
+ );
+ err.note(&format!(
+ "`{}` is *temporarily* accepted on tuple index fields as it was \
+ incorrectly accepted on stable for a few releases",
+ suf,
+ ));
+ err.help(
+ "on proc macros, you'll want to use `syn::Index::from` or \
+ `proc_macro::Literal::*_unsuffixed` for code that will desugar \
+ to tuple field access",
+ );
+ err.note(
+ "for more context, see https://github.com/rust-lang/rust/issues/60210",
+ );
+ err
+ } else {
+ diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
+ };
+ err.span_label(sp, format!("invalid suffix `{}`", suf));
+ err.emit();
+ }
+}
+
/// Parses a string representing a raw string literal into its final form. The
/// only operation this does is convert embedded CRLF into a single LF.
fn raw_str_lit(lit: &str) -> String {
- debug!("raw_str_lit: given {}", lit.escape_default());
+ debug!("raw_str_lit: {:?}", lit);
let mut res = String::with_capacity(lit.len());
let mut chars = lit.chars().peekable();
res
}
-// check if `s` looks like i32 or u1234 etc.
+// Checks if `s` looks like i32 or u1234 etc.
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
- s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
+ s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
}
-fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
- -> Option<LitKind> {
- debug!("filtered_float_lit: {}, {:?}", data, suffix);
- let suffix = match suffix {
- Some(suffix) => suffix,
- None => return Some(LitKind::FloatUnsuffixed(data)),
- };
-
- Some(match &*suffix.as_str() {
- "f32" => LitKind::Float(data, ast::FloatTy::F32),
- "f64" => LitKind::Float(data, ast::FloatTy::F64),
- suf => {
- err!(diag, |span, diag| {
- if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
- // if it looks like a width, lets try to be helpful.
- let msg = format!("invalid width `{}` for float literal", &suf[1..]);
- diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
- } else {
- let msg = format!("invalid suffix `{}` for float literal", suf);
- diag.struct_span_err(span, &msg)
- .span_label(span, format!("invalid suffix `{}`", suf))
- .help("valid suffixes are `f32` and `f64`")
- .emit();
- }
- });
+fn strip_underscores(symbol: Symbol) -> Symbol {
+ // Do not allocate a new string unless necessary.
+ let s = symbol.as_str();
+ if s.contains('_') {
+ let mut s = s.to_string();
+ s.retain(|c| c != '_');
+ return Symbol::intern(&s);
+ }
+ symbol
+}
- LitKind::FloatUnsuffixed(data)
+fn filtered_float_lit(symbol: Symbol, suffix: Option<Symbol>, base: u32)
+ -> Result<LitKind, LitError> {
+ debug!("filtered_float_lit: {:?}, {:?}, {:?}", symbol, suffix, base);
+ if base != 10 {
+ return Err(LitError::NonDecimalFloat(base));
+ }
+ Ok(match suffix {
+ Some(suf) => match suf {
+ sym::f32 => LitKind::Float(symbol, ast::FloatTy::F32),
+ sym::f64 => LitKind::Float(symbol, ast::FloatTy::F64),
+ _ => return Err(LitError::InvalidFloatSuffix),
}
+ None => LitKind::FloatUnsuffixed(symbol)
})
}
-fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
- -> Option<LitKind> {
- debug!("float_lit: {:?}, {:?}", s, suffix);
- // FIXME #2252: bounds checking float literals is deferred until trans
-
- // Strip underscores without allocating a new String unless necessary.
- let s2;
- let s = if s.chars().any(|c| c == '_') {
- s2 = s.chars().filter(|&c| c != '_').collect::<String>();
- &s2
- } else {
- s
- };
- filtered_float_lit(Symbol::intern(s), suffix, diag)
+fn float_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitError> {
+ debug!("float_lit: {:?}, {:?}", symbol, suffix);
+ filtered_float_lit(strip_underscores(symbol), suffix, 10)
}
-fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
- -> Option<LitKind> {
- // s can only be ascii, byte indexing is fine
-
- // Strip underscores without allocating a new String unless necessary.
- let s2;
- let mut s = if s.chars().any(|c| c == '_') {
- s2 = s.chars().filter(|&c| c != '_').collect::<String>();
- &s2
- } else {
- s
- };
-
- debug!("integer_lit: {}, {:?}", s, suffix);
+fn integer_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitError> {
+ debug!("integer_lit: {:?}, {:?}", symbol, suffix);
+ let symbol = strip_underscores(symbol);
+ let s = symbol.as_str();
let mut base = 10;
- let orig = s;
- let mut ty = ast::LitIntType::Unsuffixed;
-
- if s.starts_with('0') && s.len() > 1 {
+ if s.len() > 1 && s.as_bytes()[0] == b'0' {
match s.as_bytes()[1] {
b'x' => base = 16,
b'o' => base = 8,
b'b' => base = 2,
- _ => { }
+ _ => {}
}
}
- // 1f64 and 2f32 etc. are valid float literals.
- if let Some(suf) = suffix {
- if looks_like_width_suffix(&['f'], &suf.as_str()) {
- let err = match base {
- 16 => Some("hexadecimal float literal is not supported"),
- 8 => Some("octal float literal is not supported"),
- 2 => Some("binary float literal is not supported"),
- _ => None,
- };
- if let Some(err) = err {
- err!(diag, |span, diag| {
- diag.struct_span_err(span, err)
- .span_label(span, "not supported")
- .emit();
- });
- }
- return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
- }
- }
-
- if base != 10 {
- s = &s[2..];
- }
-
- if let Some(suf) = suffix {
- if suf.as_str().is_empty() {
- err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
- }
- ty = match &*suf.as_str() {
- "isize" => ast::LitIntType::Signed(ast::IntTy::Isize),
- "i8" => ast::LitIntType::Signed(ast::IntTy::I8),
- "i16" => ast::LitIntType::Signed(ast::IntTy::I16),
- "i32" => ast::LitIntType::Signed(ast::IntTy::I32),
- "i64" => ast::LitIntType::Signed(ast::IntTy::I64),
- "i128" => ast::LitIntType::Signed(ast::IntTy::I128),
- "usize" => ast::LitIntType::Unsigned(ast::UintTy::Usize),
- "u8" => ast::LitIntType::Unsigned(ast::UintTy::U8),
- "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
- "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
- "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
- "u128" => ast::LitIntType::Unsigned(ast::UintTy::U128),
- suf => {
- // i<digits> and u<digits> look like widths, so lets
- // give an error message along those lines
- err!(diag, |span, diag| {
- if looks_like_width_suffix(&['i', 'u'], suf) {
- let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
- diag.struct_span_err(span, &msg)
- .help("valid widths are 8, 16, 32, 64 and 128")
- .emit();
- } else {
- let msg = format!("invalid suffix `{}` for numeric literal", suf);
- diag.struct_span_err(span, &msg)
- .span_label(span, format!("invalid suffix `{}`", suf))
- .help("the suffix must be one of the integral types \
- (`u32`, `isize`, etc)")
- .emit();
- }
- });
-
- ty
- }
+ let ty = match suffix {
+ Some(suf) => match suf {
+ sym::isize => ast::LitIntType::Signed(ast::IntTy::Isize),
+ sym::i8 => ast::LitIntType::Signed(ast::IntTy::I8),
+ sym::i16 => ast::LitIntType::Signed(ast::IntTy::I16),
+ sym::i32 => ast::LitIntType::Signed(ast::IntTy::I32),
+ sym::i64 => ast::LitIntType::Signed(ast::IntTy::I64),
+ sym::i128 => ast::LitIntType::Signed(ast::IntTy::I128),
+ sym::usize => ast::LitIntType::Unsigned(ast::UintTy::Usize),
+ sym::u8 => ast::LitIntType::Unsigned(ast::UintTy::U8),
+ sym::u16 => ast::LitIntType::Unsigned(ast::UintTy::U16),
+ sym::u32 => ast::LitIntType::Unsigned(ast::UintTy::U32),
+ sym::u64 => ast::LitIntType::Unsigned(ast::UintTy::U64),
+ sym::u128 => ast::LitIntType::Unsigned(ast::UintTy::U128),
+ // `1f64` and `2f32` etc. are valid float literals, and
+            // `fxxx` looks more like an invalid float literal than an invalid integer literal.
+ _ if suf.as_str().starts_with('f') => return filtered_float_lit(symbol, suffix, base),
+ _ => return Err(LitError::InvalidIntSuffix),
}
- }
-
- debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
- string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);
-
- Some(match u128::from_str_radix(s, base) {
- Ok(r) => LitKind::Int(r, ty),
- Err(_) => {
- // small bases are lexed as if they were base 10, e.g, the string
- // might be `0b10201`. This will cause the conversion above to fail,
- // but these cases have errors in the lexer: we don't want to emit
- // two errors, and we especially don't want to emit this error since
- // it isn't necessarily true.
- let already_errored = base < 10 &&
- s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
+ _ => ast::LitIntType::Unsuffixed
+ };
- if !already_errored {
- err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
- }
- LitKind::Int(0, ty)
- }
+ let s = &s[if base != 10 { 2 } else { 0 } ..];
+ u128::from_str_radix(s, base).map(|i| LitKind::Int(i, ty)).map_err(|_| {
+ // Small bases are lexed as if they were base 10, e.g, the string
+ // might be `0b10201`. This will cause the conversion above to fail,
+ // but these kinds of errors are already reported by the lexer.
+ let from_lexer =
+ base < 10 && s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
+ if from_lexer { LitError::LexerError } else { LitError::IntTooLarge }
})
}
let body = TokenTree::Delimited(
delim_span,
token::Bracket,
- [TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
- TokenTree::Token(sp, token::Eq),
- TokenTree::Token(sp, token::Literal(
- token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+ [
+ TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+ TokenTree::Token(sp, token::Eq),
+ TokenTree::Token(sp, token::Token::lit(
+ token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
+ )),
]
.iter().cloned().collect::<TokenStream>().into(),
);
}
fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
- literal::expect_no_suffix(sp, &self.sess.span_diagnostic, kind, suffix)
+ literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
}
/// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
}
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
- if let token::Literal(token::Integer(name), suffix) = self.token {
+ if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
self.expect_no_suffix(self.span, "a tuple index", suffix);
self.bump();
- Ok(Ident::new(name, self.prev_span))
+ Ok(Ident::new(symbol, self.prev_span))
} else {
self.parse_ident_common(false)
}
token::Ident(..) => {
e = self.parse_dot_suffix(e, lo)?;
}
- token::Literal(token::Integer(name), suffix) => {
+ token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
let span = self.span;
self.bump();
- let field = ExprKind::Field(e, Ident::new(name, span));
+ let field = ExprKind::Field(e, Ident::new(symbol, span));
e = self.mk_expr(lo.to(span), field, ThinVec::new());
self.expect_no_suffix(span, "a tuple index", suffix);
}
- token::Literal(token::Float(n), _suf) => {
+ token::Literal(token::Lit { kind: token::Float, symbol, .. }) => {
self.bump();
- let fstr = n.as_str();
- let mut err = self.diagnostic()
- .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n));
+ let fstr = symbol.as_str();
+ let msg = format!("unexpected token: `{}`", symbol);
+ let mut err = self.diagnostic().struct_span_err(self.prev_span, &msg);
err.span_label(self.prev_span, "unexpected token");
if fstr.chars().all(|x| "0123456789.".contains(x)) {
let float = match fstr.parse::<f64>().ok() {
/// the `extern` keyword, if one is found.
fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
match self.token {
- token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
+ token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
+ token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
let sp = self.span;
- self.expect_no_suffix(sp, "an ABI spec", suf);
+ self.expect_no_suffix(sp, "an ABI spec", suffix);
self.bump();
- match abi::lookup(&s.as_str()) {
+ match abi::lookup(&symbol.as_str()) {
Some(abi) => Ok(Some(abi)),
None => {
let prev_span = self.prev_span;
prev_span,
E0703,
"invalid ABI: found `{}`",
- s);
+ symbol);
err.span_label(prev_span, "invalid ABI");
err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
err.emit();
pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
let ret = match self.token {
- token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
- token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
+ token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
+ (symbol, ast::StrStyle::Cooked, suffix),
+ token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
+ (symbol, ast::StrStyle::Raw(n), suffix),
_ => return None
};
self.bump();
pub use BinOpToken::*;
pub use Nonterminal::*;
pub use DelimToken::*;
-pub use Lit::*;
+pub use LitKind::*;
pub use Token::*;
use crate::ast::{self};
}
}
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum Lit {
- Bool(ast::Name), // AST only, must never appear in a `Token`
- Byte(ast::Name),
- Char(ast::Name),
- Err(ast::Name),
- Integer(ast::Name),
- Float(ast::Name),
- Str_(ast::Name),
- StrRaw(ast::Name, u16), /* raw str delimited by n hash symbols */
- ByteStr(ast::Name),
- ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
+#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub enum LitKind {
+ Bool, // AST only, must never appear in a `Token`
+ Byte,
+ Char,
+ Integer,
+ Float,
+ Str,
+ StrRaw(u16), // raw string delimited by `n` hash symbols
+ ByteStr,
+ ByteStrRaw(u16), // raw byte string delimited by `n` hash symbols
+ Err,
}
-#[cfg(target_arch = "x86_64")]
-static_assert_size!(Lit, 8);
+/// A literal token.
+#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub struct Lit {
+ pub kind: LitKind,
+ pub symbol: Symbol,
+ pub suffix: Option<Symbol>,
+}
-impl Lit {
- crate fn literal_name(&self) -> &'static str {
- match *self {
- Bool(_) => panic!("literal token contains `Lit::Bool`"),
- Byte(_) => "byte literal",
- Char(_) => "char literal",
- Err(_) => "invalid literal",
- Integer(_) => "integer literal",
- Float(_) => "float literal",
- Str_(_) | StrRaw(..) => "string literal",
- ByteStr(_) | ByteStrRaw(..) => "byte string literal"
+impl LitKind {
+ /// An English article for the literal token kind.
+ crate fn article(self) -> &'static str {
+ match self {
+ Integer | Err => "an",
+ _ => "a",
}
}
- crate fn may_have_suffix(&self) -> bool {
- match *self {
- Integer(..) | Float(..) => true,
+ crate fn descr(self) -> &'static str {
+ match self {
+ Bool => panic!("literal token contains `Lit::Bool`"),
+ Byte => "byte",
+ Char => "char",
+ Integer => "integer",
+ Float => "float",
+ Str | StrRaw(..) => "string",
+ ByteStr | ByteStrRaw(..) => "byte string",
+ Err => "error",
+ }
+ }
+
+ crate fn may_have_suffix(self) -> bool {
+ match self {
+ Integer | Float | Err => true,
_ => false,
}
}
+}
- // See comments in `Nonterminal::to_tokenstream` for why we care about
- // *probably* equal here rather than actual equality
- fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
- mem::discriminant(self) == mem::discriminant(other)
+impl Lit {
+ pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
+ Lit { kind, symbol, suffix }
}
}
CloseDelim(DelimToken),
/* Literals */
- Literal(Lit, Option<ast::Name>),
+ Literal(Lit),
/* Name components */
Ident(ast::Ident, /* is_raw */ bool),
self == &Question || self == &OpenDelim(Paren)
}
+ pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Token {
+ Literal(Lit::new(kind, symbol, suffix))
+ }
+
/// Returns `true` if the token is any literal
crate fn is_lit(&self) -> bool {
match *self {
}
}
+ crate fn expect_lit(&self) -> Lit {
+ match *self {
+ Literal(lit) => lit,
+ _=> panic!("`expect_lit` called on non-literal"),
+ }
+ }
+
/// Returns `true` if the token is any literal, a minus (which can prefix a literal,
/// for example a '-42', or one of the boolean idents).
crate fn can_begin_literal_or_bool(&self) -> bool {
(&DocComment(a), &DocComment(b)) |
(&Shebang(a), &Shebang(b)) => a == b,
+ (&Literal(a), &Literal(b)) => a == b,
+
(&Lifetime(a), &Lifetime(b)) => a.name == b.name,
(&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
a.name == kw::DollarCrate ||
c.name == kw::DollarCrate),
- (&Literal(ref a, b), &Literal(ref c, d)) => {
- b == d && a.probably_equal_for_proc_macro(c)
- }
-
(&Interpolated(_), &Interpolated(_)) => false,
_ => panic!("forgot to add a token?"),
}
}
-pub fn literal_to_string(lit: token::Lit, suffix: Option<ast::Name>) -> String {
- let mut out = match lit {
- token::Byte(b) => format!("b'{}'", b),
- token::Char(c) => format!("'{}'", c),
- token::Err(c) => format!("'{}'", c),
- token::Bool(c) |
- token::Float(c) |
- token::Integer(c) => c.to_string(),
- token::Str_(s) => format!("\"{}\"", s),
- token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
- delim="#".repeat(n as usize),
- string=s),
- token::ByteStr(v) => format!("b\"{}\"", v),
- token::ByteStrRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
- delim="#".repeat(n as usize),
- string=s),
+pub fn literal_to_string(lit: token::Lit) -> String {
+ let token::Lit { kind, symbol, suffix } = lit;
+ let mut out = match kind {
+ token::Byte => format!("b'{}'", symbol),
+ token::Char => format!("'{}'", symbol),
+ token::Bool |
+ token::Float |
+ token::Integer => symbol.to_string(),
+ token::Str => format!("\"{}\"", symbol),
+ token::StrRaw(n) => format!("r{delim}\"{string}\"{delim}",
+ delim="#".repeat(n as usize),
+ string=symbol),
+ token::ByteStr => format!("b\"{}\"", symbol),
+ token::ByteStrRaw(n) => format!("br{delim}\"{string}\"{delim}",
+ delim="#".repeat(n as usize),
+ string=symbol),
+ token::Err => format!("'{}'", symbol),
};
if let Some(suffix) = suffix {
token::SingleQuote => "'".to_string(),
/* Literals */
- token::Literal(lit, suf) => literal_to_string(lit, suf),
+ token::Literal(lit) => literal_to_string(lit),
/* Name components */
token::Ident(s, false) => s.to_string(),
fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo())?;
- self.writer().word(literal_to_string(lit.token, lit.suffix))
+ self.writer().word(literal_to_string(lit.token))
}
fn print_string(&mut self, st: &str,
use syntax::source_map::Spanned;
use syntax::ext::base::*;
use syntax::ext::build::AstBuilder;
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
use syntax::parse::parser::Parser;
use syntax::print::pprust;
use syntax::ptr::P;
tts: custom_message.unwrap_or_else(|| {
TokenStream::from(TokenTree::Token(
DUMMY_SP,
- token::Literal(
- token::Lit::Str_(Name::intern(&format!(
- "assertion failed: {}",
- pprust::expr_to_string(&cond_expr).escape_debug()
- ))),
- None,
- ),
+ Token::lit(token::Str, Symbol::intern(&format!(
+ "assertion failed: {}",
+ pprust::expr_to_string(&cond_expr).escape_debug()
+ )), None),
))
}).into(),
delim: MacDelimiter::Parenthesis,
//
// Parse this as an actual message, and suggest inserting a comma. Eventually, this should be
// turned into an error.
- let custom_message = if let token::Literal(token::Lit::Str_(_), _) = parser.token {
+ let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token {
let mut err = cx.struct_span_warn(parser.span, "unexpected string literal");
let comma_span = cx.source_map().next_point(parser.prev_span);
err.span_suggestion_short(
stack.push(tt!(Ident::new(ident.name, false)));
tt!(Punct::new('\'', true))
}
- Literal(lit, suffix) => tt!(Literal { lit, suffix }),
+ Literal(lit) => tt!(Literal { lit }),
DocComment(c) => {
let style = comments::doc_comment_style(&c.as_str());
let stripped = comments::strip_doc_comment_decoration(&c.as_str());
let stream = vec![
Ident(ast::Ident::new(Symbol::intern("doc"), span), false),
Eq,
- Literal(Lit::Str_(Symbol::intern(&escaped)), None),
+ Token::lit(token::Str, Symbol::intern(&escaped), None),
]
.into_iter()
.map(|token| tokenstream::TokenTree::Token(span, token))
return tokenstream::TokenTree::Token(span, token).into();
}
TokenTree::Literal(self::Literal {
- lit: Lit::Integer(ref a),
- suffix,
+ lit: token::Lit { kind: token::Integer, symbol, suffix },
span,
- }) if a.as_str().starts_with("-") => {
+ }) if symbol.as_str().starts_with("-") => {
let minus = BinOp(BinOpToken::Minus);
- let integer = Symbol::intern(&a.as_str()[1..]);
- let integer = Literal(Lit::Integer(integer), suffix);
+ let symbol = Symbol::intern(&symbol.as_str()[1..]);
+ let integer = Token::lit(token::Integer, symbol, suffix);
let a = tokenstream::TokenTree::Token(span, minus);
let b = tokenstream::TokenTree::Token(span, integer);
return vec![a, b].into_iter().collect();
}
TokenTree::Literal(self::Literal {
- lit: Lit::Float(ref a),
- suffix,
+ lit: token::Lit { kind: token::Float, symbol, suffix },
span,
- }) if a.as_str().starts_with("-") => {
+ }) if symbol.as_str().starts_with("-") => {
let minus = BinOp(BinOpToken::Minus);
- let float = Symbol::intern(&a.as_str()[1..]);
- let float = Literal(Lit::Float(float), suffix);
+ let symbol = Symbol::intern(&symbol.as_str()[1..]);
+ let float = Token::lit(token::Float, symbol, suffix);
let a = tokenstream::TokenTree::Token(span, minus);
let b = tokenstream::TokenTree::Token(span, float);
return vec![a, b].into_iter().collect();
}
- TokenTree::Literal(self::Literal { lit, suffix, span }) => {
- return tokenstream::TokenTree::Token(span, Literal(lit, suffix)).into()
+ TokenTree::Literal(self::Literal { lit, span }) => {
+ return tokenstream::TokenTree::Token(span, Literal(lit)).into()
}
};
#[derive(Clone, Debug)]
pub struct Literal {
lit: token::Lit,
- suffix: Option<Symbol>,
span: Span,
}
call_site: to_span(Transparency::Transparent),
}
}
+
+ fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
+ Literal {
+ lit: token::Lit::new(kind, symbol, suffix),
+ span: server::Span::call_site(self),
+ }
+ }
}
impl server::Types for Rustc<'_> {
format!("{:?}", literal)
}
fn integer(&mut self, n: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Integer(Symbol::intern(n)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::Integer, Symbol::intern(n), None)
}
fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Integer(Symbol::intern(n)),
- suffix: Some(Symbol::intern(kind)),
- span: server::Span::call_site(self),
- }
+ self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
}
fn float(&mut self, n: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Float(Symbol::intern(n)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::Float, Symbol::intern(n), None)
}
fn f32(&mut self, n: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Float(Symbol::intern(n)),
- suffix: Some(Symbol::intern("f32")),
- span: server::Span::call_site(self),
- }
+ self.lit(token::Float, Symbol::intern(n), Some(Symbol::intern("f32")))
}
fn f64(&mut self, n: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Float(Symbol::intern(n)),
- suffix: Some(Symbol::intern("f64")),
- span: server::Span::call_site(self),
- }
+ self.lit(token::Float, Symbol::intern(n), Some(Symbol::intern("f64")))
}
fn string(&mut self, string: &str) -> Self::Literal {
let mut escaped = String::new();
for ch in string.chars() {
escaped.extend(ch.escape_debug());
}
- Literal {
- lit: token::Lit::Str_(Symbol::intern(&escaped)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::Str, Symbol::intern(&escaped), None)
}
fn character(&mut self, ch: char) -> Self::Literal {
let mut escaped = String::new();
escaped.extend(ch.escape_unicode());
- Literal {
- lit: token::Lit::Char(Symbol::intern(&escaped)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::Char, Symbol::intern(&escaped), None)
}
fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
let string = bytes
.flat_map(ascii::escape_default)
.map(Into::<char>::into)
.collect::<String>();
- Literal {
- lit: token::Lit::ByteStr(Symbol::intern(&string)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::ByteStr, Symbol::intern(&string), None)
}
fn span(&mut self, literal: &Self::Literal) -> Self::Span {
literal.span
extern_prelude,
extern_types,
f16c_target_feature,
+ f32,
+ f64,
feature,
ffi_returns_twice,
field_init_shorthand,
#[cfg(cfail2)]
pub trait T2: T1 { }
//[cfail2]~^ ERROR cycle detected when computing the supertraits of `T2`
-//[cfail2]~| ERROR cycle detected when computing the supertraits of `T2`
pub trait T1: T2 { }
fn provide(&self, providers: &mut Providers) {
rustc_codegen_utils::symbol_names::provide(providers);
- providers.target_features_whitelist = |_tcx, _cnum| {
- Default::default() // Just a dummy
+ providers.target_features_whitelist = |tcx, _cnum| {
+ tcx.arena.alloc(Default::default()) // Just a dummy
};
providers.is_reachable_non_generic = |_tcx, _defid| true;
providers.exported_symbols = |_tcx, _crate| Arc::new(Vec::new());
where T : Trait,
T : Add<T::Item>
//~^ ERROR cycle detected
- //~| ERROR associated type `Item` not found for `T`
{
data: T
}
LL | T : Add<T::Item>
| ^^^^^^^
-error[E0220]: associated type `Item` not found for `T`
- --> $DIR/cycle-projection-based-on-where-clause.rs:17:19
- |
-LL | T : Add<T::Item>
- | ^^^^^^^ associated type `Item` not found
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
-Some errors have detailed explanations: E0220, E0391.
-For more information about an error, try `rustc --explain E0220`.
+For more information about this error, try `rustc --explain E0391`.
trait T : Iterator<Item=Self::Item>
//~^ ERROR cycle detected
-//~| ERROR associated type `Item` not found for `Self`
{}
fn main() {}
|
LL | / trait T : Iterator<Item=Self::Item>
LL | |
-LL | |
LL | | {}
| |__^
|
|
LL | / trait T : Iterator<Item=Self::Item>
LL | |
-LL | |
LL | | {}
| |__^
-error[E0220]: associated type `Item` not found for `Self`
- --> $DIR/issue-20772.rs:1:25
- |
-LL | trait T : Iterator<Item=Self::Item>
- | ^^^^^^^^^^ associated type `Item` not found
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
-Some errors have detailed explanations: E0220, E0391.
-For more information about an error, try `rustc --explain E0220`.
+For more information about this error, try `rustc --explain E0391`.
fn foo<T: Trait<A = T::B>>() { }
//~^ ERROR cycle detected
-//~| ERROR associated type `B` not found for `T`
fn main() { }
LL | fn foo<T: Trait<A = T::B>>() { }
| ^^^^
-error[E0220]: associated type `B` not found for `T`
- --> $DIR/issue-21177.rs:6:21
- |
-LL | fn foo<T: Trait<A = T::B>>() { }
- | ^^^^ associated type `B` not found
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
-Some errors have detailed explanations: E0220, E0391.
-For more information about an error, try `rustc --explain E0220`.
+For more information about this error, try `rustc --explain E0391`.
const A: i32 = B; //~ ERROR cycle detected
-//~^ ERROR cycle detected
const B: i32 = A;
LL | const A: i32 = B;
| ^
note: ...which requires const checking if rvalue is promotable to static `B`...
- --> $DIR/issue-23302-3.rs:4:1
+ --> $DIR/issue-23302-3.rs:3:1
|
LL | const B: i32 = A;
| ^^^^^^^^^^^^^^^^^
note: ...which requires checking which parts of `B` are promotable to static...
- --> $DIR/issue-23302-3.rs:4:16
+ --> $DIR/issue-23302-3.rs:3:16
|
LL | const B: i32 = A;
| ^
= note: ...which again requires const checking if rvalue is promotable to static `A`, completing the cycle
= note: cycle used when running analysis passes on this crate
-error[E0391]: cycle detected when processing `A`
- --> $DIR/issue-23302-3.rs:1:16
- |
-LL | const A: i32 = B;
- | ^
- |
-note: ...which requires processing `B`...
- --> $DIR/issue-23302-3.rs:4:16
- |
-LL | const B: i32 = A;
- | ^
- = note: ...which again requires processing `A`, completing the cycle
-note: cycle used when processing `A`
- --> $DIR/issue-23302-3.rs:1:1
- |
-LL | const A: i32 = B;
- | ^^^^^^^^^^^^^^^^^
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
For more information about this error, try `rustc --explain E0391`.
-error: invalid suffix `is` for numeric literal
+error: invalid suffix `is` for integer literal
--> $DIR/old-suffixes-are-really-forbidden.rs:2:13
|
LL | let a = 1_is;
|
= help: the suffix must be one of the integral types (`u32`, `isize`, etc)
-error: invalid suffix `us` for numeric literal
+error: invalid suffix `us` for integer literal
--> $DIR/old-suffixes-are-really-forbidden.rs:3:13
|
LL | let b = 2_us;
1234f1024; //~ ERROR invalid width `1024` for float literal
1234.5f1024; //~ ERROR invalid width `1024` for float literal
- 1234suffix; //~ ERROR invalid suffix `suffix` for numeric literal
- 0b101suffix; //~ ERROR invalid suffix `suffix` for numeric literal
+ 1234suffix; //~ ERROR invalid suffix `suffix` for integer literal
+ 0b101suffix; //~ ERROR invalid suffix `suffix` for integer literal
1.0suffix; //~ ERROR invalid suffix `suffix` for float literal
1.0e10suffix; //~ ERROR invalid suffix `suffix` for float literal
}
|
= help: valid widths are 32 and 64
-error: invalid suffix `suffix` for numeric literal
+error: invalid suffix `suffix` for integer literal
--> $DIR/bad-lit-suffixes.rs:25:5
|
LL | 1234suffix;
|
= help: the suffix must be one of the integral types (`u32`, `isize`, etc)
-error: invalid suffix `suffix` for numeric literal
+error: invalid suffix `suffix` for integer literal
--> $DIR/bad-lit-suffixes.rs:26:5
|
LL | 0b101suffix;
fn main() {
9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
- //~^ ERROR int literal is too large
+ //~^ ERROR integer literal is too large
; // the span shouldn't point to this.
}
-error: int literal is too large
+error: integer literal is too large
--> $DIR/int-literal-too-large-span.rs:4:5
|
LL | 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
fn main() {
let __isize = 340282366920938463463374607431768211456; // 2^128
- //~^ ERROR int literal is too large
+ //~^ ERROR integer literal is too large
}
-error: int literal is too large
+error: integer literal is too large
--> $DIR/issue-5544-a.rs:2:19
|
LL | let __isize = 340282366920938463463374607431768211456; // 2^128
fn main() {
let __isize = 0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ff;
- //~^ ERROR int literal is too large
+ //~^ ERROR integer literal is too large
}
-error: int literal is too large
+error: integer literal is too large
--> $DIR/issue-5544-b.rs:2:19
|
LL | let __isize = 0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ff;
0o; //~ ERROR: no valid digits
1e+; //~ ERROR: expected at least one digit in exponent
0x539.0; //~ ERROR: hexadecimal float literal is not supported
- 9900000000000000000000000000999999999999999999999999999999; //~ ERROR: int literal is too large
- 9900000000000000000000000000999999999999999999999999999999; //~ ERROR: int literal is too large
+ 9900000000000000000000000000999999999999999999999999999999;
+ //~^ ERROR: integer literal is too large
+ 9900000000000000000000000000999999999999999999999999999999;
+ //~^ ERROR: integer literal is too large
0x; //~ ERROR: no valid digits
0xu32; //~ ERROR: no valid digits
0ou32; //~ ERROR: no valid digits
| ^^^^^^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:18:5
+ --> $DIR/lex-bad-numeric-literals.rs:20:5
|
LL | 0x;
| ^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:19:5
+ --> $DIR/lex-bad-numeric-literals.rs:21:5
|
LL | 0xu32;
| ^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:20:5
+ --> $DIR/lex-bad-numeric-literals.rs:22:5
|
LL | 0ou32;
| ^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:21:5
+ --> $DIR/lex-bad-numeric-literals.rs:23:5
|
LL | 0bu32;
| ^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:22:5
+ --> $DIR/lex-bad-numeric-literals.rs:24:5
|
LL | 0b;
| ^^
error: octal float literal is not supported
- --> $DIR/lex-bad-numeric-literals.rs:24:5
+ --> $DIR/lex-bad-numeric-literals.rs:26:5
|
LL | 0o123.456;
| ^^^^^^^^^
error: binary float literal is not supported
- --> $DIR/lex-bad-numeric-literals.rs:26:5
+ --> $DIR/lex-bad-numeric-literals.rs:28:5
|
LL | 0b111.101;
| ^^^^^^^^^
LL | 0o2f32;
| ^^^^^^ not supported
-error: int literal is too large
+error: integer literal is too large
--> $DIR/lex-bad-numeric-literals.rs:16:5
|
LL | 9900000000000000000000000000999999999999999999999999999999;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: int literal is too large
- --> $DIR/lex-bad-numeric-literals.rs:17:5
+error: integer literal is too large
+ --> $DIR/lex-bad-numeric-literals.rs:18:5
|
LL | 9900000000000000000000000000999999999999999999999999999999;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: octal float literal is not supported
- --> $DIR/lex-bad-numeric-literals.rs:23:5
+ --> $DIR/lex-bad-numeric-literals.rs:25:5
|
LL | 0o123f64;
| ^^^^^^^^ not supported
error: binary float literal is not supported
- --> $DIR/lex-bad-numeric-literals.rs:25:5
+ --> $DIR/lex-bad-numeric-literals.rs:27:5
|
LL | 0b101f64;
| ^^^^^^^^ not supported
0b101.010;
//~^ ERROR binary float literal is not supported
0b101p4f64;
- //~^ ERROR invalid suffix `p4f64` for numeric literal
+ //~^ ERROR invalid suffix `p4f64` for integer literal
}
LL | 0b101010f64;
| ^^^^^^^^^^^ not supported
-error: invalid suffix `p4f64` for numeric literal
+error: invalid suffix `p4f64` for integer literal
--> $DIR/no-binary-float-literal.rs:6:5
|
LL | 0b101p4f64;
--- /dev/null
+fn suggestion(opt: &mut Option<String>) {
+ opt = None; //~ ERROR mismatched types
+}
+
+fn no_suggestion(opt: &mut Result<String, ()>) {
+ opt = None //~ ERROR mismatched types
+}
+
+fn suggestion2(opt: &mut Option<String>) {
+ opt = Some(String::new())//~ ERROR mismatched types
+}
+
+fn no_suggestion2(opt: &mut Option<String>) {
+ opt = Some(42)//~ ERROR mismatched types
+}
+
+fn main() {}
--- /dev/null
+error[E0308]: mismatched types
+ --> $DIR/mut-ref-reassignment.rs:2:11
+ |
+LL | opt = None;
+ | ^^^^ expected mutable reference, found enum `std::option::Option`
+ |
+ = note: expected type `&mut std::option::Option<std::string::String>`
+ found type `std::option::Option<_>`
+help: consider dereferencing here to assign to the mutable borrowed piece of memory
+ |
+LL | *opt = None;
+ | ^^^^
+
+error[E0308]: mismatched types
+ --> $DIR/mut-ref-reassignment.rs:6:11
+ |
+LL | opt = None
+ | ^^^^ expected mutable reference, found enum `std::option::Option`
+ |
+ = note: expected type `&mut std::result::Result<std::string::String, ()>`
+ found type `std::option::Option<_>`
+
+error[E0308]: mismatched types
+ --> $DIR/mut-ref-reassignment.rs:10:11
+ |
+LL | opt = Some(String::new())
+ | ^^^^^^^^^^^^^^^^^^^ expected mutable reference, found enum `std::option::Option`
+ |
+ = note: expected type `&mut std::option::Option<std::string::String>`
+ found type `std::option::Option<std::string::String>`
+help: consider dereferencing here to assign to the mutable borrowed piece of memory
+ |
+LL | *opt = Some(String::new())
+ | ^^^^
+
+error[E0308]: mismatched types
+ --> $DIR/mut-ref-reassignment.rs:14:11
+ |
+LL | opt = Some(42)
+ | ^^^^^^^^ expected mutable reference, found enum `std::option::Option`
+ |
+ = note: expected type `&mut std::option::Option<std::string::String>`
+ found type `std::option::Option<{integer}>`
+
+error: aborting due to 4 previous errors
+
+For more information about this error, try `rustc --explain E0308`.