From f745e5f9b676be02cc1dfbab0bfb338dc72b4dd3 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 5 Jun 2019 11:56:06 +0300 Subject: [PATCH] syntax: Remove duplicate span from `token::Ident` --- src/librustc/ich/impls_syntax.rs | 4 +- src/librustdoc/html/highlight.rs | 4 +- src/libsyntax/attr/mod.rs | 10 +-- src/libsyntax/diagnostics/plugin.rs | 18 ++--- src/libsyntax/ext/base.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 39 +++++------ src/libsyntax/ext/tt/macro_rules.rs | 12 ++-- src/libsyntax/ext/tt/quoted.rs | 5 +- src/libsyntax/mut_visit.rs | 1 - src/libsyntax/parse/diagnostics.rs | 6 +- src/libsyntax/parse/lexer/mod.rs | 22 ++---- src/libsyntax/parse/literal.rs | 8 +-- src/libsyntax/parse/mod.rs | 31 ++++----- src/libsyntax/parse/parser.rs | 48 ++++++------- src/libsyntax/parse/token.rs | 94 ++++++++++++++++---------- src/libsyntax/tokenstream.rs | 4 +- src/libsyntax_ext/concat_idents.rs | 4 +- src/libsyntax_ext/format.rs | 6 +- src/libsyntax_ext/proc_macro_decls.rs | 4 +- src/libsyntax_ext/proc_macro_server.rs | 13 ++-- src/libsyntax_pos/symbol.rs | 30 ++++---- 21 files changed, 181 insertions(+), 184 deletions(-) diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index 20d308e5fe8..abe4196abd1 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -353,8 +353,8 @@ fn hash_stable(&self, } token::Literal(lit) => lit.hash_stable(hcx, hasher), - token::Ident(ident, is_raw) => { - ident.name.hash_stable(hcx, hasher); + token::Ident(name, is_raw) => { + name.hash_stable(hcx, hasher); is_raw.hash_stable(hcx, hasher); } token::Lifetime(name) => name.hash_stable(hcx, hasher), diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index bc6eaaaa8b9..281bd72deeb 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -325,8 +325,8 @@ fn write_token(&mut self, } // Keywords are also included in the identifier set. - token::Ident(ident, is_raw) => { - match ident.name { + token::Ident(name, is_raw) => { + match name { kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord, kw::SelfLower | kw::SelfUpper => Class::Self_, diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index 56afc8728b4..39ffabaa4a9 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -482,19 +482,19 @@ fn from_tokens(tokens: &mut iter::Peekable) -> Option let path = match tokens.next() { Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) | Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: { - let mut segments = if let token::Ident(ident, _) = kind { + let mut segments = if let token::Ident(name, _) = kind { if let Some(TokenTree::Token(Token { kind: token::ModSep, .. 
})) = tokens.peek() { tokens.next(); - vec![PathSegment::from_ident(ident.with_span_pos(span))] + vec![PathSegment::from_ident(Ident::new(name, span))] } else { - break 'arm Path::from_ident(ident.with_span_pos(span)); + break 'arm Path::from_ident(Ident::new(name, span)); } } else { vec![PathSegment::path_root(span)] }; loop { - if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() { - segments.push(PathSegment::from_ident(ident.with_span_pos(span))); + if let Some(TokenTree::Token(Token { kind: token::Ident(name, _), span })) = tokens.next() { + segments.push(PathSegment::from_ident(Ident::new(name, span))); } else { return None; } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index b342e4bc472..8d9848d98fb 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -39,7 +39,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>, }; ecx.parse_sess.registered_diagnostics.with_lock(|diagnostics| { - match diagnostics.get_mut(&code.name) { + match diagnostics.get_mut(&code) { // Previously used errors. Some(&mut ErrorInfo { description: _, use_site: Some(previous_span) }) => { ecx.struct_span_warn(span, &format!( @@ -72,10 +72,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>, token_tree.get(1), token_tree.get(2) ) { - (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => { + (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), None, None) => { (code, None) }, - (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), + (3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), Some(&TokenTree::Token(Token { kind: token::Comma, .. })), Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => { (code, Some(symbol)) @@ -112,7 +112,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>, description, use_site: None }; - if diagnostics.insert(code.name, info).is_some() { + if diagnostics.insert(code, info).is_some() { ecx.span_err(span, &format!( "diagnostic code {} already registered", code )); @@ -140,13 +140,13 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>, token_tree: &[TokenTree]) -> Box<dyn MacResult+'cx> { assert_eq!(token_tree.len(), 3); - let (crate_name, name) = match (&token_tree[0], &token_tree[2]) { + let (crate_name, ident) = match (&token_tree[0], &token_tree[2]) { ( // Crate name. - &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }), + &TokenTree::Token(Token { kind: token::Ident(crate_name, _), .. }), // DIAGNOSTICS ident. - &TokenTree::Token(Token { kind: token::Ident(ref name, _), ..
}) - ) => (*&crate_name, name), + &TokenTree::Token(Token { kind: token::Ident(name, _), span }) + ) => (crate_name, Ident::new(name, span)), _ => unreachable!() }; @@ -209,7 +209,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>, MacEager::items(smallvec![ P(ast::Item { - ident: *name, + ident, attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Const( diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 0c2ab672407..3b24837e365 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -269,7 +269,7 @@ fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) { if let token::Interpolated(nt) = &token.kind { if let token::NtIdent(ident, is_raw) = **nt { *tt = tokenstream::TokenTree::token(ident.span, - token::Ident(ident, is_raw)); + token::Ident(ident.name, is_raw)); } } } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index f93b548c501..82cc9e8ac22 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -74,7 +74,7 @@ pub use ParseResult::*; use TokenTreeOrTokenTreeSlice::*; -use crate::ast::Ident; +use crate::ast::{Ident, Name}; use crate::ext::tt::quoted::{self, TokenTree}; use crate::parse::{Directory, ParseSess}; use crate::parse::parser::{Parser, PathStyle}; @@ -429,8 +429,8 @@ pub fn parse_failure_msg(tok: TokenKind) -> String { /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison) fn token_name_eq(t1: &TokenKind, t2: &TokenKind) -> bool { - if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) { - id1.name == id2.name && is_raw1 == is_raw2 + if let (Some((name1, is_raw1)), Some((name2, is_raw2))) = (t1.ident_name(), t2.ident_name()) { + name1 == name2 && is_raw1 == is_raw2 } else if let (Some(name1), Some(name2)) = (t1.lifetime_name(), t2.lifetime_name()) { name1 == name2 } else { @@ -466,8 +466,7 @@ fn inner_parse_loop<'root, 'tt>( next_items: &mut Vec>, eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>, - token: &TokenKind, - span: syntax_pos::Span, + token: &Token, ) -> ParseResult<()> { // Pop items from `cur_items` until it is empty. while let Some(mut item) = cur_items.pop() { @@ -510,7 +509,7 @@ fn inner_parse_loop<'root, 'tt>( // Add matches from this repetition to the `matches` of `up` for idx in item.match_lo..item.match_hi { let sub = item.matches[idx].clone(); - let span = DelimSpan::from_pair(item.sp_open, span); + let span = DelimSpan::from_pair(item.sp_open, token.span); new_pos.push_match(idx, MatchedSeq(sub, span)); } @@ -598,7 +597,7 @@ fn inner_parse_loop<'root, 'tt>( TokenTree::MetaVarDecl(_, _, id) => { // Built-in nonterminals never start with these tokens, // so we can eliminate them from consideration. - if may_begin_with(id.name, token) { + if may_begin_with(token, id.name) { bb_items.push(item); } } @@ -698,7 +697,6 @@ pub fn parse( &mut eof_items, &mut bb_items, &parser.token, - parser.span, ) { Success(_) => {} Failure(token, msg) => return Failure(token, msg), @@ -806,10 +804,9 @@ pub fn parse( /// The token is an identifier, but not `_`. /// We prohibit passing `_` to macros expecting `ident` for now. 
-fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> { +fn get_macro_name(token: &TokenKind) -> Option<(Name, bool)> { match *token { - token::Ident(ident, is_raw) if ident.name != kw::Underscore => - Some((ident, is_raw)), + token::Ident(name, is_raw) if name != kw::Underscore => Some((name, is_raw)), _ => None, } } @@ -818,7 +815,7 @@ fn get_macro_ident(token: &TokenKind) -> Option<(Ident, bool)> { /// /// Returning `false` is a *stability guarantee* that such a matcher will *never* begin with that /// token. Be conservative (return true) if not sure. -fn may_begin_with(name: Symbol, token: &TokenKind) -> bool { +fn may_begin_with(token: &Token, name: Name) -> bool { /// Checks whether the non-terminal may contain a single (non-keyword) identifier. fn may_be_ident(nt: &token::Nonterminal) -> bool { match *nt { @@ -830,14 +827,14 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool { match name { sym::expr => token.can_begin_expr(), sym::ty => token.can_begin_type(), - sym::ident => get_macro_ident(token).is_some(), + sym::ident => get_macro_name(token).is_some(), sym::literal => token.can_begin_literal_or_bool(), - sym::vis => match *token { + sym::vis => match token.kind { // The follow-set of :vis + "priv" keyword + interpolated token::Comma | token::Ident(..) | token::Interpolated(_) => true, _ => token.can_begin_type(), }, - sym::block => match *token { + sym::block => match token.kind { token::OpenDelim(token::Brace) => true, token::Interpolated(ref nt) => match **nt { token::NtItem(_) @@ -851,7 +848,7 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool { }, _ => false, }, - sym::path | sym::meta => match *token { + sym::path | sym::meta => match token.kind { token::ModSep | token::Ident(..) => true, token::Interpolated(ref nt) => match **nt { token::NtPath(_) | token::NtMeta(_) => true, @@ -859,7 +856,7 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool { }, _ => false, }, - sym::pat => match *token { + sym::pat => match token.kind { token::Ident(..) 
| // box, ref, mut, and other identifiers (can stricten) token::OpenDelim(token::Paren) | // tuple pattern token::OpenDelim(token::Bracket) | // slice pattern @@ -875,7 +872,7 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool { token::Interpolated(ref nt) => may_be_ident(nt), _ => false, }, - sym::lifetime => match *token { + sym::lifetime => match token.kind { token::Lifetime(_) => true, token::Interpolated(ref nt) => match **nt { token::NtLifetime(_) | token::NtTT(_) => true, @@ -883,7 +880,7 @@ fn may_be_ident(nt: &token::Nonterminal) -> bool { }, _ => false, }, - _ => match *token { + _ => match token.kind { token::CloseDelim(_) => false, _ => true, }, @@ -929,10 +926,10 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal { sym::literal => token::NtLiteral(panictry!(p.parse_literal_maybe_minus())), sym::ty => token::NtTy(panictry!(p.parse_ty())), // this could be handled like a token, since it is one - sym::ident => if let Some((ident, is_raw)) = get_macro_ident(&p.token) { + sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) { let span = p.span; p.bump(); - token::NtIdent(Ident::new(ident.name, span), is_raw) + token::NtIdent(Ident::new(name, span), is_raw) } else { let token_str = pprust::token_to_string(&p.token); p.fatal(&format!("expected ident, found {}", &token_str)).emit(); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 05e921b1bfd..77f53c35b0b 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -1046,8 +1046,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes, - Ident(i, false) if i.name == kw::If || - i.name == kw::In => IsInFollow::Yes, + Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, _ => IsInFollow::No(tokens), }, _ => IsInFollow::No(tokens), @@ -1064,8 +1063,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi | BinOp(token::Or) => IsInFollow::Yes, - Ident(i, false) if i.name == kw::As || - i.name == kw::Where => IsInFollow::Yes, + Ident(name, false) if name == kw::As || + name == kw::Where => IsInFollow::Yes, _ => IsInFollow::No(tokens), }, TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block => @@ -1092,9 +1091,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { Comma => IsInFollow::Yes, - Ident(i, is_raw) if is_raw || i.name != kw::Priv => - IsInFollow::Yes, + Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes, - ref tok => if tok.can_begin_type() { + _ => if token.can_begin_type() { IsInFollow::Yes } else { IsInFollow::No(tokens) diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 558b07af611..582d87b911d 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -323,10 +323,9 @@ fn parse_tree( // metavariable that names the crate of the invocation.
Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => { let (ident, is_raw) = token.ident().unwrap(); - let span = token.span.with_lo(span.lo()); + let span = ident.span.with_lo(span.lo()); if ident.name == kw::Crate && !is_raw { - let ident = ast::Ident::new(kw::DollarCrate, ident.span); - TokenTree::token(span, token::Ident(ident, is_raw)) + TokenTree::token(span, token::Ident(kw::DollarCrate, is_raw)) } else { TokenTree::MetaVar(span, ident) } diff --git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index 3bb36605299..7eb88de2281 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -598,7 +598,6 @@ pub fn noop_visit_tts(TokenStream(tts): &mut TokenStream, vis: &m // apply ident visitor if it's an ident, apply other visits to interpolated nodes pub fn noop_visit_token(t: &mut TokenKind, vis: &mut T) { match t { - token::Ident(id, _is_raw) => vis.visit_ident(id), token::Interpolated(nt) => { let mut nt = Lrc::make_mut(nt); vis.visit_interpolated(&mut nt); diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index 1759a229cf4..7830b2ce880 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -201,12 +201,12 @@ pub fn span_err>(&self, sp: S, m: &str) { self.span, &format!("expected identifier, found {}", self.this_token_descr()), ); - if let token::Ident(ident, false) = &self.token.kind { - if ident.is_raw_guess() { + if let token::Ident(name, false) = self.token.kind { + if Ident::new(name, self.span).is_raw_guess() { err.span_suggestion( self.span, "you can escape reserved keywords to use them as identifiers", - format!("r#{}", ident), + format!("r#{}", name), Applicability::MaybeIncorrect, ); } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index da8c6f5ac22..e3d959c2c54 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -1,4 +1,4 @@ -use crate::ast::{self, Ident}; +use crate::ast; use crate::parse::ParseSess; use crate::parse::token::{self, Token, TokenKind}; use crate::symbol::{sym, Symbol}; @@ -61,15 +61,6 @@ fn mk_sp_and_raw(&self, lo: BytePos, hi: BytePos) -> (Span, Span) { (real, raw) } - fn mk_ident(&self, string: &str) -> Ident { - let mut ident = Ident::from_str(string); - if let Some(span) = self.override_span { - ident.span = span; - } - - ident - } - fn unwrap_or_abort(&mut self, res: Result) -> Token { match res { Ok(tok) => tok, @@ -858,17 +849,17 @@ fn next_token_inner(&mut self) -> Result { return Ok(self.with_str_from(start, |string| { // FIXME: perform NFKC normalization here. 
(Issue #2253) - let ident = self.mk_ident(string); + let name = ast::Name::intern(string); if is_raw_ident { let span = self.mk_sp(raw_start, self.pos); - if !ident.can_be_raw() { - self.err_span(span, &format!("`{}` cannot be a raw identifier", ident)); + if !name.can_be_raw() { + self.err_span(span, &format!("`{}` cannot be a raw identifier", name)); } self.sess.raw_identifier_spans.borrow_mut().push(span); } - token::Ident(ident, is_raw_ident) + token::Ident(name, is_raw_ident) })); } } @@ -1567,12 +1558,11 @@ fn t1() { &sh, "/* my source file */ fn main() { println!(\"zebra\"); }\n" .to_string()); - let id = Ident::from_str("fn"); assert_eq!(string_reader.next_token(), token::Comment); assert_eq!(string_reader.next_token(), token::Whitespace); let tok1 = string_reader.next_token(); let tok2 = Token::new( - token::Ident(id, false), + token::Ident(Symbol::intern("fn"), false), Span::new(BytePos(21), BytePos(23), NO_EXPANSION), ); assert_eq!(tok1.kind, tok2.kind); diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 978fd205ea4..7b27304071c 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -1,6 +1,6 @@ //! Code related to parsing literals. -use crate::ast::{self, Ident, Lit, LitKind}; +use crate::ast::{self, Lit, LitKind}; use crate::parse::parser::Parser; use crate::parse::PResult; use crate::parse::token::{self, Token, TokenKind}; @@ -230,8 +230,8 @@ fn from_lit_token(token: token::Lit, span: Span) -> Result { /// Converts arbitrary token into an AST literal. crate fn from_token(token: &TokenKind, span: Span) -> Result { let lit = match *token { - token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False => - token::Lit::new(token::Bool, ident.name, None), + token::Ident(name, false) if name == kw::True || name == kw::False => + token::Lit::new(token::Bool, name, None), token::Literal(lit) => lit, token::Interpolated(ref nt) => { @@ -258,7 +258,7 @@ pub fn from_lit_kind(node: LitKind, span: Span) -> Lit { /// Losslessly convert an AST literal into a token stream. crate fn tokens(&self) -> TokenStream { let token = match self.token.kind { - token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false), + token::Bool => token::Ident(self.token.symbol, false), _ => token::Literal(self.token), }; TokenTree::token(self.span, token).into() diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 5187621258d..2b82767d7e9 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -382,11 +382,12 @@ pub fn none() -> SeqSep { #[cfg(test)] mod tests { use super::*; - use crate::ast::{self, Ident, PatKind}; + use crate::ast::{self, Name, PatKind}; use crate::attr::first_attr_value_str_by_name; use crate::ptr::P; use crate::parse::token::Token; use crate::print::pprust::item_to_string; + use crate::symbol::{kw, sym}; use crate::tokenstream::{DelimSpan, TokenTree}; use crate::util::parser_testing::string_to_stream; use crate::util::parser_testing::{string_to_expr, string_to_item}; @@ -418,8 +419,6 @@ fn sp(a: u32, b: u32) -> Span { #[test] fn string_to_tts_macro () { with_default_globals(|| { - use crate::symbol::sym; - let tts: Vec<_> = string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect(); let tts: &[TokenTree] = &tts[..]; @@ -432,8 +431,7 @@ fn string_to_tts_macro () { Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. 
})), Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)), ) - if name_macro_rules.name == sym::macro_rules - && name_zip.name.as_str() == "zip" => { + if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => { let tts = ¯o_tts.trees().collect::>(); match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( @@ -448,9 +446,9 @@ fn string_to_tts_macro () { ( 2, Some(&TokenTree::Token(Token { kind: token::Dollar, .. })), - Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })), + Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })), ) - if first_delim == token::Paren && ident.name.as_str() == "a" => {}, + if first_delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), } let tts = &second_tts.trees().collect::>(); @@ -458,9 +456,9 @@ fn string_to_tts_macro () { ( 2, Some(&TokenTree::Token(Token { kind: token::Dollar, .. })), - Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })), + Some(&TokenTree::Token(Token { kind: token::Ident(name, false), .. })), ) - if second_delim == token::Paren && ident.name.as_str() == "a" => {}, + if second_delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 4: {:?} {:?}", second_delim, second_tts), } }, @@ -478,25 +476,22 @@ fn string_to_tts_1() { let tts = string_to_stream("fn a (b : i32) { b; }".to_string()); let expected = TokenStream::new(vec![ - TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(), - TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(), + TokenTree::token(sp(0, 2), token::Ident(kw::Fn, false)).into(), + TokenTree::token(sp(3, 4), token::Ident(Name::intern("a"), false)).into(), TokenTree::Delimited( DelimSpan::from_pair(sp(5, 6), sp(13, 14)), token::DelimToken::Paren, TokenStream::new(vec![ - TokenTree::token(sp(6, 7), - token::Ident(Ident::from_str("b"), false)).into(), + TokenTree::token(sp(6, 7), token::Ident(Name::intern("b"), false)).into(), TokenTree::token(sp(8, 9), token::Colon).into(), - TokenTree::token(sp(10, 13), - token::Ident(Ident::from_str("i32"), false)).into(), + TokenTree::token(sp(10, 13), token::Ident(sym::i32, false)).into(), ]).into(), ).into(), TokenTree::Delimited( DelimSpan::from_pair(sp(15, 16), sp(20, 21)), token::DelimToken::Brace, TokenStream::new(vec![ - TokenTree::token(sp(17, 18), - token::Ident(Ident::from_str("b"), false)).into(), + TokenTree::token(sp(17, 18), token::Ident(Name::intern("b"), false)).into(), TokenTree::token(sp(18, 19), token::Semi).into(), ]).into(), ).into() @@ -604,8 +599,6 @@ fn wb() -> c_int { O_WRONLY as c_int } #[test] fn crlf_doc_comments() { with_default_globals(|| { - use crate::symbol::sym; - let sess = ParseSess::new(FilePathMapping::empty()); let name_1 = FileName::Custom("crlf_source_1".to_string()); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 362f81d02a0..57a49d1524d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -362,7 +362,7 @@ fn next_desugared(&mut self) -> Token { delim_span, token::Bracket, [ - TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)), + TokenTree::token(sp, token::Ident(sym::doc, false)), TokenTree::token(sp, token::Eq), TokenTree::token(sp, token::TokenKind::lit( token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None @@ -541,9 +541,9 @@ pub fn this_token_to_string(&self) -> String { crate fn token_descr(&self) -> Option<&'static str> { Some(match 
&self.token.kind { - t if t.is_special_ident() => "reserved identifier", - t if t.is_used_keyword() => "keyword", - t if t.is_unused_keyword() => "reserved keyword", + _ if self.token.is_special_ident() => "reserved identifier", + _ if self.token.is_used_keyword() => "keyword", + _ if self.token.is_unused_keyword() => "reserved keyword", token::DocComment(..) => "doc comment", _ => return None, }) @@ -619,7 +619,7 @@ pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { match self.token.kind { - token::Ident(ident, _) => { + token::Ident(name, _) => { if self.token.is_reserved_ident() { let mut err = self.expected_ident_found(); if recover { @@ -630,7 +630,7 @@ fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { } let span = self.span; self.bump(); - Ok(Ident::new(ident.name, span)) + Ok(Ident::new(name, span)) } _ => { Err(if self.prev_token_kind == PrevTokenKind::DocComment { @@ -1618,10 +1618,10 @@ fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { match self.token.kind { - token::Ident(ident, _) if self.token.is_path_segment_keyword() => { + token::Ident(name, _) if name.is_path_segment_keyword() => { let span = self.span; self.bump(); - Ok(Ident::new(ident.name, span)) + Ok(Ident::new(name, span)) } _ => self.parse_ident(), } @@ -1629,10 +1629,10 @@ fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { match self.token.kind { - token::Ident(ident, false) if ident.name == kw::Underscore => { + token::Ident(name, false) if name == kw::Underscore => { let span = self.span; self.bump(); - Ok(Ident::new(ident.name, span)) + Ok(Ident::new(name, span)) } _ => self.parse_ident(), } @@ -2368,13 +2368,11 @@ fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec, lo: Span) -> PResult<'a, self.look_ahead(1, |t| t.is_ident()) => { self.bump(); let name = match self.token.kind { - token::Ident(ident, _) => ident, + token::Ident(name, _) => name, _ => unreachable!() }; let mut err = self.fatal(&format!("unknown macro variable `{}`", name)); @@ -2651,7 +2649,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, // Interpolated identifier and lifetime tokens are replaced with usual identifier // and lifetime tokens, so the former are never encountered during normal parsing. match **nt { - token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident, is_raw), ident.span), + token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident.name, is_raw), ident.span), token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span), _ => return, } @@ -2766,7 +2764,7 @@ fn parse_prefix_expr(&mut self, let token_cannot_continue_expr = |t: &Token| match t.kind { // These tokens can start an expression after `!`, but // can't continue an expression after an ident - token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), + token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), token::Literal(..) | token::Pound => true, token::Interpolated(ref nt) => match **nt { token::NtIdent(..) | token::NtExpr(..) 
| @@ -4328,7 +4326,7 @@ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span) -> PResult<'a, Option>> { let token_lo = self.span; let (ident, def) = match self.token.kind { - token::Ident(ident, false) if ident.name == kw::Macro => { + token::Ident(name, false) if name == kw::Macro => { self.bump(); let ident = self.parse_ident()?; let tokens = if self.check(&token::OpenDelim(token::Brace)) { @@ -4356,8 +4354,8 @@ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span) (ident, ast::MacroDef { tokens: tokens.into(), legacy: false }) } - token::Ident(ident, _) if ident.name == sym::macro_rules && - self.look_ahead(1, |t| *t == token::Not) => { + token::Ident(name, _) if name == sym::macro_rules && + self.look_ahead(1, |t| *t == token::Not) => { let prev_span = self.prev_span; self.complain_if_pub_macro(&vis.node, prev_span); self.bump(); @@ -5481,8 +5479,8 @@ fn parse_fn_decl(&mut self, allow_c_variadic: bool) -> PResult<'a, P> { fn parse_self_arg(&mut self) -> PResult<'a, Option> { let expect_ident = |this: &mut Self| match this.token.kind { // Preserve hygienic context. - token::Ident(ident, _) => - { let span = this.span; this.bump(); Ident::new(ident.name, span) } + token::Ident(name, _) => + { let span = this.span; this.bump(); Ident::new(name, span) } _ => unreachable!() }; let isolated_self = |this: &mut Self, n| { @@ -5805,11 +5803,7 @@ fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) { match *vis { VisibilityKind::Inherited => {} _ => { - let is_macro_rules: bool = match self.token.kind { - token::Ident(sid, _) => sid.name == sym::macro_rules, - _ => false, - }; - let mut err = if is_macro_rules { + let mut err = if self.token.is_keyword(sym::macro_rules) { let mut err = self.diagnostic() .struct_span_err(sp, "can't qualify macro_rules invocation with `pub`"); err.span_suggestion( diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 81c93a4179e..ba7c88e7000 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -118,8 +118,8 @@ pub fn new(kind: LitKind, symbol: Symbol, suffix: Option) -> Lit { } } -pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool { - let ident_token: TokenKind = Ident(ident, is_raw); +pub(crate) fn ident_can_begin_expr(name: ast::Name, span: Span, is_raw: bool) -> bool { + let ident_token = Token::new(Ident(name, is_raw), span); !ident_token.is_reserved_ident() || ident_token.is_path_segment_keyword() || @@ -146,11 +146,11 @@ pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool { kw::While, kw::Yield, kw::Static, - ].contains(&ident.name) + ].contains(&name) } -fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool { - let ident_token: TokenKind = Ident(ident, is_raw); +fn ident_can_begin_type(name: ast::Name, span: Span, is_raw: bool) -> bool { + let ident_token = Token::new(Ident(name, is_raw), span); !ident_token.is_reserved_ident() || ident_token.is_path_segment_keyword() || @@ -163,7 +163,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool { kw::Extern, kw::Typeof, kw::Dyn, - ].contains(&ident.name) + ].contains(&name) } #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)] @@ -210,7 +210,7 @@ pub enum TokenKind { Literal(Lit), /* Name components */ - Ident(ast::Ident, /* is_raw */ bool), + Ident(ast::Name, /* is_raw */ bool), Lifetime(ast::Name), Interpolated(Lrc), @@ -245,7 +245,7 @@ pub struct Token { impl TokenKind { /// Recovers a `TokenKind` from an 
`ast::Ident`. This creates a raw identifier if necessary. pub fn from_ast_ident(ident: ast::Ident) -> TokenKind { - Ident(ident, ident.is_raw_guess()) + Ident(ident.name, ident.is_raw_guess()) } crate fn is_like_plus(&self) -> bool { @@ -254,12 +254,14 @@ pub fn from_ast_ident(ident: ast::Ident) -> TokenKind { _ => false, } } +} +impl Token { /// Returns `true` if the token can appear at the start of an expression. crate fn can_begin_expr(&self) -> bool { - match *self { - Ident(ident, is_raw) => - ident_can_begin_expr(ident, is_raw), // value name or keyword + match self.kind { + Ident(name, is_raw) => + ident_can_begin_expr(name, self.span, is_raw), // value name or keyword OpenDelim(..) | // tuple, array or block Literal(..) | // literal Not | // operator not @@ -289,9 +291,9 @@ pub fn from_ast_ident(ident: ast::Ident) -> TokenKind { /// Returns `true` if the token can appear at the start of a type. crate fn can_begin_type(&self) -> bool { - match *self { - Ident(ident, is_raw) => - ident_can_begin_type(ident, is_raw), // type name or keyword + match self.kind { + Ident(name, is_raw) => + ident_can_begin_type(name, self.span, is_raw), // type name or keyword OpenDelim(Paren) | // tuple OpenDelim(Bracket) | // array Not | // never @@ -309,7 +311,9 @@ pub fn from_ast_ident(ident: ast::Ident) -> TokenKind { _ => false, } } +} +impl TokenKind { /// Returns `true` if the token can appear at the start of a const param. pub fn can_begin_const_arg(&self) -> bool { match self { @@ -323,13 +327,17 @@ pub fn can_begin_const_arg(&self) -> bool { _ => self.can_begin_literal_or_bool(), } } +} +impl Token { /// Returns `true` if the token can appear at the start of a generic bound. crate fn can_begin_bound(&self) -> bool { self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) || self == &Question || self == &OpenDelim(Paren) } +} +impl TokenKind { pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option) -> TokenKind { Literal(Lit::new(kind, symbol, suffix)) } @@ -355,8 +363,8 @@ pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option) -> TokenKind { match *self { Literal(..) => true, BinOp(Minus) => true, - Ident(ident, false) if ident.name == kw::True => true, - Ident(ident, false) if ident.name == kw::False => true, + Ident(name, false) if name == kw::True => true, + Ident(name, false) if name == kw::False => true, Interpolated(ref nt) => match **nt { NtLiteral(..) => true, _ => false, @@ -367,6 +375,18 @@ pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option) -> TokenKind { } impl Token { + /// Returns an identifier if this token is an identifier. + pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> { + match self.kind { + Ident(name, is_raw) => Some((ast::Ident::new(name, self.span), is_raw)), + Interpolated(ref nt) => match **nt { + NtIdent(ident, is_raw) => Some((ident, is_raw)), + _ => None, + }, + _ => None, + } + } + /// Returns a lifetime identifier if this token is a lifetime. pub fn lifetime(&self) -> Option { match self.kind { @@ -381,12 +401,12 @@ pub fn lifetime(&self) -> Option { } impl TokenKind { - /// Returns an identifier if this token is an identifier. - pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> { + /// Returns an identifier name if this token is an identifier. 
+ pub fn ident_name(&self) -> Option<(ast::Name, /* is_raw */ bool)> { match *self { - Ident(ident, is_raw) => Some((ident, is_raw)), + Ident(name, is_raw) => Some((name, is_raw)), Interpolated(ref nt) => match **nt { - NtIdent(ident, is_raw) => Some((ident, is_raw)), + NtIdent(ident, is_raw) => Some((ident.name, is_raw)), _ => None, }, _ => None, @@ -405,7 +425,7 @@ pub fn lifetime_name(&self) -> Option { } /// Returns `true` if the token is an identifier. pub fn is_ident(&self) -> bool { - self.ident().is_some() + self.ident_name().is_some() } /// Returns `true` if the token is a lifetime. crate fn is_lifetime(&self) -> bool { @@ -415,10 +435,7 @@ pub fn is_ident(&self) -> bool { /// Returns `true` if the token is a identifier whose name is the given /// string slice. crate fn is_ident_named(&self, name: Symbol) -> bool { - match self.ident() { - Some((ident, _)) => ident.name == name, - None => false - } + self.ident_name().map_or(false, |(ident_name, _)| ident_name == name) } /// Returns `true` if the token is an interpolated path. @@ -440,24 +457,30 @@ fn is_path(&self) -> bool { crate fn is_qpath_start(&self) -> bool { self == &Lt || self == &BinOp(Shl) } +} +impl Token { crate fn is_path_start(&self) -> bool { self == &ModSep || self.is_qpath_start() || self.is_path() || self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident() } +} +impl TokenKind { /// Returns `true` if the token is a given keyword, `kw`. pub fn is_keyword(&self, kw: Symbol) -> bool { - self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false) + self.ident_name().map(|(name, is_raw)| name == kw && !is_raw).unwrap_or(false) } pub fn is_path_segment_keyword(&self) -> bool { - match self.ident() { - Some((id, false)) => id.is_path_segment_keyword(), + match self.ident_name() { + Some((name, false)) => name.is_path_segment_keyword(), _ => false, } } +} +impl Token { // Returns true for reserved identifiers used internally for elided lifetimes, // unnamed method parameters, crate root module, error recovery etc. 
pub fn is_special_ident(&self) -> bool { @@ -490,7 +513,9 @@ pub fn is_reserved_ident(&self) -> bool { _ => false, } } +} +impl TokenKind { crate fn glue(self, joint: TokenKind) -> Option { Some(match self { Eq => match joint { @@ -537,7 +562,7 @@ pub fn is_reserved_ident(&self) -> bool { _ => return None, }, SingleQuote => match joint { - Ident(ident, false) => Lifetime(Symbol::intern(&format!("'{}", ident))), + Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))), _ => return None, }, @@ -608,9 +633,9 @@ pub fn is_reserved_ident(&self) -> bool { (&Literal(a), &Literal(b)) => a == b, (&Lifetime(a), &Lifetime(b)) => a == b, - (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name || - a.name == kw::DollarCrate || - c.name == kw::DollarCrate), + (&Ident(a, b), &Ident(c, d)) => b == d && (a == c || + a == kw::DollarCrate || + c == kw::DollarCrate), (&Interpolated(_), &Interpolated(_)) => false, @@ -738,8 +763,7 @@ pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream { prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) } Nonterminal::NtIdent(ident, is_raw) => { - let token = Ident(ident, is_raw); - Some(TokenTree::token(ident.span, token).into()) + Some(TokenTree::token(ident.span, Ident(ident.name, is_raw)).into()) } Nonterminal::NtLifetime(ident) => { Some(TokenTree::token(ident.span, Lifetime(ident.name)).into()) @@ -827,7 +851,7 @@ fn prepend_attrs(sess: &ParseSess, // For simple paths, push the identifier directly if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() { let ident = attr.path.segments[0].ident; - let token = Ident(ident, ident.as_str().starts_with("r#")); + let token = Ident(ident.name, ident.as_str().starts_with("r#")); brackets.push(tokenstream::TokenTree::token(ident.span, token)); // ... and for more complicated paths, fall back to a reparse hack that diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 140b77b6b5f..bb80c1a1b3f 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -575,7 +575,7 @@ pub fn apply_mark(self, mark: Mark) -> Self { #[cfg(test)] mod tests { use super::*; - use crate::syntax::ast::Ident; + use crate::syntax::ast::Name; use crate::with_default_globals; use crate::util::parser_testing::string_to_stream; use syntax_pos::{Span, BytePos, NO_EXPANSION}; @@ -660,7 +660,7 @@ fn test_is_empty() { with_default_globals(|| { let test0: TokenStream = Vec::::new().into_iter().collect(); let test1: TokenStream = - TokenTree::token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into(); + TokenTree::token(sp(0, 1), token::Ident(Name::intern("a"), false)).into(); let test2 = string_to_ts("foo(bar::baz)"); assert_eq!(test0.is_empty(), true); diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index 59f25af3742..8f061abc77b 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -38,8 +38,8 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>, } } else { match *e { - TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) => - res_str.push_str(&ident.as_str()), + TokenTree::Token(Token { kind: token::Ident(name, _), .. }) => + res_str.push_str(&name.as_str()), _ => { cx.span_err(sp, "concat_idents! 
requires ident args."); return DummyResult::any(sp); diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 0eaac544e33..c78215b77a9 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -149,16 +149,16 @@ fn parse_args<'a>( } // accept trailing commas if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) { named = true; - let ident = if let token::Ident(i, _) = p.token.kind { + let name = if let token::Ident(name, _) = p.token.kind { p.bump(); - i + name } else { return Err(ecx.struct_span_err( p.span, "expected ident, positional arguments cannot follow named arguments", )); }; - let name: &str = &ident.as_str(); + let name: &str = &name.as_str(); p.expect(&token::Eq)?; let e = p.parse_expr()?; diff --git a/src/libsyntax_ext/proc_macro_decls.rs b/src/libsyntax_ext/proc_macro_decls.rs index de8b689396f..29297aa913e 100644 --- a/src/libsyntax_ext/proc_macro_decls.rs +++ b/src/libsyntax_ext/proc_macro_decls.rs @@ -132,7 +132,7 @@ fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribut } }; - if !trait_ident.can_be_raw() { + if !trait_ident.name.can_be_raw() { self.handler.span_err(trait_attr.span, &format!("`{}` cannot be a name of derive macro", trait_ident)); } @@ -166,7 +166,7 @@ fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribut return None; } }; - if !ident.can_be_raw() { + if !ident.name.can_be_raw() { self.handler.span_err( attr.span, &format!("`{}` cannot be a name of derive helper attribute", ident), diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs index 6ab613d2abd..ff2835c70f7 100644 --- a/src/libsyntax_ext/proc_macro_server.rs +++ b/src/libsyntax_ext/proc_macro_server.rs @@ -142,9 +142,8 @@ macro_rules! op { Question => op!('?'), SingleQuote => op!('\''), - Ident(ident, false) if ident.name == kw::DollarCrate => - tt!(Ident::dollar_crate()), - Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)), + Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()), + Ident(name, is_raw) => tt!(Ident::new(name, is_raw)), Lifetime(name) => { let ident = ast::Ident::new(name, span).without_first_quote(); stack.push(tt!(Ident::new(ident.name, false))); @@ -159,7 +158,7 @@ macro_rules! op { escaped.extend(ch.escape_debug()); } let stream = vec![ - Ident(ast::Ident::new(sym::doc, span), false), + Ident(sym::doc, false), Eq, TokenKind::lit(token::Str, Symbol::intern(&escaped), None), ] @@ -211,8 +210,7 @@ fn to_internal(self) -> TokenStream { .into(); } TokenTree::Ident(self::Ident { sym, is_raw, span }) => { - let token = Ident(ast::Ident::new(sym, span), is_raw); - return tokenstream::TokenTree::token(span, token).into(); + return tokenstream::TokenTree::token(span, Ident(sym, is_raw)).into(); } TokenTree::Literal(self::Literal { lit: token::Lit { kind: token::Integer, symbol, suffix }, @@ -338,7 +336,8 @@ fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident { if !Self::is_valid(&string) { panic!("`{:?}` is not a valid identifier", string) } - if is_raw && !ast::Ident::from_interned_str(sym.as_interned_str()).can_be_raw() { + // Get rid of gensyms to conservatively check rawness on the string contents only. 
+ if is_raw && !sym.as_interned_str().as_symbol().can_be_raw() { panic!("`{}` cannot be a raw identifier", string); } Ident { sym, is_raw, span } diff --git a/src/libsyntax_pos/symbol.rs b/src/libsyntax_pos/symbol.rs index 4e080d115d2..c37aae0bf31 100644 --- a/src/libsyntax_pos/symbol.rs +++ b/src/libsyntax_pos/symbol.rs @@ -1019,6 +1019,21 @@ fn is_unused_keyword_2018(self) -> bool { pub fn is_doc_keyword(self) -> bool { self <= kw::Union } + + /// A keyword or reserved identifier that can be used as a path segment. + pub fn is_path_segment_keyword(self) -> bool { + self == kw::Super || + self == kw::SelfLower || + self == kw::SelfUpper || + self == kw::Crate || + self == kw::PathRoot || + self == kw::DollarCrate + } + + /// This symbol can be a raw identifier. + pub fn can_be_raw(self) -> bool { + self != kw::Invalid && self != kw::Underscore && !self.is_path_segment_keyword() + } } impl Ident { @@ -1049,24 +1064,13 @@ pub fn is_reserved(self) -> bool { /// A keyword or reserved identifier that can be used as a path segment. pub fn is_path_segment_keyword(self) -> bool { - self.name == kw::Super || - self.name == kw::SelfLower || - self.name == kw::SelfUpper || - self.name == kw::Crate || - self.name == kw::PathRoot || - self.name == kw::DollarCrate - } - - /// This identifier can be a raw identifier. - pub fn can_be_raw(self) -> bool { - self.name != kw::Invalid && self.name != kw::Underscore && - !self.is_path_segment_keyword() + self.name.is_path_segment_keyword() } /// We see this identifier in a normal identifier position, like variable name or a type. /// How was it written originally? Did it use the raw form? Let's try to guess. pub fn is_raw_guess(self) -> bool { - self.can_be_raw() && self.is_reserved() + self.name.can_be_raw() && self.is_reserved() } } -- 2.44.0
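The idea in the subject line, in one picture: `token::Ident` now carries only the interned name plus the raw flag, the span lives once on the enclosing `Token`, and callers that need a full `ast::Ident` rebuild it as `Ident::new(name, token.span)` (see the `Token::ident` hunk in `src/libsyntax/parse/token.rs` and the parser hunks above). Below is a minimal, self-contained sketch of that layout; `Span`, `Name`, `TokenKind`, `Token`, and `Ident` are simplified stand-ins rather than the real libsyntax types.

// Simplified stand-ins for the libsyntax types touched by the patch.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

// Stand-in for an interned `ast::Name` / `Symbol`.
#[derive(Clone, Debug, PartialEq)]
struct Name(String);

#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    // After the patch: interned name + raw flag, no span of its own.
    Ident(Name, /* is_raw */ bool),
    Semi,
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
    span: Span,
}

// Stand-in for `ast::Ident`, which still owns a span.
#[derive(Clone, Debug, PartialEq)]
struct Ident {
    name: Name,
    span: Span,
}

impl Token {
    // Loosely mirrors the new `Token::ident`: the full identifier is rebuilt
    // on demand from the token-level span instead of being stored twice.
    fn ident(&self) -> Option<(Ident, bool)> {
        match &self.kind {
            TokenKind::Ident(name, is_raw) => {
                Some((Ident { name: name.clone(), span: self.span }, *is_raw))
            }
            _ => None,
        }
    }
}

fn main() {
    let ident_tok = Token {
        kind: TokenKind::Ident(Name("foo".to_string()), false),
        span: Span { lo: 0, hi: 3 },
    };
    // Callers that previously pulled the span out of `token::Ident` now take
    // it from the token itself.
    let (ident, is_raw) = ident_tok.ident().unwrap();
    assert_eq!(ident.span, ident_tok.span);
    assert!(!is_raw);

    // Non-identifier tokens simply yield no identifier.
    let semi_tok = Token { kind: TokenKind::Semi, span: Span { lo: 3, hi: 4 } };
    assert!(semi_tok.ident().is_none());

    println!("{:?} raw={}", ident, is_raw);
}

Keeping the span only on `Token` removes the second copy that previously sat inside `token::Ident` and had to be kept in sync with the token's own span, which is what the hunks above delete.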