From aa6fba98ae717d6090cdd5d0569114adfc825680 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Wed, 5 Jun 2019 01:17:07 +0300 Subject: [PATCH] syntax: Use `Token` in `Parser` --- src/libsyntax/ext/tt/macro_parser.rs | 17 ++- src/libsyntax/ext/tt/macro_rules.rs | 16 +-- src/libsyntax/parse/attr.rs | 12 +- src/libsyntax/parse/diagnostics.rs | 12 +- src/libsyntax/parse/literal.rs | 26 ++--- src/libsyntax/parse/mod.rs | 2 +- src/libsyntax/parse/parser.rs | 161 ++++++++++++++------------- src/libsyntax_ext/asm.rs | 2 +- src/libsyntax_ext/assert.rs | 2 +- src/libsyntax_ext/format.rs | 2 +- 10 files changed, 126 insertions(+), 126 deletions(-) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 6acdffedd6b..4f681a77ed3 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -273,7 +273,7 @@ pub enum ParseResult { Success(T), /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected /// end of macro invocation. Otherwise, it indicates that no rules expected the given token. - Failure(syntax_pos::Span, TokenKind, &'static str), + Failure(Token, &'static str), /// Fatal error (malformed macro?). Abort compilation. Error(syntax_pos::Span, String), } @@ -701,7 +701,7 @@ pub fn parse( parser.span, ) { Success(_) => {} - Failure(sp, tok, t) => return Failure(sp, tok, t), + Failure(token, msg) => return Failure(token, msg), Error(sp, msg) => return Error(sp, msg), } @@ -727,13 +727,13 @@ pub fn parse( "ambiguity: multiple successful parses".to_string(), ); } else { + let span = if parser.span.is_dummy() { + parser.span + } else { + sess.source_map().next_point(parser.span) + }; return Failure( - if parser.span.is_dummy() { - parser.span - } else { - sess.source_map().next_point(parser.span) - }, - token::Eof, + Token { kind: token::Eof, span }, "missing tokens in macro arguments", ); } @@ -771,7 +771,6 @@ pub fn parse( // then there is a syntax error. 
else if bb_items.is_empty() && next_items.is_empty() { return Failure( - parser.span, parser.token.clone(), "no rules expected this token in macro call", ); diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 703ad0053a0..05e921b1bfd 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -190,10 +190,10 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>, arm_span, }) } - Failure(sp, tok, t) => if sp.lo() >= best_fail_spot.lo() { - best_fail_spot = sp; - best_fail_tok = Some(tok); - best_fail_text = Some(t); + Failure(token, msg) => if token.span.lo() >= best_fail_spot.lo() { + best_fail_spot = token.span; + best_fail_tok = Some(token.kind); + best_fail_text = Some(msg); }, Error(err_sp, ref msg) => { cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]) @@ -288,11 +288,11 @@ pub fn compile( let argument_map = match parse(sess, body.stream(), &argument_gram, None, true) { Success(m) => m, - Failure(sp, tok, t) => { - let s = parse_failure_msg(tok); - let sp = sp.substitute_dummy(def.span); + Failure(token, msg) => { + let s = parse_failure_msg(token.kind); + let sp = token.span.substitute_dummy(def.span); let mut err = sess.span_diagnostic.struct_span_fatal(sp, &s); - err.span_label(sp, t); + err.span_label(sp, msg); err.emit(); FatalError.raise(); } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 9b78b56041f..8040168a67e 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -24,7 +24,7 @@ impl<'a> Parser<'a> { let mut just_parsed_doc_comment = false; loop { debug!("parse_outer_attributes: self.token={:?}", self.token); - match self.token { + match self.token.kind { token::Pound => { let inner_error_reason = if just_parsed_doc_comment { "an inner attribute is not permitted following an outer doc comment" @@ -81,7 +81,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self, debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token); - let (span, path, tokens, style) = match self.token { + let (span, path, tokens, style) = match self.token.kind { token::Pound => { let lo = self.span; self.bump(); @@ -140,7 +140,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self, /// PATH `=` TOKEN_TREE /// The delimiters or `=` are still put into the resulting token stream. crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { - let meta = match self.token { + let meta = match self.token.kind { token::Interpolated(ref nt) => match **nt { Nonterminal::NtMeta(ref meta) => Some(meta.clone()), _ => None, @@ -159,7 +159,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self, } else if self.eat(&token::Eq) { let eq = TokenTree::token(self.prev_span, token::Eq); let mut is_interpolated_expr = false; - if let token::Interpolated(nt) = &self.token { + if let token::Interpolated(nt) = &self.token.kind { if let token::NtExpr(..) = **nt { is_interpolated_expr = true; } @@ -188,7 +188,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self, crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { let mut attrs: Vec<ast::Attribute> = vec![]; loop { - match self.token { + match self.token.kind { token::Pound => { // Don't even try to parse if it's not an inner attribute. if !self.look_ahead(1, |t| t == &token::Not) { @@ -236,7 +236,7 @@ fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> { /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )?
; /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { - let nt_meta = match self.token { + let nt_meta = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref e) => Some(e.clone()), _ => None, diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index b391f7ca327..1759a229cf4 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -201,7 +201,7 @@ pub fn span_err>(&self, sp: S, m: &str) { self.span, &format!("expected identifier, found {}", self.this_token_descr()), ); - if let token::Ident(ident, false) = &self.token { + if let token::Ident(ident, false) = &self.token.kind { if ident.is_raw_guess() { err.span_suggestion( self.span, @@ -730,7 +730,7 @@ pub fn unexpected_try_recover( ) -> PResult<'a, bool /* recovered */> { let token_str = pprust::token_to_string(t); let this_token_str = self.this_token_descr(); - let (prev_sp, sp) = match (&self.token, self.subparser_name) { + let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { // Point at the end of the macro call when reaching end of macro arguments. (token::Eof, Some(_)) => { let sp = self.sess.source_map().next_point(self.span); @@ -746,7 +746,7 @@ pub fn unexpected_try_recover( let msg = format!( "expected `{}`, found {}", token_str, - match (&self.token, self.subparser_name) { + match (&self.token.kind, self.subparser_name) { (token::Eof, Some(origin)) => format!("end of {}", origin), _ => this_token_str, }, @@ -989,7 +989,7 @@ pub fn unexpected_try_recover( break_on_semi, break_on_block); loop { debug!("recover_stmt_ loop {:?}", self.token); - match self.token { + match self.token.kind { token::OpenDelim(token::DelimToken::Brace) => { brace_depth += 1; self.bump(); @@ -1074,7 +1074,7 @@ pub fn unexpected_try_recover( } crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { - if let token::DocComment(_) = self.token { + if let token::DocComment(_) = self.token.kind { let mut err = self.diagnostic().struct_span_err( self.span, &format!("documentation comments cannot be applied to {}", applied_to), @@ -1214,7 +1214,7 @@ pub fn unexpected_try_recover( } crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { - let (span, msg) = match (&self.token, self.subparser_name) { + let (span, msg) = match (&self.token.kind, self.subparser_name) { (&token::Eof, Some(origin)) => { let sp = self.sess.source_map().next_point(self.span); (sp, format!("expected expression, found end of {}", origin)) diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 4b8ef20180f..1abb8254bc6 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -3,7 +3,7 @@ use crate::ast::{self, Ident, Lit, LitKind}; use crate::parse::parser::Parser; use crate::parse::PResult; -use crate::parse::token::{self, TokenKind}; +use crate::parse::token::{self, Token, TokenKind}; use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte}; use crate::print::pprust; use crate::symbol::{kw, sym, Symbol}; @@ -272,44 +272,42 @@ impl<'a> Parser<'a> { if self.token == token::Dot { // Attempt to recover `.4` as `0.4`. 
recovered = self.look_ahead(1, |t| { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = *t { + if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = t.kind { let next_span = self.look_ahead_span(1); if self.span.hi() == next_span.lo() { let s = String::from("0.") + &symbol.as_str(); - let token = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some((token, self.span.to(next_span))); + let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); + return Some(Token { kind, span: self.span.to(next_span) }); } } None }); - if let Some((ref token, span)) = recovered { + if let Some(token) = &recovered { self.bump(); self.diagnostic() - .struct_span_err(span, "float literals must have an integer part") + .struct_span_err(token.span, "float literals must have an integer part") .span_suggestion( - span, + token.span, "must have an integer part", - pprust::token_to_string(&token), + pprust::token_to_string(token), Applicability::MachineApplicable, ) .emit(); } } - let (token, span) = recovered.as_ref().map_or((&self.token, self.span), - |(token, span)| (token, *span)); - - match Lit::from_token(token, span) { + let token = recovered.as_ref().unwrap_or(&self.token); + match Lit::from_token(token, token.span) { Ok(lit) => { self.bump(); Ok(lit) } Err(LitError::NotLiteral) => { let msg = format!("unexpected token: {}", self.this_token_descr()); - Err(self.span_fatal(span, &msg)) + Err(self.span_fatal(token.span, &msg)) } Err(err) => { - let lit = token.expect_lit(); + let (lit, span) = (token.expect_lit(), token.span); self.bump(); err.report(&self.sess.span_diagnostic, lit, span); let lit = token::Lit::new(token::Err, lit.symbol, lit.suffix); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 398b4b1da17..5187621258d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -239,7 +239,7 @@ fn maybe_source_file_to_parser( let mut parser = stream_to_parser(sess, stream, None); parser.unclosed_delims = unclosed_delims; if parser.token == token::Eof && parser.span.is_dummy() { - parser.span = Span::new(end_pos, end_pos, parser.span.ctxt()); + parser.token.span = Span::new(end_pos, end_pos, parser.span.ctxt()); } Ok(parser) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index eda67b3a93d..cc67a3fbd66 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -57,6 +57,7 @@ use std::borrow::Cow; use std::cmp; use std::mem; +use std::ops::Deref; use std::path::{self, Path, PathBuf}; use std::slice; @@ -121,7 +122,7 @@ pub enum PathStyle { /// `token::Interpolated` tokens. macro_rules! maybe_whole_expr { ($p:expr) => { - if let token::Interpolated(nt) = &$p.token { + if let token::Interpolated(nt) = &$p.token.kind { match &**nt { token::NtExpr(e) | token::NtLiteral(e) => { let e = e.clone(); @@ -147,7 +148,7 @@ macro_rules! maybe_whole_expr { /// As maybe_whole_expr, but for things other than expressions macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { - if let token::Interpolated(nt) = &$p.token { + if let token::Interpolated(nt) = &$p.token.kind { if let token::$constructor(x) = &**nt { let $x = x.clone(); $p.bump(); @@ -161,7 +162,7 @@ macro_rules! maybe_whole { macro_rules! 
maybe_recover_from_interpolated_ty_qpath { ($self: expr, $allow_qpath_recovery: expr) => { if $allow_qpath_recovery && $self.look_ahead(1, |t| t == &token::ModSep) { - if let token::Interpolated(nt) = &$self.token { + if let token::Interpolated(nt) = &$self.token.kind { if let token::NtTy(ty) = &**nt { let ty = ty.clone(); $self.bump(); @@ -196,14 +197,13 @@ enum PrevTokenKind { #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, - /// the current token: - pub token: token::TokenKind, - /// the span of the current token: - pub span: Span, + /// The current token. + pub token: Token, + /// The span of the previous token. meta_var_span: Option, /// The span of the previous token. pub prev_span: Span, - /// The kind of the previous troken. + /// The previous token kind. prev_token_kind: PrevTokenKind, restrictions: Restrictions, /// Used to determine the path to externally loaded source files. @@ -242,6 +242,15 @@ fn drop(&mut self) { } } +// FIXME: Parser uses `self.span` all the time. +// Remove this impl if you think that using `self.token.span` instead is acceptable. +impl Deref for Parser<'_> { + type Target = Token; + fn deref(&self) -> &Self::Target { + &self.token + } +} + #[derive(Clone)] crate struct TokenCursor { crate frame: TokenCursorFrame, @@ -468,8 +477,7 @@ pub fn new( ) -> Self { let mut parser = Parser { sess, - token: token::Whitespace, - span: DUMMY_SP, + token: Token { kind: token::Whitespace, span: DUMMY_SP }, prev_span: DUMMY_SP, meta_var_span: None, prev_token_kind: PrevTokenKind::Other, @@ -498,9 +506,7 @@ pub fn new( subparser_name, }; - let tok = parser.next_tok(); - parser.token = tok.kind; - parser.span = tok.span; + parser.token = parser.next_tok(); if let Some(directory) = directory { parser.directory = directory; @@ -534,7 +540,7 @@ pub fn this_token_to_string(&self) -> String { } crate fn token_descr(&self) -> Option<&'static str> { - Some(match &self.token { + Some(match &self.token.kind { t if t.is_special_ident() => "reserved identifier", t if t.is_used_keyword() => "keyword", t if t.is_unused_keyword() => "reserved keyword", @@ -612,7 +618,7 @@ pub fn parse_ident(&mut self) -> PResult<'a, ast::Ident> { } fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> { - match self.token { + match self.token.kind { token::Ident(ident, _) => { if self.token.is_reserved_ident() { let mut err = self.expected_ident_found(); @@ -732,7 +738,7 @@ fn check_const_arg(&mut self) -> bool { /// See issue #47856 for an example of when this may occur. fn eat_plus(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus))); - match self.token { + match self.token.kind { token::BinOp(token::Plus) => { self.bump(); true @@ -763,7 +769,7 @@ fn check_plus(&mut self) -> bool { /// `&` and continues. If an `&` is not seen, signals an error. fn expect_and(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::And))); - match self.token { + match self.token.kind { token::BinOp(token::And) => { self.bump(); Ok(()) @@ -780,7 +786,7 @@ fn expect_and(&mut self) -> PResult<'a, ()> { /// `|` and continues. If an `|` is not seen, signals an error. fn expect_or(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or))); - match self.token { + match self.token.kind { token::BinOp(token::Or) => { self.bump(); Ok(()) @@ -805,7 +811,7 @@ fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option) { /// starting token. 
fn eat_lt(&mut self) -> bool { self.expected_tokens.push(TokenType::Token(token::Lt)); - let ate = match self.token { + let ate = match self.token.kind { token::Lt => { self.bump(); true @@ -845,7 +851,7 @@ fn expect_lt(&mut self) -> PResult<'a, ()> { /// with a single `>` and continues. If a `>` is not seen, signals an error. fn expect_gt(&mut self) -> PResult<'a, ()> { self.expected_tokens.push(TokenType::Token(token::Gt)); - let ate = match self.token { + let ate = match self.token.kind { token::Gt => { self.bump(); Some(()) @@ -928,7 +934,7 @@ pub fn parse_seq_to_before_end( TokenExpectType::NoExpect => self.token == **k, } }) { - match self.token { + match self.token.kind { token::CloseDelim(..) | token::Eof => break, _ => {} }; @@ -1011,7 +1017,7 @@ pub fn bump(&mut self) { self.prev_span = self.meta_var_span.take().unwrap_or(self.span); // Record last token kind for possible error recovery. - self.prev_token_kind = match self.token { + self.prev_token_kind = match self.token.kind { token::DocComment(..) => PrevTokenKind::DocComment, token::Comma => PrevTokenKind::Comma, token::BinOp(token::Plus) => PrevTokenKind::Plus, @@ -1022,9 +1028,7 @@ pub fn bump(&mut self) { _ => PrevTokenKind::Other, }; - let next = self.next_tok(); - self.token = next.kind; - self.span = next.span; + self.token = self.next_tok(); self.expected_tokens.clear(); // check after each token self.process_potential_macro_variable(); @@ -1038,24 +1042,25 @@ fn bump_with(&mut self, next: token::TokenKind, span: Span) { // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. self.prev_token_kind = PrevTokenKind::Other; - self.token = next; - self.span = span; + self.token = Token { kind: next, span }; self.expected_tokens.clear(); } pub fn look_ahead(&self, dist: usize, f: F) -> R where - F: FnOnce(&token::TokenKind) -> R, + F: FnOnce(&token::Token) -> R, { if dist == 0 { - return f(&self.token) + // FIXME: Avoid cloning here. + return f(&self.token); } - f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { + let frame = &self.token_cursor.frame; + f(&match frame.tree_cursor.look_ahead(dist - 1) { Some(tree) => match tree { - TokenTree::Token(token) => token.kind, - TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim), - }, - None => token::CloseDelim(self.token_cursor.frame.delim), + TokenTree::Token(token) => token, + TokenTree::Delimited(dspan, delim, _) => Token { kind: token::OpenDelim(delim), span: dspan.open }, + } + None => Token { kind: token::CloseDelim(frame.delim), span: frame.span.close } }) } @@ -1209,7 +1214,7 @@ fn parse_trait_item_(&mut self, decl, }; - let body = match self.token { + let body = match self.token.kind { token::Semi => { self.bump(); *at_end = true; @@ -1477,7 +1482,7 @@ fn parse_ptr(&mut self) -> PResult<'a, MutTy> { } fn is_named_argument(&self) -> bool { - let offset = match self.token { + let offset = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtPat(..) 
=> return self.look_ahead(1, |t| t == &token::Colon), _ => 0, @@ -1612,7 +1617,7 @@ fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<'a, Option } fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { - match self.token { + match self.token.kind { token::Ident(ident, _) if self.token.is_path_segment_keyword() => { let span = self.span; self.bump(); @@ -1623,7 +1628,7 @@ fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { } fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { - match self.token { + match self.token.kind { token::Ident(ident, false) if ident.name == kw::Underscore => { let span = self.span; self.bump(); @@ -1710,7 +1715,7 @@ pub fn parse_path(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]` /// attributes. pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> { - let meta_ident = match self.token { + let meta_ident = match self.token.kind { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref meta) => match meta.node { ast::MetaItemKind::Word => Some(meta.path.clone()), @@ -1859,7 +1864,7 @@ fn parse_mutability(&mut self) -> Mutability { } fn parse_field_name(&mut self) -> PResult<'a, Ident> { - if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token { + if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind { self.expect_no_suffix(self.span, "a tuple index", suffix); self.bump(); Ok(Ident::new(symbol, self.prev_span)) @@ -1949,7 +1954,7 @@ fn mk_assign_op(&self, binop: ast::BinOp, } fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> { - let delim = match self.token { + let delim = match self.token.kind { token::OpenDelim(delim) => delim, _ => { let msg = "expected open delimiter"; @@ -1993,7 +1998,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P> { let ex: ExprKind; // Note: when adding new syntax here, don't forget to adjust TokenKind::can_begin_expr(). - match self.token { + match self.token.kind { token::OpenDelim(token::Paren) => { self.bump(); @@ -2363,7 +2368,7 @@ fn parse_struct_expr(&mut self, lo: Span, pth: ast::Path, mut attrs: ThinVec, lo: Span) -> PResult<'a, P { // Method call `expr.f()` let mut args = self.parse_unspanned_seq( @@ -2542,7 +2547,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, // expr.f if self.eat(&token::Dot) { - match self.token { + match self.token.kind { token::Ident(..) => { e = self.parse_dot_suffix(e, lo)?; } @@ -2594,7 +2599,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, continue; } if self.expr_is_complete(&e) { break; } - match self.token { + match self.token.kind { // expr(...) 
token::OpenDelim(token::Paren) => { let seq = self.parse_unspanned_seq( @@ -2627,11 +2632,11 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, } crate fn process_potential_macro_variable(&mut self) { - let (token, span) = match self.token { + self.token = match self.token.kind { token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() && self.look_ahead(1, |t| t.is_ident()) => { self.bump(); - let name = match self.token { + let name = match self.token.kind { token::Ident(ident, _) => ident, _ => unreachable!() }; @@ -2646,24 +2651,22 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, // Interpolated identifier and lifetime tokens are replaced with usual identifier // and lifetime tokens, so the former are never encountered during normal parsing. match **nt { - token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span), - token::NtLifetime(ident) => (token::Lifetime(ident), ident.span), + token::NtIdent(ident, is_raw) => Token { kind: token::Ident(ident, is_raw), span: ident.span }, + token::NtLifetime(ident) => Token { kind: token::Lifetime(ident), span: ident.span }, _ => return, } } _ => return, }; - self.token = token; - self.span = span; } /// Parses a single token tree from the input. crate fn parse_token_tree(&mut self) -> TokenTree { - match self.token { + match self.token.kind { token::OpenDelim(..) => { let frame = mem::replace(&mut self.token_cursor.frame, self.token_cursor.stack.pop().unwrap()); - self.span = frame.span.entire(); + self.token.span = frame.span.entire(); self.bump(); TokenTree::Delimited( frame.span, @@ -2673,9 +2676,9 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P, lo: Span) -> PResult<'a, }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { - let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span); + let token = mem::replace(&mut self.token, Token { kind: token::Whitespace, span: DUMMY_SP }); self.bump(); - TokenTree::token(span, token) + TokenTree::Token(token) } } } @@ -2692,7 +2695,7 @@ pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec> { pub fn parse_tokens(&mut self) -> TokenStream { let mut result = Vec::new(); loop { - match self.token { + match self.token.kind { token::Eof | token::CloseDelim(..) 
=> break, _ => result.push(self.parse_token_tree().into()), } @@ -2707,7 +2710,7 @@ fn parse_prefix_expr(&mut self, let attrs = self.parse_or_use_outer_attributes(already_parsed_attrs)?; let lo = self.span; // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() - let (hi, ex) = match self.token { + let (hi, ex) = match self.token.kind { token::Not => { self.bump(); let e = self.parse_prefix_expr(None); @@ -2760,7 +2763,7 @@ fn parse_prefix_expr(&mut self, // `not` is just an ordinary identifier in Rust-the-language, // but as `rustc`-the-compiler, we can issue clever diagnostics // for confused users who really want to say `!` - let token_cannot_continue_expr = |t: &token::TokenKind| match *t { + let token_cannot_continue_expr = |t: &token::Token| match t.kind { // These tokens can start an expression after `!`, but // can't continue an expression after an ident token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw), @@ -3040,7 +3043,7 @@ fn parse_assoc_op_cast(&mut self, lhs: P, lhs_span: Span, match self.parse_path(PathStyle::Expr) { Ok(path) => { - let (op_noun, op_verb) = match self.token { + let (op_noun, op_verb) = match self.token.kind { token::Lt => ("comparison", "comparing"), token::BinOp(token::Shl) => ("shift", "shifting"), _ => { @@ -3844,14 +3847,14 @@ fn parse_pat_range_end(&mut self) -> PResult<'a, P> { // helper function to decide whether to parse as ident binding or to try to do // something more complex like range patterns fn parse_as_ident(&mut self) -> bool { - self.look_ahead(1, |t| match *t { + self.look_ahead(1, |t| match t.kind { token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) | token::DotDotDot | token::DotDotEq | token::ModSep | token::Not => Some(false), // ensure slice patterns [a, b.., c] and [a, b, c..] don't go into the // range pattern branch token::DotDot => None, _ => Some(true), - }).unwrap_or_else(|| self.look_ahead(2, |t| match *t { + }).unwrap_or_else(|| self.look_ahead(2, |t| match t.kind { token::Comma | token::CloseDelim(token::Bracket) => true, _ => false, })) @@ -3914,12 +3917,12 @@ fn parse_pat_with_range_pat( let lo = self.span; let pat; - match self.token { + match self.token.kind { token::BinOp(token::And) | token::AndAnd => { // Parse &pat / &mut pat self.expect_and()?; let mutbl = self.parse_mutability(); - if let token::Lifetime(ident) = self.token { + if let token::Lifetime(ident) = self.token.kind { let mut err = self.fatal(&format!("unexpected lifetime `{}` in pattern", ident)); err.span_label(self.span, "unexpected lifetime"); @@ -3990,7 +3993,7 @@ fn parse_pat_with_range_pat( // Parse an unqualified path (None, self.parse_path(PathStyle::Expr)?) 
}; - match self.token { + match self.token.kind { token::Not if qself.is_none() => { // Parse macro invocation self.bump(); @@ -3999,7 +4002,7 @@ fn parse_pat_with_range_pat( pat = PatKind::Mac(mac); } token::DotDotDot | token::DotDotEq | token::DotDot => { - let end_kind = match self.token { + let end_kind = match self.token.kind { token::DotDot => RangeEnd::Excluded, token::DotDotDot => RangeEnd::Included(RangeSyntax::DotDotDot), token::DotDotEq => RangeEnd::Included(RangeSyntax::DotDotEq), @@ -4325,7 +4328,7 @@ fn is_auto_trait_item(&self) -> bool { fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span) -> PResult<'a, Option>> { let token_lo = self.span; - let (ident, def) = match self.token { + let (ident, def) = match self.token.kind { token::Ident(ident, false) if ident.name == kw::Macro => { self.bump(); let ident = self.parse_ident()?; @@ -4436,7 +4439,7 @@ fn parse_stmt_without_recovery(&mut self, } // it's a macro invocation - let id = match self.token { + let id = match self.token.kind { token::OpenDelim(_) => Ident::invalid(), // no special identifier _ => self.parse_ident()?, }; @@ -4444,7 +4447,7 @@ fn parse_stmt_without_recovery(&mut self, // check that we're pointing at delimiters (need to check // again after the `if`, because of `parse_ident` // consuming more tokens). - match self.token { + match self.token.kind { token::OpenDelim(_) => {} _ => { // we only expect an ident if we didn't parse one @@ -4481,7 +4484,7 @@ fn parse_stmt_without_recovery(&mut self, // We used to incorrectly stop parsing macro-expanded statements here. // If the next token will be an error anyway but could have parsed with the // earlier behavior, stop parsing here and emit a warning to avoid breakage. - else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token { + else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token.kind { // These can continue an expression, so we can't stop parsing and warn. token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) | token::BinOp(token::Minus) | token::BinOp(token::Star) | @@ -5250,7 +5253,7 @@ fn parse_generic_args(&mut self) -> PResult<'a, (Vec, Vec PResult<'a, P> { /// Returns the parsed optional self argument and whether a self shortcut was used. fn parse_self_arg(&mut self) -> PResult<'a, Option> { - let expect_ident = |this: &mut Self| match this.token { + let expect_ident = |this: &mut Self| match this.token.kind { // Preserve hygienic context. token::Ident(ident, _) => { let span = this.span; this.bump(); Ident::new(ident.name, span) } @@ -5492,7 +5495,7 @@ fn parse_self_arg(&mut self) -> PResult<'a, Option> { // Only a limited set of initial token sequences is considered `self` parameters; anything // else is parsed as a normal function parameter list, so some lookahead is required. 
let eself_lo = self.span; - let (eself, eself_ident, eself_hi) = match self.token { + let (eself, eself_ident, eself_hi) = match self.token.kind { token::BinOp(token::And) => { // `&self` // `&mut self` @@ -5803,7 +5806,7 @@ fn complain_if_pub_macro(&self, vis: &VisibilityKind, sp: Span) { match *vis { VisibilityKind::Inherited => {} _ => { - let is_macro_rules: bool = match self.token { + let is_macro_rules: bool = match self.token.kind { token::Ident(sid, _) => sid.name == sym::macro_rules, _ => false, }; @@ -5918,7 +5921,7 @@ fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<' self.expect(&token::OpenDelim(token::Brace))?; let mut trait_items = vec![]; while !self.eat(&token::CloseDelim(token::Brace)) { - if let token::DocComment(_) = self.token { + if let token::DocComment(_) = self.token.kind { if self.look_ahead(1, |tok| tok == &token::CloseDelim(token::Brace)) { let mut err = self.diagnostic().struct_span_err_with_code( @@ -6246,7 +6249,7 @@ fn parse_single_struct_field(&mut self, if self.token == token::Comma { seen_comma = true; } - match self.token { + match self.token.kind { token::Comma => { self.bump(); } @@ -7011,7 +7014,7 @@ fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> { /// Parses a string as an ABI spec on an extern type or module. Consumes /// the `extern` keyword, if one is found. fn parse_opt_abi(&mut self) -> PResult<'a, Option> { - match self.token { + match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) | token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => { let sp = self.span; @@ -7046,7 +7049,7 @@ fn is_static_global(&mut self) -> bool { if token.is_keyword(kw::Move) { return true; } - match *token { + match token.kind { token::BinOp(token::Or) | token::OrOr => true, _ => false, } @@ -7818,7 +7821,7 @@ pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> { } pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option)> { - let ret = match self.token { + let ret = match self.token.kind { token::Literal(token::Lit { kind: token::Str, symbol, suffix }) => (symbol, ast::StrStyle::Cooked, suffix), token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) => diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 83c4c809de3..b015815ac9c 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -260,7 +260,7 @@ fn parse_inline_asm<'a>( loop { // MOD_SEP is a double colon '::' without space in between. // When encountered, the state must be advanced twice. - match (&p.token, state.next(), state.next().next()) { + match (&p.token.kind, state.next(), state.next().next()) { (&token::Colon, StateNone, _) | (&token::ModSep, _, StateNone) => { p.bump(); diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs index 8a297a5c9bc..e5e422c4d9c 100644 --- a/src/libsyntax_ext/assert.rs +++ b/src/libsyntax_ext/assert.rs @@ -103,7 +103,7 @@ fn parse_assert<'a>( // // Parse this as an actual message, and suggest inserting a comma. Eventually, this should be // turned into an error. - let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token { + let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. 
}) = parser.token.kind { let mut err = cx.struct_span_warn(parser.span, "unexpected string literal"); let comma_span = cx.source_map().next_point(parser.prev_span); err.span_suggestion_short( diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index b5be45547cf..0eaac544e33 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -149,7 +149,7 @@ fn parse_args<'a>( } // accept trailing commas if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) { named = true; - let ident = if let token::Ident(i, _) = p.token { + let ident = if let token::Ident(i, _) = p.token.kind { p.bump(); i } else { -- 2.44.0
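
A standalone sketch of the pattern this patch introduces, for readers who want the shape of the change without walking every hunk. This is not rustc code: `Span`, `TokenKind`, `Token`, and `Parser` below are simplified stand-ins, and the token stream is just a `Vec`; the real parser keeps a `TokenCursor` and many more token kinds.

use std::ops::Deref;

// Simplified stand-in for `syntax_pos::Span`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: usize, hi: usize }

const DUMMY_SP: Span = Span { lo: 0, hi: 0 };

// Simplified stand-in for `token::TokenKind`.
#[derive(Clone, Debug)]
enum TokenKind {
    Ident(String),
    Comma,
    Eof,
}

// The kind and the span travel together, as in the new `Token` struct.
#[derive(Clone, Debug)]
struct Token {
    kind: TokenKind,
    span: Span,
}

struct Parser {
    /// The current token.
    token: Token,
    /// The span of the previous token.
    prev_span: Span,
    stream: Vec<Token>,
    pos: usize,
}

// Keeps the old `parser.span` shorthand compiling: `Parser` no longer has a
// `span` field, so field access auto-derefs to the current token's span.
impl Deref for Parser {
    type Target = Token;
    fn deref(&self) -> &Token { &self.token }
}

impl Parser {
    fn new(stream: Vec<Token>) -> Parser {
        let mut parser = Parser {
            token: Token { kind: TokenKind::Eof, span: DUMMY_SP },
            prev_span: DUMMY_SP,
            stream,
            pos: 0,
        };
        parser.bump(); // load the first token
        parser
    }

    // Advancing moves kind and span in a single assignment, replacing the
    // old pair of `self.token = ...; self.span = ...;` statements.
    fn bump(&mut self) {
        self.prev_span = self.token.span;
        self.token = self.stream.get(self.pos).cloned().unwrap_or(Token {
            kind: TokenKind::Eof,
            span: self.prev_span,
        });
        self.pos += 1;
    }

    // Call sites that used to match on `self.token` now match on
    // `self.token.kind`, and error paths can hand back the whole token
    // (kind plus span) as one value, as the macro matcher now does.
    fn parse_ident(&mut self) -> Result<String, Token> {
        if let TokenKind::Ident(name) = &self.token.kind {
            let name = name.clone();
            self.bump();
            Ok(name)
        } else {
            Err(self.token.clone())
        }
    }
}

fn main() {
    let tokens = vec![
        Token { kind: TokenKind::Ident("foo".into()), span: Span { lo: 0, hi: 3 } },
        Token { kind: TokenKind::Comma, span: Span { lo: 3, hi: 4 } },
    ];
    let mut p = Parser::new(tokens);
    assert_eq!(p.parse_ident().unwrap(), "foo");
    // `p.span` resolves to `p.token.span` through the `Deref` impl.
    assert_eq!(p.span, Span { lo: 3, hi: 4 });
}

The `Deref` impl mirrors the stopgap the patch adds behind a FIXME: it keeps the pervasive `self.span` spelling working while call sites migrate to `self.token.span`, and it can be deleted once that migration is finished.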