syntax: Use `Token` in `TokenTree::Token`
author Vadim Petrochenkov <vadim.petrochenkov@gmail.com>
Tue, 4 Jun 2019 17:42:43 +0000 (20:42 +0300)
committer Vadim Petrochenkov <vadim.petrochenkov@gmail.com>
Thu, 6 Jun 2019 11:03:15 +0000 (14:03 +0300)
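
This commit changes the `TokenTree::Token` variant (and its `quoted::TokenTree` counterpart) from carrying a separate `Span` and `TokenKind` to carrying a single `Token` struct that bundles both, so a token's span always travels with its kind. Callers construct trees through a new `TokenTree::token(span, kind)` helper and destructure them with `Token { kind, span }` patterns. The sketch below reconstructs the shape of the change from the hunks on this page; the defining hunks in src/libsyntax/parse/token.rs and src/libsyntax/tokenstream.rs are not reproduced here, so the stand-in types are only illustrative.

    // Stand-ins for the real libsyntax types, so the sketch is self-contained.
    #[derive(Clone, Copy, Debug, PartialEq)] pub struct Span;
    #[derive(Clone, Copy, Debug, PartialEq)] pub struct DelimSpan;
    #[derive(Clone, Copy, Debug, PartialEq)] pub enum DelimToken { Paren, Bracket, Brace, NoDelim }
    #[derive(Clone, Debug, PartialEq)] pub enum TokenKind { Eq, Comma, Semi }
    #[derive(Clone, Debug, PartialEq)] pub struct TokenStream;

    // After this commit: kind and span are bundled together...
    #[derive(Clone, Debug, PartialEq)]
    pub struct Token {
        pub kind: TokenKind,
        pub span: Span,
    }

    // ...and the variant holds one `Token` instead of `(Span, TokenKind)`.
    #[derive(Clone, Debug, PartialEq)]
    pub enum TokenTree {
        Token(Token),
        Delimited(DelimSpan, DelimToken, TokenStream),
    }

    impl TokenTree {
        // Convenience constructor used throughout the diff; shown verbatim
        // for `quoted::TokenTree` in the src/libsyntax/ext/tt/quoted.rs hunk.
        pub fn token(span: Span, kind: TokenKind) -> TokenTree {
            TokenTree::Token(Token { kind, span })
        }
    }
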
31 files changed:
src/librustc/hir/lowering.rs
src/librustc/ich/impls_syntax.rs
src/librustc_lint/builtin.rs
src/librustdoc/html/highlight.rs
src/libsyntax/attr/mod.rs
src/libsyntax/diagnostics/plugin.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/feature_gate.rs
src/libsyntax/lib.rs
src/libsyntax/mut_visit.rs
src/libsyntax/parse/attr.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/lexer/tokentrees.rs
src/libsyntax/parse/literal.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/print/pprust.rs
src/libsyntax/tokenstream.rs
src/libsyntax/visit.rs
src/libsyntax_ext/asm.rs
src/libsyntax_ext/assert.rs
src/libsyntax_ext/concat_idents.rs
src/libsyntax_ext/deriving/custom.rs
src/libsyntax_ext/proc_macro_server.rs
src/libsyntax_ext/trace_macros.rs

src/librustc/hir/lowering.rs
index 919f682fc4f6fced54f58ae31f20cb319172039c..e7f52b48cb9ede5018ad0860123c722138c1fb58 100644 (file)
@@ -67,7 +67,7 @@
 use syntax::std_inject;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::{self, TokenKind};
+use syntax::parse::token::{self, Token};
 use syntax::visit::{self, Visitor};
 use syntax_pos::{DUMMY_SP, edition, Span};
 
@@ -1328,7 +1328,7 @@ fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
 
     fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
         match tree {
-            TokenTree::Token(span, token) => self.lower_token(token, span),
+            TokenTree::Token(token) => self.lower_token(token),
             TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                 span,
                 delim,
@@ -1337,13 +1337,13 @@ fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
         }
     }
 
-    fn lower_token(&mut self, token: TokenKind, span: Span) -> TokenStream {
-        match token {
+    fn lower_token(&mut self, token: Token) -> TokenStream {
+        match token.kind {
             token::Interpolated(nt) => {
-                let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
+                let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
                 self.lower_token_stream(tts)
             }
-            other => TokenTree::Token(span, other).into(),
+            _ => TokenTree::Token(token).into(),
         }
     }
 
src/librustc/ich/impls_syntax.rs
index 8e2550d3c453747188fc6528b6ea0c0d2afe0fa8..a373f434bf71e90cccb6cd9e3456f9065887f2dc 100644 (file)
@@ -261,9 +261,8 @@ fn hash_stable<W: StableHasherResult>(&self,
                                           hasher: &mut StableHasher<W>) {
         mem::discriminant(self).hash_stable(hcx, hasher);
         match *self {
-            tokenstream::TokenTree::Token(span, ref token) => {
-                span.hash_stable(hcx, hasher);
-                hash_token(token, hcx, hasher);
+            tokenstream::TokenTree::Token(ref token) => {
+                token.hash_stable(hcx, hasher);
             }
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
@@ -306,70 +305,75 @@ fn hash_stable<W: StableHasherResult>(&self,
     suffix
 });
 
-fn hash_token<'a, 'gcx, W: StableHasherResult>(
-    token: &token::TokenKind,
-    hcx: &mut StableHashingContext<'a>,
-    hasher: &mut StableHasher<W>,
-) {
-    mem::discriminant(token).hash_stable(hcx, hasher);
-    match *token {
-        token::Eq |
-        token::Lt |
-        token::Le |
-        token::EqEq |
-        token::Ne |
-        token::Ge |
-        token::Gt |
-        token::AndAnd |
-        token::OrOr |
-        token::Not |
-        token::Tilde |
-        token::At |
-        token::Dot |
-        token::DotDot |
-        token::DotDotDot |
-        token::DotDotEq |
-        token::Comma |
-        token::Semi |
-        token::Colon |
-        token::ModSep |
-        token::RArrow |
-        token::LArrow |
-        token::FatArrow |
-        token::Pound |
-        token::Dollar |
-        token::Question |
-        token::SingleQuote |
-        token::Whitespace |
-        token::Comment |
-        token::Eof => {}
-
-        token::BinOp(bin_op_token) |
-        token::BinOpEq(bin_op_token) => {
-            std_hash::Hash::hash(&bin_op_token, hasher);
-        }
+impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a>,
+                                          hasher: &mut StableHasher<W>) {
+        mem::discriminant(self).hash_stable(hcx, hasher);
+        match *self {
+            token::Eq |
+            token::Lt |
+            token::Le |
+            token::EqEq |
+            token::Ne |
+            token::Ge |
+            token::Gt |
+            token::AndAnd |
+            token::OrOr |
+            token::Not |
+            token::Tilde |
+            token::At |
+            token::Dot |
+            token::DotDot |
+            token::DotDotDot |
+            token::DotDotEq |
+            token::Comma |
+            token::Semi |
+            token::Colon |
+            token::ModSep |
+            token::RArrow |
+            token::LArrow |
+            token::FatArrow |
+            token::Pound |
+            token::Dollar |
+            token::Question |
+            token::SingleQuote |
+            token::Whitespace |
+            token::Comment |
+            token::Eof => {}
+
+            token::BinOp(bin_op_token) |
+            token::BinOpEq(bin_op_token) => {
+                std_hash::Hash::hash(&bin_op_token, hasher);
+            }
 
-        token::OpenDelim(delim_token) |
-        token::CloseDelim(delim_token) => {
-            std_hash::Hash::hash(&delim_token, hasher);
-        }
-        token::Literal(lit) => lit.hash_stable(hcx, hasher),
+            token::OpenDelim(delim_token) |
+            token::CloseDelim(delim_token) => {
+                std_hash::Hash::hash(&delim_token, hasher);
+            }
+            token::Literal(lit) => lit.hash_stable(hcx, hasher),
 
-        token::Ident(ident, is_raw) => {
-            ident.name.hash_stable(hcx, hasher);
-            is_raw.hash_stable(hcx, hasher);
-        }
-        token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
+            token::Ident(ident, is_raw) => {
+                ident.name.hash_stable(hcx, hasher);
+                is_raw.hash_stable(hcx, hasher);
+            }
+            token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
 
-        token::Interpolated(_) => {
-            bug!("interpolated tokens should not be present in the HIR")
-        }
+            token::Interpolated(_) => {
+                bug!("interpolated tokens should not be present in the HIR")
+            }
 
-        token::DocComment(val) |
-        token::Shebang(val) => val.hash_stable(hcx, hasher),
+            token::DocComment(val) |
+            token::Shebang(val) => val.hash_stable(hcx, hasher),
+        }
     }
 }
 
+impl_stable_hash_for!(struct token::Token {
+    kind,
+    span
+});
+
 impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
     MetaItem(meta_item),
     Literal(lit)
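
The manual `hash_token` helper above becomes a `HashStable` impl on `token::TokenKind`, and the new `token::Token` struct gets its impl from `impl_stable_hash_for!`. Roughly, that macro invocation expands to a field-by-field `hash_stable` call in field order; the following is an approximation of the real macro's output, not its literal expansion:

    impl<'a> HashStable<StableHashingContext<'a>> for token::Token {
        fn hash_stable<W: StableHasherResult>(&self,
                                              hcx: &mut StableHashingContext<'a>,
                                              hasher: &mut StableHasher<W>) {
            // Hash each listed field in order: `kind`, then `span`.
            let token::Token { ref kind, ref span } = *self;
            kind.hash_stable(hcx, hasher);
            span.hash_stable(hcx, hasher);
        }
    }
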
src/librustc_lint/builtin.rs
index 937085c8ad8e8e1e17ab6e75479cfbb207645353..a3da97bd5db1ec23678f94a62fe983c47ab5c1c7 100644 (file)
@@ -1414,11 +1414,11 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
         for tt in tokens.into_trees() {
             match tt {
-                TokenTree::Token(span, tok) => match tok.ident() {
+                TokenTree::Token(token) => match token.ident() {
                     // only report non-raw idents
                     Some((ident, false)) => {
                         self.check_ident_token(cx, UnderMacro(true), ast::Ident {
-                            span: span.substitute_dummy(ident.span),
+                            span: token.span.substitute_dummy(ident.span),
                             ..ident
                         });
                     }
src/librustdoc/html/highlight.rs
index 3b9de761828b7387c1eeecebcab26e90005f397e..d68741233754bbb60efe23acfedfd6a1f328cfd2 100644 (file)
@@ -234,7 +234,7 @@ fn write_token<W: Writer>(&mut self,
             // reference or dereference operator or a reference or pointer type, instead of the
             // bit-and or multiplication operator.
             token::BinOp(token::And) | token::BinOp(token::Star)
-                if self.lexer.peek().kind != token::Whitespace => Class::RefKeyWord,
+                if self.lexer.peek() != token::Whitespace => Class::RefKeyWord,
 
             // Consider this as part of a macro invocation if there was a
             // leading identifier.
@@ -335,7 +335,7 @@ fn write_token<W: Writer>(&mut self,
                     sym::Option | sym::Result => Class::PreludeTy,
                     sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
 
-                    _ if token.kind.is_reserved_ident() => Class::KeyWord,
+                    _ if token.is_reserved_ident() => Class::KeyWord,
 
                     _ => {
                         if self.in_macro_nonterminal {
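
Note that `self.lexer.peek() != token::Whitespace` above compares a whole `Token` against a bare `TokenKind`, and later hunks do the same (`token == token::Eq` in attr/mod.rs and feature_gate.rs, `next_token() == token::Comment` in the lexer tests). That only compiles if `Token` is comparable with `TokenKind`; presumably the commit adds an impl along these lines in src/libsyntax/parse/token.rs, whose hunk is not shown on this page:

    // Assumed addition in parse/token.rs: comparing a spanned `Token`
    // with a bare `TokenKind` simply ignores the span.
    impl PartialEq<TokenKind> for Token {
        fn eq(&self, rhs: &TokenKind) -> bool {
            self.kind == *rhs
        }
    }
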
src/libsyntax/attr/mod.rs
index ade15f024a60919a760be14d0567f4e38fffceaa..448061395afdc91ddbf44c192470cd3912d51068 100644 (file)
@@ -20,7 +20,7 @@
 use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use crate::parse::parser::Parser;
 use crate::parse::{self, ParseSess, PResult};
-use crate::parse::token::{self, TokenKind};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::ptr::P;
 use crate::symbol::{sym, Symbol};
 use crate::ThinVec;
@@ -465,9 +465,9 @@ fn tokens(&self) -> TokenStream {
                 let mod_sep_span = Span::new(last_pos,
                                              segment.ident.span.lo(),
                                              segment.ident.span.ctxt());
-                idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into());
+                idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
             }
-            idents.push(TokenTree::Token(segment.ident.span,
+            idents.push(TokenTree::token(segment.ident.span,
                                          TokenKind::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
@@ -480,10 +480,10 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
     {
         // FIXME: Share code with `parse_path`.
         let path = match tokens.next() {
-            Some(TokenTree::Token(span, token @ token::Ident(..))) |
-            Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: {
-                let mut segments = if let token::Ident(ident, _) = token {
-                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
+            Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
+            Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
+                let mut segments = if let token::Ident(ident, _) = kind {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
                         tokens.next();
                         vec![PathSegment::from_ident(ident.with_span_pos(span))]
                     } else {
@@ -493,13 +493,12 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
                     vec![PathSegment::path_root(span)]
                 };
                 loop {
-                    if let Some(TokenTree::Token(span,
-                                                    token::Ident(ident, _))) = tokens.next() {
+                    if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() {
                         segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
                     } else {
                         return None;
                     }
-                    if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
+                    if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
                         tokens.next();
                     } else {
                         break;
@@ -508,7 +507,7 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt {
+            Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),
@@ -533,7 +532,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                let mut vec = vec![TokenTree::Token(span, token::Eq).into()];
+                let mut vec = vec![TokenTree::token(span, token::Eq).into()];
                 lit.tokens().append_to_tree_and_joint_vec(&mut vec);
                 TokenStream::new(vec)
             }
@@ -541,7 +540,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
                 let mut tokens = Vec::new();
                 for (i, item) in list.iter().enumerate() {
                     if i > 0 {
-                        tokens.push(TokenTree::Token(span, token::Comma).into());
+                        tokens.push(TokenTree::token(span, token::Comma).into());
                     }
                     item.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
@@ -558,10 +557,10 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
         where I: Iterator<Item = TokenTree>,
     {
         let delimited = match tokens.peek().cloned() {
-            Some(TokenTree::Token(_, token::Eq)) => {
+            Some(TokenTree::Token(token)) if token == token::Eq => {
                 tokens.next();
-                return if let Some(TokenTree::Token(span, token)) = tokens.next() {
-                    Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue)
+                return if let Some(TokenTree::Token(token)) = tokens.next() {
+                    Lit::from_token(&token, token.span).ok().map(MetaItemKind::NameValue)
                 } else {
                     None
                 };
@@ -579,7 +578,7 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
             let item = NestedMetaItem::from_tokens(&mut tokens)?;
             result.push(item);
             match tokens.next() {
-                None | Some(TokenTree::Token(_, token::Comma)) => {}
+                None | Some(TokenTree::Token(Token { kind: token::Comma, .. })) => {}
                 _ => return None,
             }
         }
@@ -605,8 +604,8 @@ fn tokens(&self) -> TokenStream {
     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
         where I: Iterator<Item = TokenTree>,
     {
-        if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
-            if let Ok(lit) = Lit::from_token(&token, span) {
+        if let Some(TokenTree::Token(token)) = tokens.peek().cloned() {
+            if let Ok(lit) = Lit::from_token(&token, token.span) {
                 tokens.next();
                 return Some(NestedMetaItem::Literal(lit));
             }
src/libsyntax/diagnostics/plugin.rs
index 0c57c23b2b5c4102e61a83e5be5f4e99fafb46b4..b342e4bc47274505b7b0c006019217b2dcde4983 100644 (file)
@@ -5,7 +5,7 @@
 use crate::source_map;
 use crate::ext::base::{ExtCtxt, MacEager, MacResult};
 use crate::ext::build::AstBuilder;
-use crate::parse::token;
+use crate::parse::token::{self, Token};
 use crate::ptr::P;
 use crate::symbol::kw;
 use crate::tokenstream::{TokenTree};
@@ -34,7 +34,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                    token_tree: &[TokenTree])
                                    -> Box<dyn MacResult+'cx> {
     let code = match (token_tree.len(), token_tree.get(0)) {
-        (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. }))) => code,
         _ => unreachable!()
     };
 
@@ -72,12 +72,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
         token_tree.get(1),
         token_tree.get(2)
     ) {
-        (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
+        (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => {
             (code, None)
         },
-        (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
-            Some(&TokenTree::Token(_, token::Comma)),
-            Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => {
+        (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })),
+            Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
+            Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
             (code, Some(symbol))
         }
         _ => unreachable!()
@@ -143,9 +143,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
     let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
         (
             // Crate name.
-            &TokenTree::Token(_, token::Ident(ref crate_name, _)),
+            &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }),
             // DIAGNOSTICS ident.
-            &TokenTree::Token(_, token::Ident(ref name, _))
+            &TokenTree::Token(Token { kind: token::Ident(ref name, _), .. })
         ) => (*&crate_name, name),
         _ => unreachable!()
     };
src/libsyntax/ext/base.rs
index 4b5b9ff7bbeeee5b9febe53a99f84b9c7184ca29..0c2ab67240741a809b861d9acf87a323e44274f4 100644 (file)
@@ -265,10 +265,12 @@ fn expand<'cx>(
 
         impl MutVisitor for AvoidInterpolatedIdents {
             fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
-                if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-                    if let token::NtIdent(ident, is_raw) = **nt {
-                        *tt = tokenstream::TokenTree::Token(ident.span,
-                                                            token::Ident(ident, is_raw));
+                if let tokenstream::TokenTree::Token(token) = tt {
+                    if let token::Interpolated(nt) = &token.kind {
+                        if let token::NtIdent(ident, is_raw) = **nt {
+                            *tt = tokenstream::TokenTree::token(ident.span,
+                                                                token::Ident(ident, is_raw));
+                        }
                     }
                 }
                 mut_visit::noop_visit_tt(tt, self)
src/libsyntax/ext/expand.rs
index 7b158b65d156233f8d50009e75648fde11d6b3c8..4396b9be9bbb096b59bd812cfad01426c4e54607 100644 (file)
@@ -585,7 +585,7 @@ fn expand_attr_invoc(&mut self,
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
+                let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
src/libsyntax/ext/tt/macro_parser.rs
index c22952ed7504b904ea4cc07e2c8e37a4d65666eb..6acdffedd6b1a6e3cfe5f21818394d473187299a 100644 (file)
@@ -78,7 +78,7 @@
 use crate::ext::tt::quoted::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, TokenKind};
+use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
 use crate::print::pprust;
 use crate::symbol::{kw, sym, Symbol};
 use crate::tokenstream::{DelimSpan, TokenStream};
@@ -609,7 +609,8 @@ fn inner_parse_loop<'root, 'tt>(
                 //
                 // At the beginning of the loop, if we reach the end of the delimited submatcher,
                 // we pop the stack to backtrack out of the descent.
-                seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
+                seq @ TokenTree::Delimited(..) |
+                seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => {
                     let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
                     let idx = item.idx;
                     item.stack.push(MatcherTtFrame {
@@ -621,7 +622,7 @@ fn inner_parse_loop<'root, 'tt>(
                 }
 
                 // We just matched a normal token. We can just advance the parser.
-                TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
+                TokenTree::Token(t) if token_name_eq(&t, token) => {
                     item.idx += 1;
                     next_items.push(item);
                 }
src/libsyntax/ext/tt/macro_rules.rs
index 9d3ea4d8645da328f8b577d6ebfa9fc780255399..703ad0053a0ef34e3b62b1d4d46671a0fa01c01c 100644 (file)
@@ -11,7 +11,7 @@
 use crate::feature_gate::Features;
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::Parser;
-use crate::parse::token::{self, NtTT};
+use crate::parse::token::{self, Token, NtTT};
 use crate::parse::token::TokenKind::*;
 use crate::symbol::{Symbol, kw, sym};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
@@ -270,7 +270,7 @@ pub fn compile(
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
-                quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+                quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
             ],
             separator: Some(if body.legacy { token::Semi } else { token::Comma }),
@@ -279,7 +279,7 @@ pub fn compile(
         })),
         // to phase into semicolon-termination instead of semicolon-separation
         quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
-            tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+            tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
             num_captures: 0
@@ -613,7 +613,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
 
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                            first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -663,7 +663,7 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
 
                             if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                             subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+                                first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
                             }
 
                             assert!(first.maybe_empty);
@@ -869,7 +869,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
+                    new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -1015,7 +1015,7 @@ enum IsInFollow {
 fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
     use quoted::TokenTree;
 
-    if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
+    if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
         // closing a token tree can never be matched by any fragment;
         // iow, we always require that `(` and `)` match, etc.
         IsInFollow::Yes
@@ -1033,8 +1033,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "stmt" | "expr"  => {
                 let tokens = vec!["`=>`", "`,`", "`;`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Semi => IsInFollow::Yes,
                         _ => IsInFollow::No(tokens),
                     },
@@ -1043,8 +1043,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             },
             "pat" => {
                 let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
                         Ident(i, false) if i.name == kw::If ||
                                            i.name == kw::In => IsInFollow::Yes,
@@ -1058,8 +1058,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
                     "`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`",
                     "`where`",
                 ];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         OpenDelim(token::DelimToken::Brace) |
                         OpenDelim(token::DelimToken::Bracket) |
                         Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
@@ -1089,8 +1089,8 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
             "vis" => {
                 // Explicitly disallow `priv`, on the off chance it comes back.
                 let tokens = vec!["`,`", "an ident", "a type"];
-                match *tok {
-                    TokenTree::Token(_, ref tok) => match *tok {
+                match tok {
+                    TokenTree::Token(token) => match token.kind {
                         Comma => IsInFollow::Yes,
                         Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
                             IsInFollow::Yes,
@@ -1150,7 +1150,7 @@ fn is_legal_fragment_specifier(_sess: &ParseSess,
 
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
+        quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
         quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
src/libsyntax/ext/tt/quoted.rs
index fe0cb56b29e3087db7b9228ef102952f9d40c18f..9f4e35ad3d7795f6899ba97c568c050a04d8f6ad 100644 (file)
@@ -2,7 +2,8 @@
 use crate::early_buffered_lints::BufferedEarlyLintId;
 use crate::ext::tt::macro_parser;
 use crate::feature_gate::Features;
-use crate::parse::{token, ParseSess};
+use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::tokenstream::{self, DelimSpan};
 use crate::ast;
@@ -39,7 +40,7 @@ pub fn open_tt(&self, span: Span) -> TokenTree {
         } else {
             span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(open_span, self.open_token())
+        TokenTree::token(open_span, self.open_token())
     }
 
     /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
@@ -49,7 +50,7 @@ pub fn close_tt(&self, span: Span) -> TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
         };
-        TokenTree::Token(close_span, self.close_token())
+        TokenTree::token(close_span, self.close_token())
     }
 }
 
@@ -81,7 +82,7 @@ pub enum KleeneOp {
 /// are "first-class" token trees. Useful for parsing macros.
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
-    Token(Span, token::TokenKind),
+    Token(Token),
     Delimited(DelimSpan, Lrc<Delimited>),
     /// A kleene-style repetition sequence
     Sequence(DelimSpan, Lrc<SequenceRepetition>),
@@ -144,13 +145,17 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
     /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match *self {
-            TokenTree::Token(sp, _)
-            | TokenTree::MetaVar(sp, _)
-            | TokenTree::MetaVarDecl(sp, _, _) => sp,
-            TokenTree::Delimited(sp, _)
-            | TokenTree::Sequence(sp, _) => sp.entire(),
+            TokenTree::Token(Token { span, .. })
+            | TokenTree::MetaVar(span, _)
+            | TokenTree::MetaVarDecl(span, _, _) => span,
+            TokenTree::Delimited(span, _)
+            | TokenTree::Sequence(span, _) => span.entire(),
         }
     }
+
+    crate fn token(span: Span, kind: TokenKind) -> TokenTree {
+        TokenTree::Token(Token { kind, span })
+    }
 }
 
 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
@@ -205,14 +210,14 @@ pub fn parse(
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
-                    Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
-                        Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
+                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() {
+                        Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
                             Some((kind, _)) => {
-                                let span = end_sp.with_lo(start_sp.lo());
+                                let span = token.span.with_lo(start_sp.lo());
                                 result.push(TokenTree::MetaVarDecl(span, ident, kind));
                                 continue;
                             }
-                            _ => end_sp,
+                            _ => token.span,
                         },
                         tree => tree
                             .as_ref()
@@ -270,7 +275,7 @@ fn parse_tree<I>(
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
             // `tree` is followed by a delimited set of token trees. This indicates the beginning
             // of a repetition sequence in the macro (e.g. `$(pat)*`).
             Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
@@ -316,33 +321,33 @@ fn parse_tree<I>(
 
             // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
             // metavariable that names the crate of the invocation.
-            Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
+            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                 let (ident, is_raw) = token.ident().unwrap();
-                let span = ident_span.with_lo(span.lo());
+                let span = token.span.with_lo(span.lo());
                 if ident.name == kw::Crate && !is_raw {
                     let ident = ast::Ident::new(kw::DollarCrate, ident.span);
-                    TokenTree::Token(span, token::Ident(ident, is_raw))
+                    TokenTree::token(span, token::Ident(ident, is_raw))
                 } else {
                     TokenTree::MetaVar(span, ident)
                 }
             }
 
             // `tree` is followed by a random token. This is an error.
-            Some(tokenstream::TokenTree::Token(span, tok)) => {
+            Some(tokenstream::TokenTree::Token(token)) => {
                 let msg = format!(
                     "expected identifier, found `{}`",
-                    pprust::token_to_string(&tok)
+                    pprust::token_to_string(&token),
                 );
-                sess.span_diagnostic.span_err(span, &msg);
-                TokenTree::MetaVar(span, ast::Ident::invalid())
+                sess.span_diagnostic.span_err(token.span, &msg);
+                TokenTree::MetaVar(token.span, ast::Ident::invalid())
             }
 
             // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::Token(span, token::Dollar),
+            None => TokenTree::token(span, token::Dollar),
         },
 
         // `tree` is an arbitrary token. Keep it.
-        tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
 
         // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
         // descend into the delimited set and further parse it.
@@ -380,17 +385,14 @@ fn kleene_op(token: &token::TokenKind) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<I>(
-    input: &mut I,
-    span: Span,
-) -> Result<Result<(KleeneOp, Span), (token::TokenKind, Span)>, Span>
+fn parse_kleene_op<I>(input: &mut I, span: Span) -> Result<Result<(KleeneOp, Span), Token>, Span>
 where
     I: Iterator<Item = tokenstream::TokenTree>,
 {
     match input.next() {
-        Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
-            Some(op) => Ok(Ok((op, span))),
-            None => Ok(Err((tok, span))),
+        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+            Some(op) => Ok(Ok((op, token.span))),
+            None => Ok(Err(token)),
         },
         tree => Err(tree
             .as_ref()
@@ -466,7 +468,7 @@ fn parse_sep_and_kleene_op_2015<I>(
             assert_eq!(op, KleeneOp::ZeroOrOne);
 
             // Lookahead at #2. If it is a KleenOp, then #1 is a separator.
-            let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
+            let is_1_sep = if let Some(tokenstream::TokenTree::Token(tok2)) = input.peek() {
                 kleene_op(tok2).is_some()
             } else {
                 false
@@ -504,7 +506,7 @@ fn parse_sep_and_kleene_op_2015<I>(
                     }
 
                     // #2 is a random token (this is an error) :(
-                    Ok(Err((_, _))) => op1_span,
+                    Ok(Err(_)) => op1_span,
 
                     // #2 is not even a token at all :(
                     Err(_) => op1_span,
@@ -524,7 +526,7 @@ fn parse_sep_and_kleene_op_2015<I>(
         }
 
         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is a `?`, which is not allowed as a Kleene op in 2015 edition,
             // but is allowed in the 2018 edition
             Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
@@ -539,10 +541,10 @@ fn parse_sep_and_kleene_op_2015<I>(
             }
 
             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
+            Ok(Ok((op, _))) => return (Some(token.kind), op),
 
             // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Err(token)) => token.span,
 
             // #2 is not a token at all :(
             Err(span) => span,
@@ -580,12 +582,12 @@ fn parse_sep_and_kleene_op_2018<I>(
         Ok(Ok((op, _))) => return (None, op),
 
         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
             // #2 is the `?` Kleene op, which does not take a separator (error)
             Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => {
                 // Error!
                 sess.span_diagnostic.span_err(
-                    span,
+                    token.span,
                     "the `?` macro repetition operator does not take a separator",
                 );
 
@@ -594,10 +596,10 @@ fn parse_sep_and_kleene_op_2018<I>(
             }
 
             // #2 is a KleeneOp :D
-            Ok(Ok((op, _))) => return (Some(tok), op),
+            Ok(Ok((op, _))) => return (Some(token.kind), op),
 
             // #2 is a random token :(
-            Ok(Err((_, span))) => span,
+            Ok(Err(token)) => token.span,
 
             // #2 is not a token at all :(
             Err(span) => span,
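
With the single-field variant, matches that used to destructure a `(span, kind)` pair now either bind the whole `Token` or use a struct pattern on its fields, as the hunks above do. A hypothetical consumer illustrating both styles (`classify`, `handle_comma`, and `handle_other` are made-up names):

    fn classify(tt: TokenTree) {
        match tt {
            // Struct pattern: select one kind and bind its span.
            TokenTree::Token(Token { kind: token::Comma, span }) => handle_comma(span),
            // Bind the whole token; `token.kind` and `token.span` travel together.
            TokenTree::Token(token) => handle_other(token),
            _ => {}
        }
    }
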
src/libsyntax/ext/tt/transcribe.rs
index 1b169d7696af3888881ae080e2377051239b2c30..1dbb0638df195968f7045218697c8a38d35abc41 100644 (file)
@@ -119,7 +119,7 @@ pub fn transcribe(
                             Some((tt, _)) => tt.span(),
                             None => DUMMY_SP,
                         };
-                        result.push(TokenTree::Token(prev_span, sep).into());
+                        result.push(TokenTree::token(prev_span, sep).into());
                     }
                     continue;
                 }
@@ -225,7 +225,7 @@ pub fn transcribe(
                             result.push(tt.clone().into());
                         } else {
                             sp = sp.apply_mark(cx.current_expansion.mark);
-                            let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+                            let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
                             result.push(token.into());
                         }
                     } else {
@@ -241,8 +241,8 @@ pub fn transcribe(
                     let ident =
                         Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    result.push(TokenTree::Token(sp, token::Dollar).into());
-                    result.push(TokenTree::Token(sp, token::TokenKind::from_ast_ident(ident)).into());
+                    result.push(TokenTree::token(sp, token::Dollar).into());
+                    result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
                 }
             }
 
@@ -259,9 +259,9 @@ pub fn transcribe(
 
             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
-            quoted::TokenTree::Token(sp, tok) => {
+            quoted::TokenTree::Token(token) => {
                 let mut marker = Marker(cx.current_expansion.mark);
-                let mut tt = TokenTree::Token(sp, tok);
+                let mut tt = TokenTree::Token(token);
                 noop_visit_tt(&mut tt, &mut marker);
                 result.push(tt.into());
             }
src/libsyntax/feature_gate.rs
index 4a95b6f69a161a7834f4908f868c155c3adc9f38..64415204047ba275fd3ba3e7ba2be5f1cea06360 100644 (file)
@@ -1958,9 +1958,11 @@ fn visit_attribute(&mut self, attr: &ast::Attribute) {
                 name,
                 template
             ),
-            None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() {
-                // All key-value attributes are restricted to meta-item syntax.
-                attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+            None => if let Some(TokenTree::Token(token)) = attr.tokens.trees().next() {
+                if token == token::Eq {
+                    // All key-value attributes are restricted to meta-item syntax.
+                    attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+                }
             }
         }
     }
src/libsyntax/lib.rs
index 4229121b3d0759cdf370a535e66700b1aac43679..6882586ed2cd2992df42ac33b0ea1dda8f063bff 100644 (file)
@@ -10,6 +10,7 @@
 #![deny(rust_2018_idioms)]
 #![deny(internal)]
 
+#![feature(bind_by_move_pattern_guards)]
 #![feature(crate_visibility_modifier)]
 #![feature(label_break_value)]
 #![feature(nll)]
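
The new `bind_by_move_pattern_guards` feature is needed because several hunks in this commit bind a `Token` by move in a pattern and then use it in an `if` guard, e.g. `Some(tokenstream::TokenTree::Token(token)) if token.is_ident()` in ext/tt/quoted.rs; that combination was still gated on the nightlies of the time (it was stabilized later, in Rust 1.39). A minimal standalone example of the gated shape:

    fn nonempty(opt: Option<String>) -> Option<String> {
        match opt {
            // `s` is bound by move, yet the guard borrows it: the gated case.
            Some(s) if !s.is_empty() => Some(s),
            _ => None,
        }
    }
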
src/libsyntax/mut_visit.rs
index 289f2c0ce4864f7f00a7337918968c826215ea5a..ad6d3f71c652ea8df05aa5640d0935a3eccf0c5e 100644 (file)
@@ -9,7 +9,7 @@
 
 use crate::ast::*;
 use crate::source_map::{Spanned, respan};
-use crate::parse::token::{self, TokenKind};
+use crate::parse::token::{self, Token, TokenKind};
 use crate::ptr::P;
 use crate::ThinVec;
 use crate::tokenstream::*;
@@ -576,9 +576,9 @@ pub fn noop_visit_arg<T: MutVisitor>(Arg { id, pat, ty }: &mut Arg, vis: &mut T)
 
 pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
     match tt {
-        TokenTree::Token(span, tok) => {
+        TokenTree::Token(Token { kind, span }) => {
+            vis.visit_token(kind);
             vis.visit_span(span);
-            vis.visit_token(tok);
         }
         TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
             vis.visit_span(open);
src/libsyntax/parse/attr.rs
index e99a86e807f7f6294e79356f1e777e65d6f7d728..9b78b56041f21b0c2eda478bd91e703a39651646 100644 (file)
@@ -157,7 +157,7 @@ fn parse_attribute_with_inner_parse_policy(&mut self,
                self.check(&token::OpenDelim(DelimToken::Brace)) {
                    self.parse_token_tree().into()
             } else if self.eat(&token::Eq) {
-                let eq = TokenTree::Token(self.prev_span, token::Eq);
+                let eq = TokenTree::token(self.prev_span, token::Eq);
                 let mut is_interpolated_expr = false;
                 if let token::Interpolated(nt) = &self.token {
                     if let token::NtExpr(..) = **nt {
src/libsyntax/parse/lexer/mod.rs
index 32d5b16dd714f7d09c3fefb933ea03e487b909b7..225db0164fe657b96dbcad189cafaf3bf8b5e61f 100644 (file)
@@ -1596,8 +1596,8 @@ fn t1() {
                                         "/* my source file */ fn main() { println!(\"zebra\"); }\n"
                                             .to_string());
             let id = Ident::from_str("fn");
-            assert_eq!(string_reader.next_token().kind, token::Comment);
-            assert_eq!(string_reader.next_token().kind, token::Whitespace);
+            assert_eq!(string_reader.next_token(), token::Comment);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             let tok1 = string_reader.next_token();
             let tok2 = Token {
                 kind: token::Ident(id, false),
@@ -1605,7 +1605,7 @@ fn t1() {
             };
             assert_eq!(tok1.kind, tok2.kind);
             assert_eq!(tok1.span, tok2.span);
-            assert_eq!(string_reader.next_token().kind, token::Whitespace);
+            assert_eq!(string_reader.next_token(), token::Whitespace);
             // the 'main' id is already read:
             assert_eq!(string_reader.pos.clone(), BytePos(28));
             // read another token:
@@ -1625,7 +1625,7 @@ fn t1() {
     // of tokens (stop checking after exhausting the expected vec)
     fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
         for expected_tok in &expected {
-            assert_eq!(&string_reader.next_token().kind, expected_tok);
+            assert_eq!(&string_reader.next_token(), expected_tok);
         }
     }
 
@@ -1683,7 +1683,7 @@ fn character_a() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
                        mk_lit(token::Char, "a", None));
         })
     }
@@ -1693,7 +1693,7 @@ fn character_space() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
                        mk_lit(token::Char, " ", None));
         })
     }
@@ -1703,7 +1703,7 @@ fn character_escaped() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
                        mk_lit(token::Char, "\\n", None));
         })
     }
@@ -1713,7 +1713,7 @@ fn lifetime_name() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
                        token::Lifetime(Ident::from_str("'abc")));
         })
     }
@@ -1723,7 +1723,7 @@ fn raw_string() {
         with_default_globals(|| {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
-            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
         })
     }
@@ -1735,10 +1735,10 @@ fn literal_suffixes() {
             let sh = mk_sess(sm.clone());
             macro_rules! test {
                 ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
-                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().kind,
+                    assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
                     // with a whitespace separator:
-                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().kind,
+                    assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                                mk_lit(token::$tok_type, $tok_contents, None));
                 }}
             }
@@ -1753,11 +1753,11 @@ macro_rules! test {
             test!("1.0", Float, "1.0");
             test!("1.0e10", Float, "1.0e10");
 
-            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
                        mk_lit(token::Integer, "2", Some("us")));
-            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::StrRaw(3), "raw", Some("suffix")));
-            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().kind,
+            assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
                        mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
         })
     }
@@ -1775,11 +1775,8 @@ fn nested_block_comments() {
             let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
             let sh = mk_sess(sm.clone());
             let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
-            match lexer.next_token().kind {
-                token::Comment => {}
-                _ => panic!("expected a comment!"),
-            }
-            assert_eq!(lexer.next_token().kind, mk_lit(token::Char, "a", None));
+            assert_eq!(lexer.next_token(), token::Comment);
+            assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
         })
     }
 
@@ -1792,9 +1789,8 @@ fn crlf_comments() {
             let comment = lexer.next_token();
             assert_eq!(comment.kind, token::Comment);
             assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
-            assert_eq!(lexer.next_token().kind, token::Whitespace);
-            assert_eq!(lexer.next_token().kind,
-                    token::DocComment(Symbol::intern("/// test")));
+            assert_eq!(lexer.next_token(), token::Whitespace);
+            assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
         })
     }
 }
src/libsyntax/parse/lexer/tokentrees.rs
index 767d37016da87cecece3ad65d3fe2d0911463e71..abff7177abd13dd3627b593d79ea6a1d5f0dafcf 100644 (file)
@@ -203,7 +203,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
                 Err(err)
             },
             _ => {
-                let tt = TokenTree::Token(self.span, self.token.clone());
+                let tt = TokenTree::token(self.span, self.token.clone());
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
src/libsyntax/parse/literal.rs
index 945475ff9818bc6e4d882d0b4960beefa38cd5fd..4b8ef20180f63583b4f03e11a2d76605d81333fa 100644 (file)
@@ -261,7 +261,7 @@ pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
             token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
             _ => token::Literal(self.token),
         };
-        TokenTree::Token(self.span, token).into()
+        TokenTree::token(self.span, token).into()
     }
 }
 
src/libsyntax/parse/mod.rs
index 7f8b96508bdd943a2980ef6a2ca3c9d09e732315..398b4b1da17b0ef74ab0c666b9d63fc311f09c0a 100644 (file)
@@ -385,6 +385,7 @@ mod tests {
     use crate::ast::{self, Ident, PatKind};
     use crate::attr::first_attr_value_str_by_name;
     use crate::ptr::P;
+    use crate::parse::token::Token;
     use crate::print::pprust::item_to_string;
     use crate::tokenstream::{DelimSpan, TokenTree};
     use crate::util::parser_testing::string_to_stream;
@@ -426,9 +427,9 @@ fn string_to_tts_macro () {
             match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
                 (
                     4,
-                    Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
-                    Some(&TokenTree::Token(_, token::Not)),
-                    Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
+                    Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
+                    Some(&TokenTree::Token(Token { kind: token::Not, .. })),
+                    Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
                     Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
                 )
                 if name_macro_rules.name == sym::macro_rules
@@ -438,7 +439,7 @@ fn string_to_tts_macro () {
                         (
                             3,
                             Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
-                            Some(&TokenTree::Token(_, token::FatArrow)),
+                            Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })),
                             Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                         )
                         if macro_delim == token::Paren => {
@@ -446,8 +447,8 @@ fn string_to_tts_macro () {
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
-                                    Some(&TokenTree::Token(_, token::Dollar)),
-                                    Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                                    Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                                    Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
                                 )
                                 if first_delim == token::Paren && ident.name.as_str() == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@@ -456,8 +457,8 @@ fn string_to_tts_macro () {
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
-                                    Some(&TokenTree::Token(_, token::Dollar)),
-                                    Some(&TokenTree::Token(_, token::Ident(ident, false))),
+                                    Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+                                    Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
                                 )
                                 if second_delim == token::Paren && ident.name.as_str() == "a" => {},
                                 _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
@@ -477,16 +478,16 @@ fn string_to_tts_1() {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
 
             let expected = TokenStream::new(vec![
-                TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
-                TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+                TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
+                TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
                     TokenStream::new(vec![
-                        TokenTree::Token(sp(6, 7),
+                        TokenTree::token(sp(6, 7),
                                          token::Ident(Ident::from_str("b"), false)).into(),
-                        TokenTree::Token(sp(8, 9), token::Colon).into(),
-                        TokenTree::Token(sp(10, 13),
+                        TokenTree::token(sp(8, 9), token::Colon).into(),
+                        TokenTree::token(sp(10, 13),
                                          token::Ident(Ident::from_str("i32"), false)).into(),
                     ]).into(),
                 ).into(),
@@ -494,9 +495,9 @@ fn string_to_tts_1() {
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
                     TokenStream::new(vec![
-                        TokenTree::Token(sp(17, 18),
+                        TokenTree::token(sp(17, 18),
                                          token::Ident(Ident::from_str("b"), false)).into(),
-                        TokenTree::Token(sp(18, 19), token::Semi).into(),
+                        TokenTree::token(sp(18, 19), token::Semi).into(),
                     ]).into(),
                 ).into()
             ]);
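
The expected-stream test above exercises both halves of the new call-site idiom: construction through the `TokenTree::token(span, kind)` helper and matching through a struct pattern on the embedded `Token`. A simplified, standalone sketch of the two shapes (the types here are stand-ins, not the compiler's own):

    // Simplified stand-ins for the real `syntax` types, showing the
    // before/after call-site shapes.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum TokenKind { Dollar, Comma }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Token { kind: TokenKind, span: Span }

    #[derive(Debug)]
    enum TokenTree { Token(Token) }

    impl TokenTree {
        // The new convenience constructor keeps the old `(span, kind)`
        // argument order, so call sites migrate mechanically.
        fn token(span: Span, kind: TokenKind) -> TokenTree {
            TokenTree::Token(Token { kind, span })
        }
    }

    fn main() {
        let sp = Span { lo: 0, hi: 1 };
        // Construction: `TokenTree::Token(sp, kind)` becomes `TokenTree::token(sp, kind)`.
        let tt = TokenTree::token(sp, TokenKind::Dollar);
        // Matching: the old `TokenTree::Token(_, token::Dollar)` becomes a
        // struct pattern that skips the span with `..`.
        match tt {
            TokenTree::Token(Token { kind: TokenKind::Dollar, .. }) => {}
            _ => panic!("expected a `$` token"),
        }
    }
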
index 3b7d4e14dbb40fb18282ef53a8cb54952d342dcf..eda67b3a93d8e1e237a5d29045c0f45f22f82afd 100644 (file)
@@ -318,7 +318,7 @@ fn next(&mut self) -> Token {
             }
 
             match tree {
-                TokenTree::Token(span, kind) => return Token { kind, span },
+                TokenTree::Token(token) => return token,
                 TokenTree::Delimited(sp, delim, tts) => {
                     let frame = TokenCursorFrame::new(sp, delim, &tts);
                     self.stack.push(mem::replace(&mut self.frame, frame));
@@ -353,9 +353,9 @@ fn next_desugared(&mut self) -> Token {
             delim_span,
             token::Bracket,
             [
-                TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
-                TokenTree::Token(sp, token::Eq),
-                TokenTree::Token(sp, token::TokenKind::lit(
+                TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+                TokenTree::token(sp, token::Eq),
+                TokenTree::token(sp, token::TokenKind::lit(
                     token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
                 )),
             ]
@@ -366,10 +366,10 @@ fn next_desugared(&mut self) -> Token {
             delim_span,
             token::NoDelim,
             &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
-                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
                     .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::Token(sp, token::Pound), body]
+                [TokenTree::token(sp, token::Pound), body]
                     .iter().cloned().collect::<TokenStream>().into()
             },
         )));
@@ -1052,7 +1052,7 @@ pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
 
         f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
             Some(tree) => match tree {
-                TokenTree::Token(_, tok) => tok,
+                TokenTree::Token(token) => token.kind,
                 TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
             },
             None => token::CloseDelim(self.token_cursor.frame.delim),
@@ -1065,7 +1065,7 @@ pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
         }
 
         match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Token(token)) => token.span,
             Some(TokenTree::Delimited(span, ..)) => span.entire(),
             None => self.look_ahead_span(dist - 1),
         }
@@ -2675,7 +2675,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
             _ => {
                 let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
                 self.bump();
-                TokenTree::Token(span, token)
+                TokenTree::token(span, token)
             }
         }
     }
@@ -4344,7 +4344,7 @@ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                     };
                     TokenStream::new(vec![
                         args.into(),
-                        TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+                        TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
                         body.into(),
                     ])
                 } else {
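
With kind and span carried by one value, the token cursor above returns the stored `Token` unchanged instead of reassembling `Token { kind, span }`, and lookahead projects whichever field it needs. A standalone sketch of the two access patterns (simplified stand-in types):

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum TokenKind { Comma }

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span(u32, u32);

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Token { kind: TokenKind, span: Span }

    enum TokenTree { Token(Token), Delimited(Span) }

    // Before: `TokenTree::Token(span, kind) => return Token { kind, span }`.
    fn next(tree: &TokenTree) -> Option<Token> {
        match tree {
            TokenTree::Token(token) => Some(*token),
            TokenTree::Delimited(..) => None, // the real cursor descends instead
        }
    }

    // Lookahead projects a single field rather than re-pairing both halves.
    fn look_ahead_span(tree: &TokenTree) -> Span {
        match tree {
            TokenTree::Token(token) => token.span,
            TokenTree::Delimited(span) => *span,
        }
    }

    fn main() {
        let tt = TokenTree::Token(Token { kind: TokenKind::Comma, span: Span(3, 4) });
        assert_eq!(next(&tt), Some(Token { kind: TokenKind::Comma, span: Span(3, 4) }));
        assert_eq!(look_ahead_span(&tt), Span(3, 4));
    }
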
index 3679e4050ff4210ab18bf29f817ce3658f0843a7..a06bf9fae7c2958e2132e9533cca171d40913866 100644 (file)
@@ -18,6 +18,7 @@
 
 use std::fmt;
 use std::mem;
+use std::ops::Deref;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert_size;
 use rustc_data_structures::sync::Lrc;
@@ -165,7 +166,7 @@ fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
     ].contains(&ident.name)
 }
 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub enum TokenKind {
     /* Expression-operator symbols. */
     Eq,
@@ -235,7 +236,7 @@ pub enum TokenKind {
 #[cfg(target_arch = "x86_64")]
 static_assert_size!(TokenKind, 16);
 
-#[derive(Clone, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
 pub struct Token {
     pub kind: TokenKind,
     pub span: Span,
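
`Token` picks up `PartialEq` and the serialization derives above so it can be compared and encoded as one unit, while the pre-existing `static_assert_size!` line guards the size of `TokenKind` (16 bytes on x86_64), since tokens are copied pervasively. A tiny standalone way to inspect a stand-in's size (`Lit(u64)` is a hypothetical payload, not the real literal representation):

    #[derive(Clone)]
    enum TokenKind { Eq, Lit(u64) }

    fn main() {
        // A 1-byte tag padded to the 8-byte alignment of `u64`, plus the
        // payload: 16 bytes, matching the guard's expectation.
        println!("size_of::<TokenKind>() = {}", std::mem::size_of::<TokenKind>());
    }
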
@@ -614,6 +615,14 @@ fn eq(&self, rhs: &TokenKind) -> bool {
     }
 }
 
+// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
+impl Deref for Token {
+    type Target = TokenKind;
+    fn deref(&self) -> &Self::Target {
+        &self.kind
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable)]
 /// For interpolation during macro expansion.
 pub enum Nonterminal {
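
The `Deref` impl is the compatibility shim that keeps this change small: every inherent method still defined on `TokenKind` remains callable through a `Token` receiver via auto-deref, so call sites such as the keyword checks in `trace_macros` below keep compiling until the methods move. A minimal standalone sketch of the mechanism:

    use std::ops::Deref;

    #[derive(PartialEq)]
    enum TokenKind { Comma, Semi }

    struct Span;

    struct Token { kind: TokenKind, span: Span }

    impl TokenKind {
        // Stands in for the many inherent methods that still live on `TokenKind`.
        fn is_comma(&self) -> bool { *self == TokenKind::Comma }
    }

    impl Deref for Token {
        type Target = TokenKind;
        fn deref(&self) -> &TokenKind { &self.kind }
    }

    fn main() {
        let token = Token { kind: TokenKind::Comma, span: Span };
        // Auto-deref resolves `token.is_comma()` to `token.kind.is_comma()`,
        // so existing call sites keep compiling during the migration.
        assert!(token.is_comma());
    }
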
@@ -704,11 +713,11 @@ pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
             }
             Nonterminal::NtIdent(ident, is_raw) => {
                 let token = Ident(ident, is_raw);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, token).into())
             }
             Nonterminal::NtLifetime(ident) => {
                 let token = Lifetime(ident);
-                Some(TokenTree::Token(ident.span, token).into())
+                Some(TokenTree::token(ident.span, token).into())
             }
             Nonterminal::NtTT(ref tt) => {
                 Some(tt.clone().into())
@@ -794,7 +803,7 @@ fn prepend_attrs(sess: &ParseSess,
         if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
             let ident = attr.path.segments[0].ident;
             let token = Ident(ident, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+            brackets.push(tokenstream::TokenTree::token(ident.span, token));
 
         // ... and for more complicated paths, fall back to a reparse hack that
         // should eventually be removed.
@@ -808,7 +817,7 @@ fn prepend_attrs(sess: &ParseSess,
         // The span we list here for `#` and for `[ ... ]` are both wrong in
         // that it encompasses more than each token, but it hopefully is "good
         // enough" for now at least.
-        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+        builder.push(tokenstream::TokenTree::token(attr.span, Pound));
         let delim_span = DelimSpan::from_single(attr.span);
         builder.push(tokenstream::TokenTree::Delimited(
             delim_span, DelimToken::Bracket, brackets.build().into()));
index cd7106191bee27510c2ebae8b7a7a656e7d937d4..07acfb5dc86c3f7feb152d8f98cacf4e83ff8024 100644 (file)
@@ -724,10 +724,10 @@ fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> {
     /// expression arguments as expressions). It can be done! I think.
     fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
         match tt {
-            TokenTree::Token(_, ref tk) => {
-                self.writer().word(token_to_string(tk))?;
-                match *tk {
-                    parse::token::DocComment(..) => {
+            TokenTree::Token(ref token) => {
+                self.writer().word(token_to_string(&token))?;
+                match token.kind {
+                    token::DocComment(..) => {
                         self.writer().hardbreak()
                     }
                     _ => Ok(())
index 654c21fd094e907224ac7b7c5f384a03148c95b4..e6fe33d6ccf26afa9d9fb25c51564b93ff81ea33 100644 (file)
@@ -16,7 +16,7 @@
 use crate::ext::base;
 use crate::ext::tt::{macro_parser, quoted};
 use crate::parse::Directory;
-use crate::parse::token::{self, DelimToken, TokenKind};
+use crate::parse::token::{self, DelimToken, Token, TokenKind};
 use crate::print::pprust;
 
 use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
@@ -44,7 +44,7 @@
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
     /// A single token
-    Token(Span, token::TokenKind),
+    Token(Token),
     /// A delimited sequence of token trees
     Delimited(DelimSpan, DelimToken, TokenStream),
 }
@@ -53,8 +53,7 @@ pub enum TokenTree {
 #[cfg(parallel_compiler)]
 fn _dummy()
 where
-    Span: Send + Sync,
-    token::TokenKind: Send + Sync,
+    Token: Send + Sync,
     DelimSpan: Send + Sync,
     DelimToken: Send + Sync,
     TokenStream: Send + Sync,
@@ -86,12 +85,11 @@ pub fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStrea
     /// Checks if this TokenTree is equal to the other, regardless of span information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
-            (&TokenTree::Delimited(_, delim, ref tts),
-             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+            (TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind,
+            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.eq_unspanned(&tts2)
             }
-            (_, _) => false,
+            _ => false,
         }
     }
 
@@ -102,37 +100,36 @@ pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
     // different method.
     pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => {
-                tk.probably_equal_for_proc_macro(tk2)
+            (TokenTree::Token(token), TokenTree::Token(token2)) => {
+                token.probably_equal_for_proc_macro(token2)
             }
-            (&TokenTree::Delimited(_, delim, ref tts),
-             &TokenTree::Delimited(_, delim2, ref tts2)) => {
+            (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
             }
-            (_, _) => false,
+            _ => false,
         }
     }
 
     /// Retrieves the TokenTree's span.
     pub fn span(&self) -> Span {
-        match *self {
-            TokenTree::Token(sp, _) => sp,
+        match self {
+            TokenTree::Token(token) => token.span,
             TokenTree::Delimited(sp, ..) => sp.entire(),
         }
     }
 
     /// Modify the `TokenTree`'s span in-place.
     pub fn set_span(&mut self, span: Span) {
-        match *self {
-            TokenTree::Token(ref mut sp, _) => *sp = span,
-            TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span),
+        match self {
+            TokenTree::Token(token) => token.span = span,
+            TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
         }
     }
 
     /// Indicates if the stream is a token that is equal to the provided token.
     pub fn eq_token(&self, t: TokenKind) -> bool {
-        match *self {
-            TokenTree::Token(_, ref tk) => *tk == t,
+        match self {
+            TokenTree::Token(token) => *token == t,
             _ => false,
         }
     }
@@ -141,6 +138,10 @@ pub fn joint(self) -> TokenStream {
         TokenStream::new(vec![(self, Joint)])
     }
 
+    pub fn token(span: Span, kind: TokenKind) -> TokenTree {
+        TokenTree::Token(Token { kind, span })
+    }
+
     /// Returns the opening delimiter as a token tree.
     pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
         let open_span = if span.is_dummy() {
@@ -148,7 +149,7 @@ pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
         } else {
             span.with_hi(span.lo() + BytePos(delim.len() as u32))
         };
-        TokenTree::Token(open_span, token::OpenDelim(delim))
+        TokenTree::token(open_span, token::OpenDelim(delim))
     }
 
     /// Returns the closing delimiter as a token tree.
@@ -158,7 +159,7 @@ pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
         } else {
             span.with_lo(span.hi() - BytePos(delim.len() as u32))
         };
-        TokenTree::Token(close_span, token::CloseDelim(delim))
+        TokenTree::token(close_span, token::CloseDelim(delim))
     }
 }
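
`open_tt` and `close_tt` above carve a one-delimiter-wide span off either end of the delimited span before handing it to the new constructor. A standalone sketch of that span arithmetic, assuming a one-byte delimiter (`delim.len() == 1`) and the byte offsets from the `string_to_tts_1` test:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct BytePos(u32);

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: BytePos, hi: BytePos }

    impl Span {
        fn with_hi(self, hi: BytePos) -> Span { Span { hi, ..self } }
        fn with_lo(self, lo: BytePos) -> Span { Span { lo, ..self } }
    }

    fn main() {
        // Span of `(b : i32)` in the test above: bytes 5..14.
        let span = Span { lo: BytePos(5), hi: BytePos(14) };
        let open = span.with_hi(BytePos(span.lo.0 + 1));  // the `(` at 5..6
        let close = span.with_lo(BytePos(span.hi.0 - 1)); // the `)` at 13..14
        assert_eq!(open, Span { lo: BytePos(5), hi: BytePos(6) });
        assert_eq!(close, Span { lo: BytePos(13), hi: BytePos(14) });
    }
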
 
@@ -201,18 +202,17 @@ pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
             while let Some((pos, ts)) = iter.next() {
                 if let Some((_, next)) = iter.peek() {
                     let sp = match (&ts, &next) {
-                        (_, (TokenTree::Token(_, token::Comma), _)) => continue,
-                        ((TokenTree::Token(sp, token_left), NonJoint),
-                         (TokenTree::Token(_, token_right), _))
+                        (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                        ((TokenTree::Token(token_left), NonJoint), (TokenTree::Token(token_right), _))
                         if ((token_left.is_ident() && !token_left.is_reserved_ident())
                             || token_left.is_lit()) &&
                             ((token_right.is_ident() && !token_right.is_reserved_ident())
-                            || token_right.is_lit()) => *sp,
+                            || token_right.is_lit()) => token_left.span,
                         ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
                         _ => continue,
                     };
                     let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::Token(sp, token::Comma), NonJoint);
+                    let comma = (TokenTree::token(sp, token::Comma), NonJoint);
                     suggestion = Some((pos, comma, sp));
                 }
             }
@@ -241,12 +241,6 @@ fn from(tree: TokenTree) -> TreeAndJoint {
     }
 }
 
-impl From<TokenKind> for TokenStream {
-    fn from(token: TokenKind) -> TokenStream {
-        TokenTree::Token(DUMMY_SP, token).into()
-    }
-}
-
 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
         TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
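
Removing `impl From<TokenKind> for TokenStream` deletes the one conversion that attached `DUMMY_SP` implicitly; stream construction now always names a span, even when that span is still `DUMMY_SP`, as in `expand_assert` below. A simplified sketch of the surviving idiom:

    #[derive(Clone, Copy)]
    struct Span;
    const DUMMY_SP: Span = Span;

    #[derive(Clone, Copy)]
    enum TokenKind { FatArrow }

    struct Token { kind: TokenKind, span: Span }

    enum TokenTree { Token(Token) }

    struct TokenStream(Vec<TokenTree>);

    impl From<TokenTree> for TokenStream {
        fn from(tree: TokenTree) -> TokenStream { TokenStream(vec![tree]) }
    }

    fn main() {
        // Before: `TokenStream::from(kind)` picked DUMMY_SP behind the scenes.
        // Now every call site spells the span out explicitly.
        let _stream: TokenStream =
            TokenTree::Token(Token { kind: TokenKind::FatArrow, span: DUMMY_SP }).into();
    }
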
@@ -349,22 +343,25 @@ pub fn probably_equal_for_proc_macro(&self, other: &TokenStream) -> bool {
         // streams, making a comparison between a token stream generated from an
         // AST and a token stream which was parsed into an AST more reliable.
         fn semantic_tree(tree: &TokenTree) -> bool {
-            match tree {
-                // The pretty printer tends to add trailing commas to
-                // everything, and in particular, after struct fields.
-                | TokenTree::Token(_, token::Comma)
-                // The pretty printer emits `NoDelim` as whitespace.
-                | TokenTree::Token(_, token::OpenDelim(DelimToken::NoDelim))
-                | TokenTree::Token(_, token::CloseDelim(DelimToken::NoDelim))
-                // The pretty printer collapses many semicolons into one.
-                | TokenTree::Token(_, token::Semi)
-                // The pretty printer collapses whitespace arbitrarily and can
-                // introduce whitespace from `NoDelim`.
-                | TokenTree::Token(_, token::Whitespace)
-                // The pretty printer can turn `$crate` into `::crate_name`
-                | TokenTree::Token(_, token::ModSep) => false,
-                _ => true
+            if let TokenTree::Token(token) = tree {
+                if let
+                    // The pretty printer tends to add trailing commas to
+                    // everything, and in particular, after struct fields.
+                    | token::Comma
+                    // The pretty printer emits `NoDelim` as whitespace.
+                    | token::OpenDelim(DelimToken::NoDelim)
+                    | token::CloseDelim(DelimToken::NoDelim)
+                    // The pretty printer collapses many semicolons into one.
+                    | token::Semi
+                    // The pretty printer collapses whitespace arbitrarily and can
+                    // introduce whitespace from `NoDelim`.
+                    | token::Whitespace
+                    // The pretty printer can turn `$crate` into `::crate_name`
+                    | token::ModSep = token.kind {
+                    return false;
+                }
             }
+            true
         }
 
         let mut t1 = self.trees().filter(semantic_tree);
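
`probably_equal_for_proc_macro` compares two streams only after filtering out pretty-printer noise, and the rewrite above from `match` to `if let` preserves the predicate exactly: only the listed token kinds are dropped. A standalone sketch of the filter over simplified kinds:

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum TokenKind { Comma, Semi, Whitespace, ModSep, Ident }

    // `true` iff the token carries meaning that must match between the two
    // streams; pretty-printer artifacts are skipped on both sides.
    fn semantic(kind: TokenKind) -> bool {
        match kind {
            TokenKind::Comma
            | TokenKind::Semi
            | TokenKind::Whitespace
            | TokenKind::ModSep => false,
            _ => true,
        }
    }

    fn main() {
        let printed = [TokenKind::Ident, TokenKind::Comma, TokenKind::Semi];
        let parsed = [TokenKind::Ident];
        let l: Vec<_> = printed.iter().copied().filter(|&k| semantic(k)).collect();
        let r: Vec<_> = parsed.iter().copied().filter(|&k| semantic(k)).collect();
        assert_eq!(l, r); // a trailing comma and an extra semicolon are ignored
    }
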
@@ -430,13 +427,13 @@ pub fn new() -> TokenStreamBuilder {
     pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
         let stream = stream.into();
         let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
-        if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint {
-            if let Some((TokenTree::Token(span, tok), is_joint)) = stream.first_tree_and_joint() {
-                if let Some(glued_tok) = last_tok.glue(tok) {
+        if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
+            if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
+                if let Some(glued_tok) = last_token.kind.glue(token.kind) {
                     let last_stream = self.0.pop().unwrap();
                     self.push_all_but_last_tree(&last_stream);
-                    let glued_span = last_span.to(span);
-                    let glued_tt = TokenTree::Token(glued_span, glued_tok);
+                    let glued_span = last_token.span.to(token.span);
+                    let glued_tt = TokenTree::token(glued_span, glued_tok);
                     let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
                     self.0.push(glued_tokenstream);
                     self.push_all_but_first_tree(&stream);
@@ -663,7 +660,7 @@ fn test_is_empty() {
         with_default_globals(|| {
             let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
             let test1: TokenStream =
-                TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
+                TokenTree::token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
             let test2 = string_to_ts("foo(bar::baz)");
 
             assert_eq!(test0.is_empty(), true);
@@ -676,9 +673,9 @@ fn test_is_empty() {
     fn test_dotdotdot() {
         with_default_globals(|| {
             let mut builder = TokenStreamBuilder::new();
-            builder.push(TokenTree::Token(sp(0, 1), token::Dot).joint());
-            builder.push(TokenTree::Token(sp(1, 2), token::Dot).joint());
-            builder.push(TokenTree::Token(sp(2, 3), token::Dot));
+            builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
+            builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
+            builder.push(TokenTree::token(sp(2, 3), token::Dot));
             let stream = builder.build();
             assert!(stream.eq_unspanned(&string_to_ts("...")));
             assert_eq!(stream.trees().count(), 1);
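
The builder change above glues a joint trailing token onto the first token of the next stream, and `test_dotdotdot` pins the behavior down: three joint `.` tokens collapse into a single `...` tree. A cut-down, standalone stand-in for the gluing step (only the `.` family of `TokenKind::glue`):

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum TokenKind { Dot, DotDot, DotDotDot }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Token { kind: TokenKind, span: Span }

    fn glue(left: Token, right: Token) -> Option<Token> {
        let kind = match (left.kind, right.kind) {
            (TokenKind::Dot, TokenKind::Dot) => TokenKind::DotDot,
            (TokenKind::DotDot, TokenKind::Dot) => TokenKind::DotDotDot,
            _ => return None,
        };
        // As in `TokenStreamBuilder::push`: the glued span joins both halves.
        Some(Token { kind, span: Span { lo: left.span.lo, hi: right.span.hi } })
    }

    fn main() {
        let dot = |lo| Token { kind: TokenKind::Dot, span: Span { lo, hi: lo + 1 } };
        let glued = glue(glue(dot(0), dot(1)).unwrap(), dot(2)).unwrap();
        assert_eq!(glued.kind, TokenKind::DotDotDot);
        assert_eq!(glued.span, Span { lo: 0, hi: 3 });
    }
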
index 35f70092be432463e9c312aee31a4176d492d151..e32c5f3f3ecad948f7c7c6895ff70200743feeca 100644 (file)
@@ -855,7 +855,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
 
 pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
     match tt {
-        TokenTree::Token(_, tok) => visitor.visit_token(tok),
+        TokenTree::Token(token) => visitor.visit_token(token.kind),
         TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
     }
 }
index 4d7083c1a790b955abbef10861be3c7f156d0c80..83c4c809de372f48fd9e0044b2e5ff62e872964f 100644 (file)
@@ -9,7 +9,8 @@
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::{self, token};
+use syntax::parse;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::ast::AsmDialect;
@@ -86,8 +87,8 @@ fn parse_inline_asm<'a>(
     let first_colon = tts.iter()
         .position(|tt| {
             match *tt {
-                tokenstream::TokenTree::Token(_, token::Colon) |
-                tokenstream::TokenTree::Token(_, token::ModSep) => true,
+                tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
+                tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
                 _ => false,
             }
         })
index 29dd445e75168ef11a3d0ce7ca31d9da02e1838e..8a297a5c9bc19996d34d91483b85c9585a5aa50f 100644 (file)
@@ -29,7 +29,7 @@ pub fn expand_assert<'cx>(
     let panic_call = Mac_ {
         path: Path::from_ident(Ident::new(sym::panic, sp)),
         tts: custom_message.unwrap_or_else(|| {
-            TokenStream::from(TokenTree::Token(
+            TokenStream::from(TokenTree::token(
                 DUMMY_SP,
                 TokenKind::lit(token::Str, Symbol::intern(&format!(
                     "assertion failed: {}",
index 77c53f402cc9f03b7f21bdea386aa457ac368d2d..59f25af37427641e6e9bec150420a0266dc48a4c 100644 (file)
@@ -3,7 +3,7 @@
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::feature_gate;
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax_pos::Span;
 use syntax_pos::symbol::{Symbol, sym};
@@ -30,7 +30,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                TokenTree::Token(_, token::Comma) => {}
+                TokenTree::Token(Token { kind: token::Comma, .. }) => {}
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::any(sp);
@@ -38,7 +38,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
             }
         } else {
             match *e {
-                TokenTree::Token(_, token::Ident(ident, _)) =>
+                TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) =>
                     res_str.push_str(&ident.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
index c1d93805a5811a62d4bd04efb43eb3e5cc7663b3..3deab97db88c0fe28e255e59412f3724c90e1d7e 100644 (file)
@@ -69,7 +69,7 @@ fn expand(&self,
         MarkAttrs(&self.attrs).visit_item(&item);
 
         let token = token::Interpolated(Lrc::new(token::NtItem(item)));
-        let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
+        let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
 
         let server = proc_macro_server::Rustc::new(ecx);
         let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
index 119b83b7527b491f5f59e80c1c48b4610a9b2906..26eb9e9d4fc1fdad7ba86baa958d8eee446e476f 100644 (file)
@@ -55,7 +55,7 @@ fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mu
         use syntax::parse::token::*;
 
         let joint = is_joint == Joint;
-        let (span, token) = match tree {
+        let Token { kind, span } = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
                 return TokenTree::Group(Group {
@@ -64,7 +64,7 @@ fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mu
                     span,
                 });
             }
-            tokenstream::TokenTree::Token(span, token) => (span, token),
+            tokenstream::TokenTree::Token(token) => token,
         };
 
         macro_rules! tt {
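
`from_internal` now binds both halves in a single step: the `match` either returns early for delimited groups or yields the whole `Token`, which an irrefutable `let` struct pattern splits into `kind` and `span`. A standalone sketch of that shape:

    struct Span;

    enum TokenKind { Comma }

    struct Token { kind: TokenKind, span: Span }

    enum TokenTree { Token(Token), Delimited(Span) }

    fn split(tree: TokenTree) -> Option<(TokenKind, Span)> {
        // The match either takes the early exit (groups, in the real code)
        // or yields the whole `Token`; the irrefutable struct pattern then
        // names both fields without a second match.
        let Token { kind, span } = match tree {
            TokenTree::Delimited(_) => return None,
            TokenTree::Token(token) => token,
        };
        Some((kind, span))
    }

    fn main() {
        let tt = TokenTree::Token(Token { kind: TokenKind::Comma, span: Span });
        assert!(split(tt).is_some());
    }
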
@@ -93,7 +93,7 @@ macro_rules! op {
             }};
         }
 
-        match token {
+        match kind {
             Eq => op!('='),
             Lt => op!('<'),
             Le => op!('<', '='),
@@ -164,7 +164,7 @@ macro_rules! op {
                     TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                 ]
                 .into_iter()
-                .map(|token| tokenstream::TokenTree::Token(span, token))
+                .map(|kind| tokenstream::TokenTree::token(span, kind))
                 .collect();
                 stack.push(TokenTree::Group(Group {
                     delimiter: Delimiter::Bracket,
@@ -212,7 +212,7 @@ fn to_internal(self) -> TokenStream {
             }
             TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                 let token = Ident(ast::Ident::new(sym, span), is_raw);
-                return tokenstream::TokenTree::Token(span, token).into();
+                return tokenstream::TokenTree::token(span, token).into();
             }
             TokenTree::Literal(self::Literal {
                 lit: token::Lit { kind: token::Integer, symbol, suffix },
@@ -221,8 +221,8 @@ fn to_internal(self) -> TokenStream {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let integer = TokenKind::lit(token::Integer, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, integer);
+                let a = tokenstream::TokenTree::token(span, minus);
+                let b = tokenstream::TokenTree::token(span, integer);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal {
@@ -232,16 +232,16 @@ fn to_internal(self) -> TokenStream {
                 let minus = BinOp(BinOpToken::Minus);
                 let symbol = Symbol::intern(&symbol.as_str()[1..]);
                 let float = TokenKind::lit(token::Float, symbol, suffix);
-                let a = tokenstream::TokenTree::Token(span, minus);
-                let b = tokenstream::TokenTree::Token(span, float);
+                let a = tokenstream::TokenTree::token(span, minus);
+                let b = tokenstream::TokenTree::token(span, float);
                 return vec![a, b].into_iter().collect();
             }
             TokenTree::Literal(self::Literal { lit, span }) => {
-                return tokenstream::TokenTree::Token(span, Literal(lit)).into()
+                return tokenstream::TokenTree::token(span, Literal(lit)).into()
             }
         };
 
-        let token = match ch {
+        let kind = match ch {
             '=' => Eq,
             '<' => Lt,
             '>' => Gt,
@@ -267,7 +267,7 @@ fn to_internal(self) -> TokenStream {
             _ => unreachable!(),
         };
 
-        let tree = tokenstream::TokenTree::Token(span, token);
+        let tree = tokenstream::TokenTree::token(span, kind);
         TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
     }
 }
index 61ef94560ccb95c6b718dd3bcdf3ebc56cc55f4f..6c74f77ff1fb5c4268c1248591c22e21bbd41f29 100644 (file)
@@ -17,10 +17,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
     }
 
     match (tt.len(), tt.first()) {
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
+        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => {
             cx.set_trace_macros(true);
         }
-        (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
+        (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),