use syntax::std_inject;
use syntax::symbol::{kw, sym, Symbol};
use syntax::tokenstream::{TokenStream, TokenTree};
-use syntax::parse::token::{self, TokenKind};
+use syntax::parse::token::{self, Token};
use syntax::visit::{self, Visitor};
use syntax_pos::{DUMMY_SP, edition, Span};
fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
match tree {
- TokenTree::Token(span, token) => self.lower_token(token, span),
+ TokenTree::Token(token) => self.lower_token(token),
TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
delim,
}
}
- fn lower_token(&mut self, token: TokenKind, span: Span) -> TokenStream {
- match token {
+ fn lower_token(&mut self, token: Token) -> TokenStream {
+ match token.kind {
token::Interpolated(nt) => {
- let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
+ let tts = nt.to_tokenstream(&self.sess.parse_sess, token.span);
self.lower_token_stream(tts)
}
- other => TokenTree::Token(span, other).into(),
+ _ => TokenTree::Token(token).into(),
}
}
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
- tokenstream::TokenTree::Token(span, ref token) => {
- span.hash_stable(hcx, hasher);
- hash_token(token, hcx, hasher);
+ tokenstream::TokenTree::Token(ref token) => {
+ token.hash_stable(hcx, hasher);
}
tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
span.hash_stable(hcx, hasher);
suffix
});
-fn hash_token<'a, 'gcx, W: StableHasherResult>(
- token: &token::TokenKind,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>,
-) {
- mem::discriminant(token).hash_stable(hcx, hasher);
- match *token {
- token::Eq |
- token::Lt |
- token::Le |
- token::EqEq |
- token::Ne |
- token::Ge |
- token::Gt |
- token::AndAnd |
- token::OrOr |
- token::Not |
- token::Tilde |
- token::At |
- token::Dot |
- token::DotDot |
- token::DotDotDot |
- token::DotDotEq |
- token::Comma |
- token::Semi |
- token::Colon |
- token::ModSep |
- token::RArrow |
- token::LArrow |
- token::FatArrow |
- token::Pound |
- token::Dollar |
- token::Question |
- token::SingleQuote |
- token::Whitespace |
- token::Comment |
- token::Eof => {}
-
- token::BinOp(bin_op_token) |
- token::BinOpEq(bin_op_token) => {
- std_hash::Hash::hash(&bin_op_token, hasher);
- }
+impl<'a> HashStable<StableHashingContext<'a>> for token::TokenKind {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'a>,
+ hasher: &mut StableHasher<W>) {
+ mem::discriminant(self).hash_stable(hcx, hasher);
+ match *self {
+ token::Eq |
+ token::Lt |
+ token::Le |
+ token::EqEq |
+ token::Ne |
+ token::Ge |
+ token::Gt |
+ token::AndAnd |
+ token::OrOr |
+ token::Not |
+ token::Tilde |
+ token::At |
+ token::Dot |
+ token::DotDot |
+ token::DotDotDot |
+ token::DotDotEq |
+ token::Comma |
+ token::Semi |
+ token::Colon |
+ token::ModSep |
+ token::RArrow |
+ token::LArrow |
+ token::FatArrow |
+ token::Pound |
+ token::Dollar |
+ token::Question |
+ token::SingleQuote |
+ token::Whitespace |
+ token::Comment |
+ token::Eof => {}
+
+ token::BinOp(bin_op_token) |
+ token::BinOpEq(bin_op_token) => {
+ std_hash::Hash::hash(&bin_op_token, hasher);
+ }
- token::OpenDelim(delim_token) |
- token::CloseDelim(delim_token) => {
- std_hash::Hash::hash(&delim_token, hasher);
- }
- token::Literal(lit) => lit.hash_stable(hcx, hasher),
+ token::OpenDelim(delim_token) |
+ token::CloseDelim(delim_token) => {
+ std_hash::Hash::hash(&delim_token, hasher);
+ }
+ token::Literal(lit) => lit.hash_stable(hcx, hasher),
- token::Ident(ident, is_raw) => {
- ident.name.hash_stable(hcx, hasher);
- is_raw.hash_stable(hcx, hasher);
- }
- token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
+ token::Ident(ident, is_raw) => {
+ ident.name.hash_stable(hcx, hasher);
+ is_raw.hash_stable(hcx, hasher);
+ }
+ token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
- token::Interpolated(_) => {
- bug!("interpolated tokens should not be present in the HIR")
- }
+ token::Interpolated(_) => {
+ bug!("interpolated tokens should not be present in the HIR")
+ }
- token::DocComment(val) |
- token::Shebang(val) => val.hash_stable(hcx, hasher),
+ token::DocComment(val) |
+ token::Shebang(val) => val.hash_stable(hcx, hasher),
+ }
}
}
+impl_stable_hash_for!(struct token::Token {
+ kind,
+ span
+});
+
impl_stable_hash_for!(enum ::syntax::ast::NestedMetaItem {
MetaItem(meta_item),
Literal(lit)
fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: TokenStream) {
for tt in tokens.into_trees() {
match tt {
- TokenTree::Token(span, tok) => match tok.ident() {
+ TokenTree::Token(token) => match token.ident() {
// only report non-raw idents
Some((ident, false)) => {
self.check_ident_token(cx, UnderMacro(true), ast::Ident {
- span: span.substitute_dummy(ident.span),
+ span: token.span.substitute_dummy(ident.span),
..ident
});
}
// reference or dereference operator or a reference or pointer type, instead of the
// bit-and or multiplication operator.
token::BinOp(token::And) | token::BinOp(token::Star)
- if self.lexer.peek().kind != token::Whitespace => Class::RefKeyWord,
+ if self.lexer.peek() != token::Whitespace => Class::RefKeyWord,
// Consider this as part of a macro invocation if there was a
// leading identifier.
sym::Option | sym::Result => Class::PreludeTy,
sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
- _ if token.kind.is_reserved_ident() => Class::KeyWord,
+ _ if token.is_reserved_ident() => Class::KeyWord,
_ => {
if self.in_macro_nonterminal {
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::parse::parser::Parser;
use crate::parse::{self, ParseSess, PResult};
-use crate::parse::token::{self, TokenKind};
+use crate::parse::token::{self, Token, TokenKind};
use crate::ptr::P;
use crate::symbol::{sym, Symbol};
use crate::ThinVec;
let mod_sep_span = Span::new(last_pos,
segment.ident.span.lo(),
segment.ident.span.ctxt());
- idents.push(TokenTree::Token(mod_sep_span, token::ModSep).into());
+ idents.push(TokenTree::token(mod_sep_span, token::ModSep).into());
}
- idents.push(TokenTree::Token(segment.ident.span,
+ idents.push(TokenTree::token(segment.ident.span,
TokenKind::from_ast_ident(segment.ident)).into());
last_pos = segment.ident.span.hi();
}
{
// FIXME: Share code with `parse_path`.
let path = match tokens.next() {
- Some(TokenTree::Token(span, token @ token::Ident(..))) |
- Some(TokenTree::Token(span, token @ token::ModSep)) => 'arm: {
- let mut segments = if let token::Ident(ident, _) = token {
- if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
+ Some(TokenTree::Token(Token { kind: kind @ token::Ident(..), span })) |
+ Some(TokenTree::Token(Token { kind: kind @ token::ModSep, span })) => 'arm: {
+ let mut segments = if let token::Ident(ident, _) = kind {
+ if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
tokens.next();
vec![PathSegment::from_ident(ident.with_span_pos(span))]
} else {
vec![PathSegment::path_root(span)]
};
loop {
- if let Some(TokenTree::Token(span,
- token::Ident(ident, _))) = tokens.next() {
+ if let Some(TokenTree::Token(Token { kind: token::Ident(ident, _), span })) = tokens.next() {
segments.push(PathSegment::from_ident(ident.with_span_pos(span)));
} else {
return None;
}
- if let Some(TokenTree::Token(_, token::ModSep)) = tokens.peek() {
+ if let Some(TokenTree::Token(Token { kind: token::ModSep, .. })) = tokens.peek() {
tokens.next();
} else {
break;
let span = span.with_hi(segments.last().unwrap().ident.span.hi());
Path { span, segments }
}
- Some(TokenTree::Token(_, token::Interpolated(nt))) => match *nt {
+ Some(TokenTree::Token(Token { kind: token::Interpolated(nt), .. })) => match *nt {
token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
token::Nonterminal::NtPath(ref path) => path.clone(),
match *self {
MetaItemKind::Word => TokenStream::empty(),
MetaItemKind::NameValue(ref lit) => {
- let mut vec = vec![TokenTree::Token(span, token::Eq).into()];
+ let mut vec = vec![TokenTree::token(span, token::Eq).into()];
lit.tokens().append_to_tree_and_joint_vec(&mut vec);
TokenStream::new(vec)
}
let mut tokens = Vec::new();
for (i, item) in list.iter().enumerate() {
if i > 0 {
- tokens.push(TokenTree::Token(span, token::Comma).into());
+ tokens.push(TokenTree::token(span, token::Comma).into());
}
item.tokens().append_to_tree_and_joint_vec(&mut tokens);
}
where I: Iterator<Item = TokenTree>,
{
let delimited = match tokens.peek().cloned() {
- Some(TokenTree::Token(_, token::Eq)) => {
+ Some(TokenTree::Token(token)) if token == token::Eq => {
tokens.next();
- return if let Some(TokenTree::Token(span, token)) = tokens.next() {
- Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue)
+ return if let Some(TokenTree::Token(token)) = tokens.next() {
+ Lit::from_token(&token, token.span).ok().map(MetaItemKind::NameValue)
} else {
None
};
let item = NestedMetaItem::from_tokens(&mut tokens)?;
result.push(item);
match tokens.next() {
- None | Some(TokenTree::Token(_, token::Comma)) => {}
+ None | Some(TokenTree::Token(Token { kind: token::Comma, .. })) => {}
_ => return None,
}
}
fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
where I: Iterator<Item = TokenTree>,
{
- if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
- if let Ok(lit) = Lit::from_token(&token, span) {
+ if let Some(TokenTree::Token(token)) = tokens.peek().cloned() {
+ if let Ok(lit) = Lit::from_token(&token, token.span) {
tokens.next();
return Some(NestedMetaItem::Literal(lit));
}
use crate::source_map;
use crate::ext::base::{ExtCtxt, MacEager, MacResult};
use crate::ext::build::AstBuilder;
-use crate::parse::token;
+use crate::parse::token::{self, Token};
use crate::ptr::P;
use crate::symbol::kw;
use crate::tokenstream::{TokenTree};
token_tree: &[TokenTree])
-> Box<dyn MacResult+'cx> {
let code = match (token_tree.len(), token_tree.get(0)) {
- (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code,
+ (1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. }))) => code,
_ => unreachable!()
};
token_tree.get(1),
token_tree.get(2)
) {
- (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => {
+ (1, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })), None, None) => {
(code, None)
},
- (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
- Some(&TokenTree::Token(_, token::Comma)),
- Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => {
+ (3, Some(&TokenTree::Token(Token { kind: token::Ident(ref code, _), .. })),
+ Some(&TokenTree::Token(Token { kind: token::Comma, .. })),
+ Some(&TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), .. }))) => {
(code, Some(symbol))
}
_ => unreachable!()
let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
(
// Crate name.
- &TokenTree::Token(_, token::Ident(ref crate_name, _)),
+ &TokenTree::Token(Token { kind: token::Ident(ref crate_name, _), .. }),
// DIAGNOSTICS ident.
- &TokenTree::Token(_, token::Ident(ref name, _))
+ &TokenTree::Token(Token { kind: token::Ident(ref name, _), .. })
) => (*&crate_name, name),
_ => unreachable!()
};
impl MutVisitor for AvoidInterpolatedIdents {
fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
- if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
- if let token::NtIdent(ident, is_raw) = **nt {
- *tt = tokenstream::TokenTree::Token(ident.span,
- token::Ident(ident, is_raw));
+ if let tokenstream::TokenTree::Token(token) = tt {
+ if let token::Interpolated(nt) = &token.kind {
+ if let token::NtIdent(ident, is_raw) = **nt {
+ *tt = tokenstream::TokenTree::token(ident.span,
+ token::Ident(ident, is_raw));
+ }
}
}
mut_visit::noop_visit_tt(tt, self)
}
AttrProcMacro(ref mac, ..) => {
self.gate_proc_macro_attr_item(attr.span, &item);
- let item_tok = TokenTree::Token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
+ let item_tok = TokenTree::token(DUMMY_SP, token::Interpolated(Lrc::new(match item {
Annotatable::Item(item) => token::NtItem(item),
Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
use crate::ext::tt::quoted::{self, TokenTree};
use crate::parse::{Directory, ParseSess};
use crate::parse::parser::{Parser, PathStyle};
-use crate::parse::token::{self, DocComment, Nonterminal, TokenKind};
+use crate::parse::token::{self, DocComment, Nonterminal, Token, TokenKind};
use crate::print::pprust;
use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{DelimSpan, TokenStream};
//
// At the beginning of the loop, if we reach the end of the delimited submatcher,
// we pop the stack to backtrack out of the descent.
- seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, DocComment(..)) => {
+ seq @ TokenTree::Delimited(..) |
+ seq @ TokenTree::Token(Token { kind: DocComment(..), .. }) => {
let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
let idx = item.idx;
item.stack.push(MatcherTtFrame {
}
// We just matched a normal token. We can just advance the parser.
- TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
+ TokenTree::Token(t) if token_name_eq(&t, token) => {
item.idx += 1;
next_items.push(item);
}
use crate::feature_gate::Features;
use crate::parse::{Directory, ParseSess};
use crate::parse::parser::Parser;
-use crate::parse::token::{self, NtTT};
+use crate::parse::token::{self, Token, NtTT};
use crate::parse::token::TokenKind::*;
use crate::symbol::{Symbol, kw, sym};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
tts: vec![
quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
- quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
+ quoted::TokenTree::token(DUMMY_SP, token::FatArrow),
quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
],
separator: Some(if body.legacy { token::Semi } else { token::Comma }),
})),
// to phase into semicolon-termination instead of semicolon-separation
quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
- tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
+ tts: vec![quoted::TokenTree::token(DUMMY_SP, token::Semi)],
separator: None,
op: quoted::KleeneOp::ZeroOrMore,
num_captures: 0
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) {
- first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+ first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
}
// Reverse scan: Sequence comes before `first`.
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) {
- first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
+ first.add_one_maybe(TokenTree::token(sp.entire(), sep.clone()));
}
assert!(first.maybe_empty);
let mut new;
let my_suffix = if let Some(ref u) = seq_rep.separator {
new = suffix_first.clone();
- new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
+ new.add_one_maybe(TokenTree::token(sp.entire(), u.clone()));
&new
} else {
&suffix_first
fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> IsInFollow {
use quoted::TokenTree;
- if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
+ if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
// closing a token tree can never be matched by any fragment;
// iow, we always require that `(` and `)` match, etc.
IsInFollow::Yes
},
"stmt" | "expr" => {
let tokens = vec!["`=>`", "`,`", "`;`"];
- match *tok {
- TokenTree::Token(_, ref tok) => match *tok {
+ match tok {
+ TokenTree::Token(token) => match token.kind {
FatArrow | Comma | Semi => IsInFollow::Yes,
_ => IsInFollow::No(tokens),
},
},
"pat" => {
let tokens = vec!["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
- match *tok {
- TokenTree::Token(_, ref tok) => match *tok {
+ match tok {
+ TokenTree::Token(token) => match token.kind {
FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
Ident(i, false) if i.name == kw::If ||
i.name == kw::In => IsInFollow::Yes,
"`{`", "`[`", "`=>`", "`,`", "`>`","`=`", "`:`", "`;`", "`|`", "`as`",
"`where`",
];
- match *tok {
- TokenTree::Token(_, ref tok) => match *tok {
+ match tok {
+ TokenTree::Token(token) => match token.kind {
OpenDelim(token::DelimToken::Brace) |
OpenDelim(token::DelimToken::Bracket) |
Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
"vis" => {
// Explicitly disallow `priv`, on the off chance it comes back.
let tokens = vec!["`,`", "an ident", "a type"];
- match *tok {
- TokenTree::Token(_, ref tok) => match *tok {
+ match tok {
+ TokenTree::Token(token) => match token.kind {
Comma => IsInFollow::Yes,
Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
IsInFollow::Yes,
fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
match *tt {
- quoted::TokenTree::Token(_, ref tok) => crate::print::pprust::token_to_string(tok),
+ quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
_ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
use crate::early_buffered_lints::BufferedEarlyLintId;
use crate::ext::tt::macro_parser;
use crate::feature_gate::Features;
-use crate::parse::{token, ParseSess};
+use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::ParseSess;
use crate::print::pprust;
use crate::tokenstream::{self, DelimSpan};
use crate::ast;
} else {
span.with_lo(span.lo() + BytePos(self.delim.len() as u32))
};
- TokenTree::Token(open_span, self.open_token())
+ TokenTree::token(open_span, self.open_token())
}
/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
} else {
span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
};
- TokenTree::Token(close_span, self.close_token())
+ TokenTree::token(close_span, self.close_token())
}
}
/// are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum TokenTree {
- Token(Span, token::TokenKind),
+ Token(Token),
Delimited(DelimSpan, Lrc<Delimited>),
/// A kleene-style repetition sequence
Sequence(DelimSpan, Lrc<SequenceRepetition>),
/// Retrieves the `TokenTree`'s span.
pub fn span(&self) -> Span {
match *self {
- TokenTree::Token(sp, _)
- | TokenTree::MetaVar(sp, _)
- | TokenTree::MetaVarDecl(sp, _, _) => sp,
- TokenTree::Delimited(sp, _)
- | TokenTree::Sequence(sp, _) => sp.entire(),
+ TokenTree::Token(Token { span, .. })
+ | TokenTree::MetaVar(span, _)
+ | TokenTree::MetaVarDecl(span, _, _) => span,
+ TokenTree::Delimited(span, _)
+ | TokenTree::Sequence(span, _) => span.entire(),
}
}
+
+ crate fn token(span: Span, kind: TokenKind) -> TokenTree {
+ TokenTree::Token(Token { kind, span })
+ }
}
/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
match tree {
TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
let span = match trees.next() {
- Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
- Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
+ Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => match trees.next() {
+ Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
Some((kind, _)) => {
- let span = end_sp.with_lo(start_sp.lo());
+ let span = token.span.with_lo(start_sp.lo());
result.push(TokenTree::MetaVarDecl(span, ident, kind));
continue;
}
- _ => end_sp,
+ _ => token.span,
},
tree => tree
.as_ref()
// Depending on what `tree` is, we could be parsing different parts of a macro
match tree {
// `tree` is a `$` token. Look at the next token in `trees`
- tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
+ tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
// `tree` is followed by a delimited set of token trees. This indicates the beginning
// of a repetition sequence in the macro (e.g. `$(pat)*`).
Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
// metavariable that names the crate of the invocation.
- Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
+ Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
let (ident, is_raw) = token.ident().unwrap();
- let span = ident_span.with_lo(span.lo());
+ let span = token.span.with_lo(span.lo());
if ident.name == kw::Crate && !is_raw {
let ident = ast::Ident::new(kw::DollarCrate, ident.span);
- TokenTree::Token(span, token::Ident(ident, is_raw))
+ TokenTree::token(span, token::Ident(ident, is_raw))
} else {
TokenTree::MetaVar(span, ident)
}
}
// `tree` is followed by a random token. This is an error.
- Some(tokenstream::TokenTree::Token(span, tok)) => {
+ Some(tokenstream::TokenTree::Token(token)) => {
let msg = format!(
"expected identifier, found `{}`",
- pprust::token_to_string(&tok)
+ pprust::token_to_string(&token),
);
- sess.span_diagnostic.span_err(span, &msg);
- TokenTree::MetaVar(span, ast::Ident::invalid())
+ sess.span_diagnostic.span_err(token.span, &msg);
+ TokenTree::MetaVar(token.span, ast::Ident::invalid())
}
// There are no more tokens. Just return the `$` we already have.
- None => TokenTree::Token(span, token::Dollar),
+ None => TokenTree::token(span, token::Dollar),
},
// `tree` is an arbitrary token. Keep it.
- tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok),
+ tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
// `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
// descend into the delimited set and further parse it.
/// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
/// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
/// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<I>(
- input: &mut I,
- span: Span,
-) -> Result<Result<(KleeneOp, Span), (token::TokenKind, Span)>, Span>
+fn parse_kleene_op<I>(input: &mut I, span: Span) -> Result<Result<(KleeneOp, Span), Token>, Span>
where
I: Iterator<Item = tokenstream::TokenTree>,
{
match input.next() {
- Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) {
- Some(op) => Ok(Ok((op, span))),
- None => Ok(Err((tok, span))),
+ Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
+ Some(op) => Ok(Ok((op, token.span))),
+ None => Ok(Err(token)),
},
tree => Err(tree
.as_ref()
assert_eq!(op, KleeneOp::ZeroOrOne);
// Lookahead at #2. If it is a KleenOp, then #1 is a separator.
- let is_1_sep = if let Some(&tokenstream::TokenTree::Token(_, ref tok2)) = input.peek() {
+ let is_1_sep = if let Some(tokenstream::TokenTree::Token(tok2)) = input.peek() {
kleene_op(tok2).is_some()
} else {
false
}
// #2 is a random token (this is an error) :(
- Ok(Err((_, _))) => op1_span,
+ Ok(Err(_)) => op1_span,
// #2 is not even a token at all :(
Err(_) => op1_span,
}
// #1 is a separator followed by #2, a KleeneOp
- Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+ Ok(Err(token)) => match parse_kleene_op(input, token.span) {
// #2 is a `?`, which is not allowed as a Kleene op in 2015 edition,
// but is allowed in the 2018 edition
Ok(Ok((op, op2_span))) if op == KleeneOp::ZeroOrOne => {
}
// #2 is a KleeneOp :D
- Ok(Ok((op, _))) => return (Some(tok), op),
+ Ok(Ok((op, _))) => return (Some(token.kind), op),
// #2 is a random token :(
- Ok(Err((_, span))) => span,
+ Ok(Err(token)) => token.span,
// #2 is not a token at all :(
Err(span) => span,
Ok(Ok((op, _))) => return (None, op),
// #1 is a separator followed by #2, a KleeneOp
- Ok(Err((tok, span))) => match parse_kleene_op(input, span) {
+ Ok(Err(token)) => match parse_kleene_op(input, token.span) {
// #2 is the `?` Kleene op, which does not take a separator (error)
Ok(Ok((op, _op2_span))) if op == KleeneOp::ZeroOrOne => {
// Error!
sess.span_diagnostic.span_err(
- span,
+ token.span,
"the `?` macro repetition operator does not take a separator",
);
}
// #2 is a KleeneOp :D
- Ok(Ok((op, _))) => return (Some(tok), op),
+ Ok(Ok((op, _))) => return (Some(token.kind), op),
// #2 is a random token :(
- Ok(Err((_, span))) => span,
+ Ok(Err(token)) => token.span,
// #2 is not a token at all :(
Err(span) => span,
Some((tt, _)) => tt.span(),
None => DUMMY_SP,
};
- result.push(TokenTree::Token(prev_span, sep).into());
+ result.push(TokenTree::token(prev_span, sep).into());
}
continue;
}
result.push(tt.clone().into());
} else {
sp = sp.apply_mark(cx.current_expansion.mark);
- let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+ let token = TokenTree::token(sp, token::Interpolated(nt.clone()));
result.push(token.into());
}
} else {
let ident =
Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
sp = sp.apply_mark(cx.current_expansion.mark);
- result.push(TokenTree::Token(sp, token::Dollar).into());
- result.push(TokenTree::Token(sp, token::TokenKind::from_ast_ident(ident)).into());
+ result.push(TokenTree::token(sp, token::Dollar).into());
+ result.push(TokenTree::token(sp, token::TokenKind::from_ast_ident(ident)).into());
}
}
// Nothing much to do here. Just push the token to the result, being careful to
// preserve syntax context.
- quoted::TokenTree::Token(sp, tok) => {
+ quoted::TokenTree::Token(token) => {
let mut marker = Marker(cx.current_expansion.mark);
- let mut tt = TokenTree::Token(sp, tok);
+ let mut tt = TokenTree::Token(token);
noop_visit_tt(&mut tt, &mut marker);
result.push(tt.into());
}
name,
template
),
- None => if let Some(TokenTree::Token(_, token::Eq)) = attr.tokens.trees().next() {
- // All key-value attributes are restricted to meta-item syntax.
- attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+ None => if let Some(TokenTree::Token(token)) = attr.tokens.trees().next() {
+ if token == token::Eq {
+ // All key-value attributes are restricted to meta-item syntax.
+ attr.parse_meta(self.context.parse_sess).map_err(|mut err| err.emit()).ok();
+ }
}
}
}
#![deny(rust_2018_idioms)]
#![deny(internal)]
+#![feature(bind_by_move_pattern_guards)]
#![feature(crate_visibility_modifier)]
#![feature(label_break_value)]
#![feature(nll)]
use crate::ast::*;
use crate::source_map::{Spanned, respan};
-use crate::parse::token::{self, TokenKind};
+use crate::parse::token::{self, Token, TokenKind};
use crate::ptr::P;
use crate::ThinVec;
use crate::tokenstream::*;
pub fn noop_visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
match tt {
- TokenTree::Token(span, tok) => {
+ TokenTree::Token(Token { kind, span }) => {
+ vis.visit_token(kind);
vis.visit_span(span);
- vis.visit_token(tok);
}
TokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
vis.visit_span(open);
self.check(&token::OpenDelim(DelimToken::Brace)) {
self.parse_token_tree().into()
} else if self.eat(&token::Eq) {
- let eq = TokenTree::Token(self.prev_span, token::Eq);
+ let eq = TokenTree::token(self.prev_span, token::Eq);
let mut is_interpolated_expr = false;
if let token::Interpolated(nt) = &self.token {
if let token::NtExpr(..) = **nt {
"/* my source file */ fn main() { println!(\"zebra\"); }\n"
.to_string());
let id = Ident::from_str("fn");
- assert_eq!(string_reader.next_token().kind, token::Comment);
- assert_eq!(string_reader.next_token().kind, token::Whitespace);
+ assert_eq!(string_reader.next_token(), token::Comment);
+ assert_eq!(string_reader.next_token(), token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = Token {
kind: token::Ident(id, false),
};
assert_eq!(tok1.kind, tok2.kind);
assert_eq!(tok1.span, tok2.span);
- assert_eq!(string_reader.next_token().kind, token::Whitespace);
+ assert_eq!(string_reader.next_token(), token::Whitespace);
// the 'main' id is already read:
assert_eq!(string_reader.pos.clone(), BytePos(28));
// read another token:
// of tokens (stop checking after exhausting the expected vec)
fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
for expected_tok in &expected {
- assert_eq!(&string_reader.next_token().kind, expected_tok);
+ assert_eq!(&string_reader.next_token(), expected_tok);
}
}
with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
- assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().kind,
+ assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(),
mk_lit(token::Char, "a", None));
})
}
with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
- assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().kind,
+ assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(),
mk_lit(token::Char, " ", None));
})
}
with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
- assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().kind,
+ assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token(),
mk_lit(token::Char, "\\n", None));
})
}
with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
- assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().kind,
+ assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token(),
token::Lifetime(Ident::from_str("'abc")));
})
}
with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
- assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().kind,
+ assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
})
}
let sh = mk_sess(sm.clone());
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
- assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().kind,
+ assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
// with a whitespace separator:
- assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().kind,
+ assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
mk_lit(token::$tok_type, $tok_contents, None));
}}
}
test!("1.0", Float, "1.0");
test!("1.0e10", Float, "1.0e10");
- assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().kind,
+ assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token(),
mk_lit(token::Integer, "2", Some("us")));
- assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().kind,
+ assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
mk_lit(token::StrRaw(3), "raw", Some("suffix")));
- assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().kind,
+ assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
})
}
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
- match lexer.next_token().kind {
- token::Comment => {}
- _ => panic!("expected a comment!"),
- }
- assert_eq!(lexer.next_token().kind, mk_lit(token::Char, "a", None));
+ assert_eq!(lexer.next_token(), token::Comment);
+ assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
})
}
let comment = lexer.next_token();
assert_eq!(comment.kind, token::Comment);
assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
- assert_eq!(lexer.next_token().kind, token::Whitespace);
- assert_eq!(lexer.next_token().kind,
- token::DocComment(Symbol::intern("/// test")));
+ assert_eq!(lexer.next_token(), token::Whitespace);
+ assert_eq!(lexer.next_token(), token::DocComment(Symbol::intern("/// test")));
})
}
}
Err(err)
},
_ => {
- let tt = TokenTree::Token(self.span, self.token.clone());
+ let tt = TokenTree::token(self.span, self.token.clone());
// Note that testing for joint-ness here is done via the raw
// source span as the joint-ness is a property of the raw source
// rather than wanting to take `override_span` into account.
token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
_ => token::Literal(self.token),
};
- TokenTree::Token(self.span, token).into()
+ TokenTree::token(self.span, token).into()
}
}
use crate::ast::{self, Ident, PatKind};
use crate::attr::first_attr_value_str_by_name;
use crate::ptr::P;
+ use crate::parse::token::Token;
use crate::print::pprust::item_to_string;
use crate::tokenstream::{DelimSpan, TokenTree};
use crate::util::parser_testing::string_to_stream;
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
4,
- Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
- Some(&TokenTree::Token(_, token::Not)),
- Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
+ Some(&TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. })),
+ Some(&TokenTree::Token(Token { kind: token::Not, .. })),
+ Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
)
if name_macro_rules.name == sym::macro_rules
(
3,
Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
- Some(&TokenTree::Token(_, token::FatArrow)),
+ Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })),
Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
)
if macro_delim == token::Paren => {
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
- Some(&TokenTree::Token(_, token::Dollar)),
- Some(&TokenTree::Token(_, token::Ident(ident, false))),
+ Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+ Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
)
if first_delim == token::Paren && ident.name.as_str() == "a" => {},
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
- Some(&TokenTree::Token(_, token::Dollar)),
- Some(&TokenTree::Token(_, token::Ident(ident, false))),
+ Some(&TokenTree::Token(Token { kind: token::Dollar, .. })),
+ Some(&TokenTree::Token(Token { kind: token::Ident(ident, false), .. })),
)
if second_delim == token::Paren && ident.name.as_str() == "a" => {},
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
let expected = TokenStream::new(vec![
- TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
- TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
+ TokenTree::token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
+ TokenTree::token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
TokenTree::Delimited(
DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
token::DelimToken::Paren,
TokenStream::new(vec![
- TokenTree::Token(sp(6, 7),
+ TokenTree::token(sp(6, 7),
token::Ident(Ident::from_str("b"), false)).into(),
- TokenTree::Token(sp(8, 9), token::Colon).into(),
- TokenTree::Token(sp(10, 13),
+ TokenTree::token(sp(8, 9), token::Colon).into(),
+ TokenTree::token(sp(10, 13),
token::Ident(Ident::from_str("i32"), false)).into(),
]).into(),
).into(),
DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
token::DelimToken::Brace,
TokenStream::new(vec![
- TokenTree::Token(sp(17, 18),
+ TokenTree::token(sp(17, 18),
token::Ident(Ident::from_str("b"), false)).into(),
- TokenTree::Token(sp(18, 19), token::Semi).into(),
+ TokenTree::token(sp(18, 19), token::Semi).into(),
]).into(),
).into()
]);
}
match tree {
- TokenTree::Token(span, kind) => return Token { kind, span },
+ TokenTree::Token(token) => return token,
TokenTree::Delimited(sp, delim, tts) => {
let frame = TokenCursorFrame::new(sp, delim, &tts);
self.stack.push(mem::replace(&mut self.frame, frame));
delim_span,
token::Bracket,
[
- TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
- TokenTree::Token(sp, token::Eq),
- TokenTree::Token(sp, token::TokenKind::lit(
+ TokenTree::token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+ TokenTree::token(sp, token::Eq),
+ TokenTree::token(sp, token::TokenKind::lit(
token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
)),
]
delim_span,
token::NoDelim,
&if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
- [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+ [TokenTree::token(sp, token::Pound), TokenTree::token(sp, token::Not), body]
.iter().cloned().collect::<TokenStream>().into()
} else {
- [TokenTree::Token(sp, token::Pound), body]
+ [TokenTree::token(sp, token::Pound), body]
.iter().cloned().collect::<TokenStream>().into()
},
)));
f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
- TokenTree::Token(_, tok) => tok,
+ TokenTree::Token(token) => token.kind,
TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
},
None => token::CloseDelim(self.token_cursor.frame.delim),
}
match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
- Some(TokenTree::Token(span, _)) => span,
+ Some(TokenTree::Token(token)) => token.span,
Some(TokenTree::Delimited(span, ..)) => span.entire(),
None => self.look_ahead_span(dist - 1),
}
_ => {
let (token, span) = (mem::replace(&mut self.token, token::Whitespace), self.span);
self.bump();
- TokenTree::Token(span, token)
+ TokenTree::token(span, token)
}
}
}
};
TokenStream::new(vec![
args.into(),
- TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
+ TokenTree::token(token_lo.to(self.prev_span), token::FatArrow).into(),
body.into(),
])
} else {
use std::fmt;
use std::mem;
+use std::ops::Deref;
#[cfg(target_arch = "x86_64")]
use rustc_data_structures::static_assert_size;
use rustc_data_structures::sync::Lrc;
].contains(&ident.name)
}
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum TokenKind {
/* Expression-operator symbols. */
Eq,
#[cfg(target_arch = "x86_64")]
static_assert_size!(TokenKind, 16);
-#[derive(Clone, Debug)]
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub struct Token {
pub kind: TokenKind,
pub span: Span,
}
}
+// FIXME: Remove this after all necessary methods are moved from `TokenKind` to `Token`.
+impl Deref for Token {
+ type Target = TokenKind;
+ fn deref(&self) -> &Self::Target {
+ &self.kind
+ }
+}
+
#[derive(Clone, RustcEncodable, RustcDecodable)]
/// For interpolation during macro expansion.
pub enum Nonterminal {
}
Nonterminal::NtIdent(ident, is_raw) => {
let token = Ident(ident, is_raw);
- Some(TokenTree::Token(ident.span, token).into())
+ Some(TokenTree::token(ident.span, token).into())
}
Nonterminal::NtLifetime(ident) => {
let token = Lifetime(ident);
- Some(TokenTree::Token(ident.span, token).into())
+ Some(TokenTree::token(ident.span, token).into())
}
Nonterminal::NtTT(ref tt) => {
Some(tt.clone().into())
if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
let ident = attr.path.segments[0].ident;
let token = Ident(ident, ident.as_str().starts_with("r#"));
- brackets.push(tokenstream::TokenTree::Token(ident.span, token));
+ brackets.push(tokenstream::TokenTree::token(ident.span, token));
// ... and for more complicated paths, fall back to a reparse hack that
// should eventually be removed.
// The span we list here for `#` and for `[ ... ]` are both wrong in
// that it encompasses more than each token, but it hopefully is "good
// enough" for now at least.
- builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
+ builder.push(tokenstream::TokenTree::token(attr.span, Pound));
let delim_span = DelimSpan::from_single(attr.span);
builder.push(tokenstream::TokenTree::Delimited(
delim_span, DelimToken::Bracket, brackets.build().into()));
/// expression arguments as expressions). It can be done! I think.
fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
match tt {
- TokenTree::Token(_, ref tk) => {
- self.writer().word(token_to_string(tk))?;
- match *tk {
- parse::token::DocComment(..) => {
+ TokenTree::Token(ref token) => {
+ self.writer().word(token_to_string(&token))?;
+ match token.kind {
+ token::DocComment(..) => {
self.writer().hardbreak()
}
_ => Ok(())
use crate::ext::base;
use crate::ext::tt::{macro_parser, quoted};
use crate::parse::Directory;
-use crate::parse::token::{self, DelimToken, TokenKind};
+use crate::parse::token::{self, DelimToken, Token, TokenKind};
use crate::print::pprust;
use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum TokenTree {
/// A single token
- Token(Span, token::TokenKind),
+ Token(Token),
/// A delimited sequence of token trees
Delimited(DelimSpan, DelimToken, TokenStream),
}
#[cfg(parallel_compiler)]
fn _dummy()
where
- Span: Send + Sync,
- token::TokenKind: Send + Sync,
+ Token: Send + Sync,
DelimSpan: Send + Sync,
DelimToken: Send + Sync,
TokenStream: Send + Sync,
/// Checks if this TokenTree is equal to the other, regardless of span information.
pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
match (self, other) {
- (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
- (&TokenTree::Delimited(_, delim, ref tts),
- &TokenTree::Delimited(_, delim2, ref tts2)) => {
+ (TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind,
+ (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
delim == delim2 && tts.eq_unspanned(&tts2)
}
- (_, _) => false,
+ _ => false,
}
}
// different method.
pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
match (self, other) {
- (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => {
- tk.probably_equal_for_proc_macro(tk2)
+ (TokenTree::Token(token), TokenTree::Token(token2)) => {
+ token.probably_equal_for_proc_macro(token2)
}
- (&TokenTree::Delimited(_, delim, ref tts),
- &TokenTree::Delimited(_, delim2, ref tts2)) => {
+ (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
}
- (_, _) => false,
+ _ => false,
}
}
/// Retrieves the TokenTree's span.
pub fn span(&self) -> Span {
- match *self {
- TokenTree::Token(sp, _) => sp,
+ match self {
+ TokenTree::Token(token) => token.span,
TokenTree::Delimited(sp, ..) => sp.entire(),
}
}
/// Modify the `TokenTree`'s span in-place.
pub fn set_span(&mut self, span: Span) {
- match *self {
- TokenTree::Token(ref mut sp, _) => *sp = span,
- TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span),
+ match self {
+ TokenTree::Token(token) => token.span = span,
+ TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
}
}
/// Indicates if the stream is a token that is equal to the provided token.
pub fn eq_token(&self, t: TokenKind) -> bool {
- match *self {
- TokenTree::Token(_, ref tk) => *tk == t,
+ match self {
+ TokenTree::Token(token) => *token == t,
_ => false,
}
}
TokenStream::new(vec![(self, Joint)])
}
+ pub fn token(span: Span, kind: TokenKind) -> TokenTree {
+ TokenTree::Token(Token { kind, span })
+ }
+
/// Returns the opening delimiter as a token tree.
pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
let open_span = if span.is_dummy() {
} else {
span.with_hi(span.lo() + BytePos(delim.len() as u32))
};
- TokenTree::Token(open_span, token::OpenDelim(delim))
+ TokenTree::token(open_span, token::OpenDelim(delim))
}
/// Returns the closing delimiter as a token tree.
} else {
span.with_lo(span.hi() - BytePos(delim.len() as u32))
};
- TokenTree::Token(close_span, token::CloseDelim(delim))
+ TokenTree::token(close_span, token::CloseDelim(delim))
}
}
while let Some((pos, ts)) = iter.next() {
if let Some((_, next)) = iter.peek() {
let sp = match (&ts, &next) {
- (_, (TokenTree::Token(_, token::Comma), _)) => continue,
- ((TokenTree::Token(sp, token_left), NonJoint),
- (TokenTree::Token(_, token_right), _))
+ (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+ ((TokenTree::Token(token_left), NonJoint), (TokenTree::Token(token_right), _))
if ((token_left.is_ident() && !token_left.is_reserved_ident())
|| token_left.is_lit()) &&
((token_right.is_ident() && !token_right.is_reserved_ident())
- || token_right.is_lit()) => *sp,
+ || token_right.is_lit()) => token_left.span,
((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
_ => continue,
};
let sp = sp.shrink_to_hi();
- let comma = (TokenTree::Token(sp, token::Comma), NonJoint);
+ let comma = (TokenTree::token(sp, token::Comma), NonJoint);
suggestion = Some((pos, comma, sp));
}
}
}
}
-impl From<TokenKind> for TokenStream {
- fn from(token: TokenKind) -> TokenStream {
- TokenTree::Token(DUMMY_SP, token).into()
- }
-}
-
impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
// streams, making a comparison between a token stream generated from an
// AST and a token stream which was parsed into an AST more reliable.
fn semantic_tree(tree: &TokenTree) -> bool {
- match tree {
- // The pretty printer tends to add trailing commas to
- // everything, and in particular, after struct fields.
- | TokenTree::Token(_, token::Comma)
- // The pretty printer emits `NoDelim` as whitespace.
- | TokenTree::Token(_, token::OpenDelim(DelimToken::NoDelim))
- | TokenTree::Token(_, token::CloseDelim(DelimToken::NoDelim))
- // The pretty printer collapses many semicolons into one.
- | TokenTree::Token(_, token::Semi)
- // The pretty printer collapses whitespace arbitrarily and can
- // introduce whitespace from `NoDelim`.
- | TokenTree::Token(_, token::Whitespace)
- // The pretty printer can turn `$crate` into `::crate_name`
- | TokenTree::Token(_, token::ModSep) => false,
- _ => true
+ if let TokenTree::Token(token) = tree {
+ if let
+ // The pretty printer tends to add trailing commas to
+ // everything, and in particular, after struct fields.
+ | token::Comma
+ // The pretty printer emits `NoDelim` as whitespace.
+ | token::OpenDelim(DelimToken::NoDelim)
+ | token::CloseDelim(DelimToken::NoDelim)
+ // The pretty printer collapses many semicolons into one.
+ | token::Semi
+ // The pretty printer collapses whitespace arbitrarily and can
+ // introduce whitespace from `NoDelim`.
+ | token::Whitespace
+ // The pretty printer can turn `$crate` into `::crate_name`
+ | token::ModSep = token.kind {
+ return false;
+ }
}
+ true
}
let mut t1 = self.trees().filter(semantic_tree);
pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
let stream = stream.into();
let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
- if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint {
- if let Some((TokenTree::Token(span, tok), is_joint)) = stream.first_tree_and_joint() {
- if let Some(glued_tok) = last_tok.glue(tok) {
+ if let Some(TokenTree::Token(last_token)) = last_tree_if_joint {
+ if let Some((TokenTree::Token(token), is_joint)) = stream.first_tree_and_joint() {
+ if let Some(glued_tok) = last_token.kind.glue(token.kind) {
let last_stream = self.0.pop().unwrap();
self.push_all_but_last_tree(&last_stream);
- let glued_span = last_span.to(span);
- let glued_tt = TokenTree::Token(glued_span, glued_tok);
+ let glued_span = last_token.span.to(token.span);
+ let glued_tt = TokenTree::token(glued_span, glued_tok);
let glued_tokenstream = TokenStream::new(vec![(glued_tt, is_joint)]);
self.0.push(glued_tokenstream);
self.push_all_but_first_tree(&stream);
with_default_globals(|| {
let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
let test1: TokenStream =
- TokenTree::Token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
+ TokenTree::token(sp(0, 1), token::Ident(Ident::from_str("a"), false)).into();
let test2 = string_to_ts("foo(bar::baz)");
assert_eq!(test0.is_empty(), true);
fn test_dotdotdot() {
with_default_globals(|| {
let mut builder = TokenStreamBuilder::new();
- builder.push(TokenTree::Token(sp(0, 1), token::Dot).joint());
- builder.push(TokenTree::Token(sp(1, 2), token::Dot).joint());
- builder.push(TokenTree::Token(sp(2, 3), token::Dot));
+ builder.push(TokenTree::token(sp(0, 1), token::Dot).joint());
+ builder.push(TokenTree::token(sp(1, 2), token::Dot).joint());
+ builder.push(TokenTree::token(sp(2, 3), token::Dot));
let stream = builder.build();
assert!(stream.eq_unspanned(&string_to_ts("...")));
assert_eq!(stream.trees().count(), 1);
pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
match tt {
- TokenTree::Token(_, tok) => visitor.visit_token(tok),
+ TokenTree::Token(token) => visitor.visit_token(token.kind),
TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
}
}
use syntax::ast;
use syntax::ext::base::{self, *};
use syntax::feature_gate;
-use syntax::parse::{self, token};
+use syntax::parse;
+use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax::symbol::{kw, sym, Symbol};
use syntax::ast::AsmDialect;
let first_colon = tts.iter()
.position(|tt| {
match *tt {
- tokenstream::TokenTree::Token(_, token::Colon) |
- tokenstream::TokenTree::Token(_, token::ModSep) => true,
+ tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
+ tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
_ => false,
}
})
let panic_call = Mac_ {
path: Path::from_ident(Ident::new(sym::panic, sp)),
tts: custom_message.unwrap_or_else(|| {
- TokenStream::from(TokenTree::Token(
+ TokenStream::from(TokenTree::token(
DUMMY_SP,
TokenKind::lit(token::Str, Symbol::intern(&format!(
"assertion failed: {}",
use syntax::ast;
use syntax::ext::base::{self, *};
use syntax::feature_gate;
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax_pos::Span;
use syntax_pos::symbol::{Symbol, sym};
for (i, e) in tts.iter().enumerate() {
if i & 1 == 1 {
match *e {
- TokenTree::Token(_, token::Comma) => {}
+ TokenTree::Token(Token { kind: token::Comma, .. }) => {}
_ => {
cx.span_err(sp, "concat_idents! expecting comma.");
return DummyResult::any(sp);
}
} else {
match *e {
- TokenTree::Token(_, token::Ident(ident, _)) =>
+ TokenTree::Token(Token { kind: token::Ident(ident, _), .. }) =>
res_str.push_str(&ident.as_str()),
_ => {
cx.span_err(sp, "concat_idents! requires ident args.");
MarkAttrs(&self.attrs).visit_item(&item);
let token = token::Interpolated(Lrc::new(token::NtItem(item)));
- let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
+ let input = tokenstream::TokenTree::token(DUMMY_SP, token).into();
let server = proc_macro_server::Rustc::new(ecx);
let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
use syntax::parse::token::*;
let joint = is_joint == Joint;
- let (span, token) = match tree {
+ let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
return TokenTree::Group(Group {
span,
});
}
- tokenstream::TokenTree::Token(span, token) => (span, token),
+ tokenstream::TokenTree::Token(token) => token,
};
macro_rules! tt {
}};
}
- match token {
+ match kind {
Eq => op!('='),
Lt => op!('<'),
Le => op!('<', '='),
TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
]
.into_iter()
- .map(|token| tokenstream::TokenTree::Token(span, token))
+ .map(|kind| tokenstream::TokenTree::token(span, kind))
.collect();
stack.push(TokenTree::Group(Group {
delimiter: Delimiter::Bracket,
}
TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
let token = Ident(ast::Ident::new(sym, span), is_raw);
- return tokenstream::TokenTree::Token(span, token).into();
+ return tokenstream::TokenTree::token(span, token).into();
}
TokenTree::Literal(self::Literal {
lit: token::Lit { kind: token::Integer, symbol, suffix },
let minus = BinOp(BinOpToken::Minus);
let symbol = Symbol::intern(&symbol.as_str()[1..]);
let integer = TokenKind::lit(token::Integer, symbol, suffix);
- let a = tokenstream::TokenTree::Token(span, minus);
- let b = tokenstream::TokenTree::Token(span, integer);
+ let a = tokenstream::TokenTree::token(span, minus);
+ let b = tokenstream::TokenTree::token(span, integer);
return vec![a, b].into_iter().collect();
}
TokenTree::Literal(self::Literal {
let minus = BinOp(BinOpToken::Minus);
let symbol = Symbol::intern(&symbol.as_str()[1..]);
let float = TokenKind::lit(token::Float, symbol, suffix);
- let a = tokenstream::TokenTree::Token(span, minus);
- let b = tokenstream::TokenTree::Token(span, float);
+ let a = tokenstream::TokenTree::token(span, minus);
+ let b = tokenstream::TokenTree::token(span, float);
return vec![a, b].into_iter().collect();
}
TokenTree::Literal(self::Literal { lit, span }) => {
- return tokenstream::TokenTree::Token(span, Literal(lit)).into()
+ return tokenstream::TokenTree::token(span, Literal(lit)).into()
}
};
- let token = match ch {
+ let kind = match ch {
'=' => Eq,
'<' => Lt,
'>' => Gt,
_ => unreachable!(),
};
- let tree = tokenstream::TokenTree::Token(span, token);
+ let tree = tokenstream::TokenTree::token(span, kind);
TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
}
}
}
match (tt.len(), tt.first()) {
- (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
+ (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => {
cx.set_trace_macros(true);
}
- (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
+ (1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => {
cx.set_trace_macros(false);
}
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),