fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<NestedMetaItem>
where I: Iterator<Item = TokenTree>,
{
- if let Some(TokenTree::Token(token)) = tokens.peek().cloned() {
- if let Ok(lit) = Lit::from_token(&token, token.span) {
+ if let Some(TokenTree::Token(token)) = tokens.peek() {
+ if let Ok(lit) = Lit::from_token(token, token.span) {
tokens.next();
return Some(NestedMetaItem::Literal(lit));
}
"ambiguity: multiple successful parses".to_string(),
);
} else {
- let span = if parser.span.is_dummy() {
- parser.span
- } else {
- sess.source_map().next_point(parser.span)
- };
return Failure(
- Token { kind: token::Eof, span },
+ Token::new(token::Eof, if parser.span.is_dummy() {
+ parser.span
+ } else {
+ sess.source_map().next_point(parser.span)
+ }),
"missing tokens in macro arguments",
);
}
// then there is a syntax error.
else if bb_items.is_empty() && next_items.is_empty() {
return Failure(
- parser.token.clone(),
+ parser.token.take(),
"no rules expected this token in macro call",
);
}
}
crate fn token(span: Span, kind: TokenKind) -> TokenTree {
- TokenTree::Token(Token { kind, span })
+ TokenTree::Token(Token::new(kind, span))
}
}
/// Returns the next token. EFFECT: advances the string_reader.
pub fn try_next_token(&mut self) -> Result<Token, ()> {
assert!(self.fatal_errs.is_empty());
- let ret_val = self.peek_token.clone();
+ let ret_val = self.peek_token.take();
self.advance_token()?;
Ok(ret_val)
}
ch: Some('\n'),
source_file,
end_src_index: src.len(),
- // dummy values; not read
- peek_token: Token { kind: token::Eof, span: syntax_pos::DUMMY_SP },
+ peek_token: Token::dummy(),
peek_span_src_raw: syntax_pos::DUMMY_SP,
src,
fatal_errs: Vec::new(),
self.peek_token = comment;
}
None => {
- if self.is_eof() {
-
- let (real, raw) = self.mk_sp_and_raw(
- self.source_file.end_pos,
- self.source_file.end_pos,
- );
- self.peek_token = Token { kind: token::Eof, span: real };
- self.peek_span_src_raw = raw;
+ let (kind, start_pos, end_pos) = if self.is_eof() {
+ (token::Eof, self.source_file.end_pos, self.source_file.end_pos)
} else {
- let start_bytepos = self.pos;
- let kind = self.next_token_inner()?;
- let (real, raw) = self.mk_sp_and_raw(start_bytepos, self.pos);
- self.peek_token = Token { kind, span: real };
- self.peek_span_src_raw = raw;
+ let start_pos = self.pos;
+ (self.next_token_inner()?, start_pos, self.pos)
};
+ let (real, raw) = self.mk_sp_and_raw(start_pos, end_pos);
+ self.peek_token = Token::new(kind, real);
+ self.peek_span_src_raw = raw;
}
}
} else {
token::Comment
};
- Some(Token { kind, span: self.mk_sp(start_bpos, self.pos) })
+ Some(Token::new(kind, self.mk_sp(start_bpos, self.pos)))
}
Some('*') => {
self.bump();
while !self.ch_is('\n') && !self.is_eof() {
self.bump();
}
- return Some(Token {
- kind: token::Shebang(self.name_from(start)),
- span: self.mk_sp(start, self.pos),
- });
+ return Some(Token::new(
+ token::Shebang(self.name_from(start)),
+ self.mk_sp(start, self.pos),
+ ));
}
}
None
while is_pattern_whitespace(self.ch) {
self.bump();
}
- let c = Some(Token {
- kind: token::Whitespace,
- span: self.mk_sp(start_bpos, self.pos),
- });
+ let c = Some(Token::new(token::Whitespace, self.mk_sp(start_bpos, self.pos)));
debug!("scanning whitespace: {:?}", c);
c
}
token::Comment
};
- Some(Token {
- kind,
- span: self.mk_sp(start_bpos, self.pos),
- })
+ Some(Token::new(kind, self.mk_sp(start_bpos, self.pos)))
})
}
assert_eq!(string_reader.next_token(), token::Comment);
assert_eq!(string_reader.next_token(), token::Whitespace);
let tok1 = string_reader.next_token();
- let tok2 = Token {
- kind: token::Ident(id, false),
- span: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
- };
+ let tok2 = Token::new(
+ token::Ident(id, false),
+ Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
+ );
assert_eq!(tok1.kind, tok2.kind);
assert_eq!(tok1.span, tok2.span);
assert_eq!(string_reader.next_token(), token::Whitespace);
assert_eq!(string_reader.pos.clone(), BytePos(28));
// read another token:
let tok3 = string_reader.next_token();
- let tok4 = Token {
- kind: mk_ident("main"),
- span: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
- };
+ let tok4 = Token::new(
+ mk_ident("main"),
+ Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
+ );
assert_eq!(tok3.kind, tok4.kind);
assert_eq!(tok3.span, tok4.span);
// the lparen is already read:
crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
let mut tt_reader = TokenTreesReader {
string_reader: self,
- token: token::Token { kind: token::Eof, span: syntax_pos::DUMMY_SP },
+ token: Token::dummy(),
open_braces: Vec::new(),
unmatched_braces: Vec::new(),
matching_delim_spans: Vec::new(),
Err(err)
},
_ => {
- let tt = TokenTree::Token(self.token.clone());
+ let tt = TokenTree::Token(self.token.take());
// Note that testing for joint-ness here is done via the raw
// source span as the joint-ness is a property of the raw source
// rather than wanting to take `override_span` into account.
if self.span.hi() == next_span.lo() {
let s = String::from("0.") + &symbol.as_str();
let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
- return Some(Token { kind, span: self.span.to(next_span) });
+ return Some(Token::new(kind, self.span.to(next_span)));
}
}
None
self.frame = frame;
continue
} else {
- return Token { kind: token::Eof, span: DUMMY_SP }
+ return Token::new(token::Eof, DUMMY_SP);
};
match self.frame.last_token {
) -> Self {
let mut parser = Parser {
sess,
- token: Token { kind: token::Whitespace, span: DUMMY_SP },
+ token: Token::dummy(),
prev_span: DUMMY_SP,
meta_var_span: None,
prev_token_kind: PrevTokenKind::Other,
// fortunately for tokens currently using `bump_with`, the
// prev_token_kind will be of no use anyway.
self.prev_token_kind = PrevTokenKind::Other;
- self.token = Token { kind: next, span };
+ self.token = Token::new(next, span);
self.expected_tokens.clear();
}
pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
- F: FnOnce(&token::Token) -> R,
+ F: FnOnce(&Token) -> R,
{
if dist == 0 {
// FIXME: Avoid cloning here.
f(&match frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
TokenTree::Token(token) => token,
- TokenTree::Delimited(dspan, delim, _) => Token { kind: token::OpenDelim(delim), span: dspan.open },
+ TokenTree::Delimited(dspan, delim, _) => Token::new(token::OpenDelim(delim), dspan.open),
}
- None => Token { kind: token::CloseDelim(frame.delim), span: frame.span.close }
+ None => Token::new(token::CloseDelim(frame.delim), frame.span.close)
})
}
// Interpolated identifier and lifetime tokens are replaced with usual identifier
// and lifetime tokens, so the former are never encountered during normal parsing.
match **nt {
- token::NtIdent(ident, is_raw) => Token { kind: token::Ident(ident, is_raw), span: ident.span },
- token::NtLifetime(ident) => Token { kind: token::Lifetime(ident), span: ident.span },
+ token::NtIdent(ident, is_raw) => Token::new(token::Ident(ident, is_raw), ident.span),
+ token::NtLifetime(ident) => Token::new(token::Lifetime(ident), ident.span),
_ => return,
}
}
},
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {
- let token = mem::replace(&mut self.token, Token { kind: token::Whitespace, span: DUMMY_SP });
+ let token = self.token.take();
self.bump();
TokenTree::Token(token)
}
// `not` is just an ordinary identifier in Rust-the-language,
// but as `rustc`-the-compiler, we can issue clever diagnostics
// for confused users who really want to say `!`
- let token_cannot_continue_expr = |t: &token::Token| match t.kind {
+ let token_cannot_continue_expr = |t: &Token| match t.kind {
// These tokens can start an expression after `!`, but
// can't continue an expression after an ident
token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
use syntax_pos::symbol::{self, Symbol};
-use syntax_pos::{self, Span, FileName};
+use syntax_pos::{self, Span, FileName, DUMMY_SP};
use log::info;
use std::fmt;
}
}
+impl Token {
+ /// Creates a token of the given `kind` at the given `span`.
+ crate fn new(kind: TokenKind, span: Span) -> Self {
+ Token { kind, span }
+ }
+
+ /// Some token that will be thrown away later.
+ crate fn dummy() -> Self {
+ Token::new(TokenKind::Whitespace, DUMMY_SP)
+ }
+
+ /// Return this token by value and leave a dummy token in its place.
+ crate fn take(&mut self) -> Self {
+ mem::replace(self, Token::dummy())
+ }
+}
+
impl PartialEq<TokenKind> for Token {
fn eq(&self, rhs: &TokenKind) -> bool {
self.kind == *rhs
}
pub fn token(span: Span, kind: TokenKind) -> TokenTree {
- TokenTree::Token(Token { kind, span })
+ TokenTree::Token(Token::new(kind, span))
}
/// Returns the opening delimiter as a token tree.