use syntax::source_map::{self, respan, CompilerDesugaringKind, Spanned};
use syntax::std_inject;
use syntax::symbol::{keywords, Symbol};
-use syntax::tokenstream::{Delimited, TokenStream, TokenTree};
+use syntax::tokenstream::{TokenStream, TokenTree};
use syntax::parse::token::Token;
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, MultiSpan};
fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
match tree {
TokenTree::Token(span, token) => self.lower_token(token, span),
- TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
+ TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
- Delimited {
- delim: delimited.delim,
- tts: self.lower_token_stream(delimited.tts.into()).into(),
- },
+ delim,
+ self.lower_token_stream(tts.into()).into(),
).into(),
}
}
span.hash_stable(hcx, hasher);
hash_token(token, hcx, hasher);
}
- tokenstream::TokenTree::Delimited(span, ref delimited) => {
+ tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
span.hash_stable(hcx, hasher);
- std_hash::Hash::hash(&delimited.delim, hasher);
- for sub_tt in delimited.stream().trees() {
+ std_hash::Hash::hash(&delim, hasher);
+ for sub_tt in tts.stream().trees() {
sub_tt.hash_stable(hcx, hasher);
}
}
}
_ => {},
}
- TokenTree::Delimited(_, ref delim) => {
- self.check_tokens(cx, delim.tts.clone().into())
+ TokenTree::Delimited(_, _, tts) => {
+ self.check_tokens(cx, tts.stream())
},
}
}
impl Mac_ {
pub fn stream(&self) -> TokenStream {
- self.tts.clone().into()
+ self.tts.stream()
}
}
use ptr::P;
use symbol::Symbol;
use ThinVec;
-use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
+use tokenstream::{TokenStream, TokenTree, DelimSpan};
use GLOBALS;
use std::iter;
}
tokens.push(item.node.tokens());
}
- TokenTree::Delimited(DelimSpan::from_single(span), Delimited {
- delim: token::Paren,
- tts: TokenStream::concat(tokens).into(),
- }).into()
+ TokenTree::Delimited(
+ DelimSpan::from_single(span),
+ token::Paren,
+ TokenStream::concat(tokens).into(),
+ ).into()
}
}
}
None
};
}
- Some(TokenTree::Delimited(_, ref delimited)) if delimited.delim == token::Paren => {
+ Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
tokens.next();
- delimited.stream()
+ tts.stream()
}
_ => return Some(MetaItemKind::Word),
};
fn extract_proc_macro_attr_input(&self, tokens: TokenStream, span: Span) -> TokenStream {
let mut trees = tokens.trees();
match trees.next() {
- Some(TokenTree::Delimited(_, delim)) => {
+ Some(TokenTree::Delimited(_, _, tts)) => {
if trees.next().is_none() {
- return delim.tts.into()
+ return tts.into()
}
}
Some(TokenTree::Token(..)) => {}
use symbol::Symbol;
use ThinVec;
- use tokenstream::{self, DelimSpan, TokenTree, TokenStream};
+ use tokenstream::{DelimSpan, TokenTree, TokenStream};
pub use parse::new_parser_from_tts;
pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName};
inner.push(self.tokens.clone());
let delim_span = DelimSpan::from_single(self.span);
- r.push(TokenTree::Delimited(delim_span, tokenstream::Delimited {
- delim: token::Bracket, tts: TokenStream::concat(inner).into()
- }));
+ r.push(TokenTree::Delimited(
+ delim_span, token::Bracket, TokenStream::concat(inner).into()
+ ));
r
}
}
impl ToTokens for () {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Delimited(DelimSpan::dummy(), tokenstream::Delimited {
- delim: token::Paren,
- tts: TokenStream::empty().into(),
- })]
+ vec![
+ TokenTree::Delimited(DelimSpan::dummy(), token::Paren, TokenStream::empty().into())
+ ]
}
}
// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
- use tokenstream::Delimited;
-
let mut results = Vec::new();
let mut result = Vec::new();
let mut open_span = DUMMY_SP;
}
TokenTree::Token(span, token::CloseDelim(delim)) => {
let delim_span = DelimSpan::from_pair(open_span, span);
- let tree = TokenTree::Delimited(delim_span, Delimited {
+ let tree = TokenTree::Delimited(
+ delim_span,
delim,
- tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
- });
+ result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
+ );
result = results.pop().unwrap();
result.push(tree);
}
vec![e_tok]);
vec![cx.stmt_expr(e_push)]
},
- TokenTree::Delimited(span, ref delimed) => {
- let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span.open), false);
- stmts.extend(statements_mk_tts(cx, delimed.stream()));
- stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span.close), false));
+ TokenTree::Delimited(span, delim, ref tts) => {
+ let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false);
+ stmts.extend(statements_mk_tts(cx, tts.stream()));
+ stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false));
stmts
}
}
// `tree` is a `$` token. Look at the next token in `trees`
tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
// `tree` is followed by a delimited set of token trees. This indicates the beginning
- // of a repetition sequence in the macro (e.g., `$(pat)*`).
- Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
+ // of a repetition sequence in the macro (e.g., `$(pat)*`).
+ Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
// Must have `(` not `{` or `[`
- if delimited.delim != token::Paren {
- let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
+ if delim != token::Paren {
+ let tok = pprust::token_to_string(&token::OpenDelim(delim));
let msg = format!("expected `(`, found `{}`", tok);
sess.span_diagnostic.span_err(span.entire(), &msg);
}
// Parse the contents of the sequence itself
let sequence = parse(
- delimited.tts.into(),
+ tts.into(),
expect_matchers,
sess,
features,
// `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
// descend into the delimited set and further parse it.
- tokenstream::TokenTree::Delimited(span, delimited) => TokenTree::Delimited(
+ tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
Lrc::new(Delimited {
- delim: delimited.delim,
+ delim: delim,
tts: parse(
- delimited.tts.into(),
+ tts.into(),
expect_matchers,
sess,
features,
use parse::token::{self, Token, NtTT};
use smallvec::SmallVec;
use syntax_pos::DUMMY_SP;
-use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
+use tokenstream::{TokenStream, TokenTree, DelimSpan};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
if result_stack.is_empty() {
return TokenStream::concat(result);
}
- let tree = TokenTree::Delimited(span, Delimited {
- delim: forest.delim,
- tts: TokenStream::concat(result).into(),
- });
+ let tree = TokenTree::Delimited(
+ span,
+ forest.delim,
+ TokenStream::concat(result).into(),
+ );
result = result_stack.pop().unwrap();
result.push(tree.into());
}
match tt {
TokenTree::Token(span, tok) =>
TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
- TokenTree::Delimited(span, delimed) => TokenTree::Delimited(
+ TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
- Delimited {
- tts: fld.fold_tts(delimed.stream()).into(),
- delim: delimed.delim,
- }
+ delim,
+ fld.fold_tts(tts.stream()).into(),
),
}
}
use print::pprust::token_to_string;
use parse::lexer::StringReader;
use parse::{token, PResult};
-use tokenstream::{Delimited, DelimSpan, TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};
impl<'a> StringReader<'a> {
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
_ => {}
}
- Ok(TokenTree::Delimited(delim_span, Delimited {
+ Ok(TokenTree::Delimited(
+ delim_span,
delim,
- tts: tts.into(),
- }).into())
+ tts.into(),
+ ).into())
},
token::CloseDelim(_) => {
// An unexpected closing delimiter (i.e., there is no
use attr::first_attr_value_str_by_name;
use parse;
use print::pprust::item_to_string;
- use tokenstream::{self, DelimSpan, TokenTree};
+ use tokenstream::{DelimSpan, TokenTree};
use util::parser_testing::string_to_stream;
use util::parser_testing::{string_to_expr, string_to_item};
use with_globals;
Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
Some(&TokenTree::Token(_, token::Not)),
Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
- Some(&TokenTree::Delimited(_, ref macro_delimed)),
+ Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
)
if name_macro_rules.name == "macro_rules"
&& name_zip.name == "zip" => {
- let tts = ¯o_delimed.stream().trees().collect::<Vec<_>>();
+ let tts = ¯o_tts.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
(
3,
- Some(&TokenTree::Delimited(_, ref first_delimed)),
+ Some(&TokenTree::Delimited(_, first_delim, ref first_tts)),
Some(&TokenTree::Token(_, token::FatArrow)),
- Some(&TokenTree::Delimited(_, ref second_delimed)),
+ Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
)
- if macro_delimed.delim == token::Paren => {
- let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
+ if macro_delim == token::Paren => {
+ let tts = &first_tts.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident, false))),
)
- if first_delimed.delim == token::Paren && ident.name == "a" => {},
- _ => panic!("value 3: {:?}", *first_delimed),
+ if first_delim == token::Paren && ident.name == "a" => {},
+ _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
}
- let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
+ let tts = &second_tts.stream().trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident, false))),
)
- if second_delimed.delim == token::Paren
- && ident.name == "a" => {},
- _ => panic!("value 4: {:?}", *second_delimed),
+ if second_delim == token::Paren && ident.name == "a" => {},
+ _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
}
},
- _ => panic!("value 2: {:?}", *macro_delimed),
+ _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
}
},
_ => panic!("value: {:?}",tts),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
TokenTree::Delimited(
DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
- tokenstream::Delimited {
- delim: token::DelimToken::Paren,
- tts: TokenStream::concat(vec![
- TokenTree::Token(sp(6, 7),
- token::Ident(Ident::from_str("b"), false)).into(),
- TokenTree::Token(sp(8, 9), token::Colon).into(),
- TokenTree::Token(sp(10, 13),
- token::Ident(Ident::from_str("i32"), false)).into(),
- ]).into(),
- }).into(),
+ token::DelimToken::Paren,
+ TokenStream::concat(vec![
+ TokenTree::Token(sp(6, 7),
+ token::Ident(Ident::from_str("b"), false)).into(),
+ TokenTree::Token(sp(8, 9), token::Colon).into(),
+ TokenTree::Token(sp(10, 13),
+ token::Ident(Ident::from_str("i32"), false)).into(),
+ ]).into(),
+ ).into(),
TokenTree::Delimited(
DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
- tokenstream::Delimited {
- delim: token::DelimToken::Brace,
- tts: TokenStream::concat(vec![
- TokenTree::Token(sp(17, 18),
- token::Ident(Ident::from_str("b"), false)).into(),
- TokenTree::Token(sp(18, 19), token::Semi).into(),
- ]).into(),
- }).into()
+ token::DelimToken::Brace,
+ TokenStream::concat(vec![
+ TokenTree::Token(sp(17, 18),
+ token::Ident(Ident::from_str("b"), false)).into(),
+ TokenTree::Token(sp(18, 19), token::Semi).into(),
+ ]).into(),
+ ).into()
]);
assert_eq!(tts, expected);
use parse::{self, SeqSep, classify, token};
use parse::lexer::TokenAndSpan;
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
+use parse::token::DelimToken;
use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use util::parser::{AssocOp, Fixity};
use print::pprust;
use ptr::P;
use parse::PResult;
use ThinVec;
-use tokenstream::{self, Delimited, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
use symbol::{Symbol, keywords};
use std::borrow::Cow;
}
impl TokenCursorFrame {
- fn new(sp: DelimSpan, delimited: &Delimited) -> Self {
+ fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
TokenCursorFrame {
- delim: delimited.delim,
+ delim: delim,
span: sp,
- open_delim: delimited.delim == token::NoDelim,
- tree_cursor: delimited.stream().into_trees(),
- close_delim: delimited.delim == token::NoDelim,
+ open_delim: delim == token::NoDelim,
+ tree_cursor: tts.stream().into_trees(),
+ close_delim: delim == token::NoDelim,
last_token: LastToken::Was(None),
}
}
loop {
let tree = if !self.frame.open_delim {
self.frame.open_delim = true;
- Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
- .open_tt(self.frame.span.open)
+ TokenTree::open_tt(self.frame.span.open, self.frame.delim)
} else if let Some(tree) = self.frame.tree_cursor.next() {
tree
} else if !self.frame.close_delim {
self.frame.close_delim = true;
- Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
- .close_tt(self.frame.span.close)
+ TokenTree::close_tt(self.frame.span.close, self.frame.delim)
} else if let Some(frame) = self.stack.pop() {
self.frame = frame;
continue
match tree {
TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
- TokenTree::Delimited(sp, ref delimited) => {
- let frame = TokenCursorFrame::new(sp, delimited);
+ TokenTree::Delimited(sp, delim, tts) => {
+ let frame = TokenCursorFrame::new(sp, delim, &tts);
self.stack.push(mem::replace(&mut self.frame, frame));
}
}
}
let delim_span = DelimSpan::from_single(sp);
- let body = TokenTree::Delimited(delim_span, Delimited {
- delim: token::Bracket,
- tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
- TokenTree::Token(sp, token::Eq),
- TokenTree::Token(sp, token::Literal(
- token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
- .iter().cloned().collect::<TokenStream>().into(),
- });
+ let body = TokenTree::Delimited(
+ delim_span,
+ token::Bracket,
+ [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
+ TokenTree::Token(sp, token::Eq),
+ TokenTree::Token(sp, token::Literal(
+ token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+ ]
+ .iter().cloned().collect::<TokenStream>().into(),
+ );
- self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(delim_span, &Delimited {
- delim: token::NoDelim,
- tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
+ self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(
+ delim_span,
+ token::NoDelim,
+ &if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
[TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
.iter().cloned().collect::<TokenStream>().into()
} else {
[TokenTree::Token(sp, token::Pound), body]
.iter().cloned().collect::<TokenStream>().into()
},
- })));
+ )));
self.next()
}
root_module_name: None,
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
- frame: TokenCursorFrame::new(DelimSpan::dummy(), &Delimited {
- delim: token::NoDelim,
- tts: tokens.into(),
- }),
+ frame: TokenCursorFrame::new(
+ DelimSpan::dummy(),
+ token::NoDelim,
+ &tokens.into(),
+ ),
stack: Vec::new(),
},
desugar_doc_comments,
f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
Some(tree) => match tree {
TokenTree::Token(_, tok) => tok,
- TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
+ TokenTree::Delimited(_, delim, _) => token::OpenDelim(delim),
},
None => token::CloseDelim(self.token_cursor.frame.delim),
})
match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
Some(TokenTree::Token(span, _)) => span,
- Some(TokenTree::Delimited(span, _)) => span.entire(),
+ Some(TokenTree::Delimited(span, ..)) => span.entire(),
None => self.look_ahead_span(dist - 1),
}
}
return Err(err)
}
};
- let delimited = match self.parse_token_tree() {
- TokenTree::Delimited(_, delimited) => delimited,
+ let tts = match self.parse_token_tree() {
+ TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
};
let delim = match delim {
token::Brace => MacDelimiter::Brace,
token::NoDelim => self.bug("unexpected no delimiter"),
};
- Ok((delim, delimited.stream().into()))
+ Ok((delim, tts.stream().into()))
}
/// At the bottom (top?) of the precedence hierarchy,
self.token_cursor.stack.pop().unwrap());
self.span = frame.span.entire();
self.bump();
- TokenTree::Delimited(frame.span, Delimited {
- delim: frame.delim,
- tts: frame.tree_cursor.original_stream().into(),
- })
+ TokenTree::Delimited(
+ frame.span,
+ frame.delim,
+ frame.tree_cursor.original_stream().into(),
+ )
},
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
match self.parse_token_tree() {
- TokenTree::Delimited(_, ref delimited) => delimited.stream(),
+ TokenTree::Delimited(_, _, tts) => tts.stream(),
_ => unreachable!(),
}
} else if self.check(&token::OpenDelim(token::Paren)) {
brackets.push(attr.tokens.clone());
- let tokens = tokenstream::Delimited {
- delim: DelimToken::Bracket,
- tts: brackets.build().into(),
- };
// The span we list here for `#` and for `[ ... ]` are both wrong in
// that it encompasses more than each token, but it hopefully is "good
// enough" for now at least.
builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
let delim_span = DelimSpan::from_single(attr.span);
- builder.push(tokenstream::TokenTree::Delimited(delim_span, tokens));
+ builder.push(tokenstream::TokenTree::Delimited(
+ delim_span, DelimToken::Bracket, brackets.build().into()));
}
builder.push(tokens.clone());
Some(builder.build())
_ => Ok(())
}
}
- TokenTree::Delimited(_, ref delimed) => {
- self.writer().word(token_to_string(&delimed.open_token()))?;
+ TokenTree::Delimited(_, delim, tts) => {
+ self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
self.writer().space()?;
- self.print_tts(delimed.stream())?;
+ self.print_tts(tts.stream())?;
self.writer().space()?;
- self.writer().word(token_to_string(&delimed.close_token()))
+ self.writer().word(token_to_string(&token::CloseDelim(delim)))
},
}
}
use std::borrow::Cow;
use std::{fmt, iter, mem};
-/// A delimited sequence of token trees
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-pub struct Delimited {
- /// The type of delimiter
- pub delim: DelimToken,
- /// The delimited sequence of token trees
- pub tts: ThinTokenStream,
-}
-
-impl Delimited {
- /// Returns the opening delimiter as a token.
- pub fn open_token(&self) -> token::Token {
- token::OpenDelim(self.delim)
- }
-
- /// Returns the closing delimiter as a token.
- pub fn close_token(&self) -> token::Token {
- token::CloseDelim(self.delim)
- }
-
- /// Returns the opening delimiter as a token tree.
- pub fn open_tt(&self, span: Span) -> TokenTree {
- let open_span = if span.is_dummy() {
- span
- } else {
- span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
- };
- TokenTree::Token(open_span, self.open_token())
- }
-
- /// Returns the closing delimiter as a token tree.
- pub fn close_tt(&self, span: Span) -> TokenTree {
- let close_span = if span.is_dummy() {
- span
- } else {
- span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
- };
- TokenTree::Token(close_span, self.close_token())
- }
-
- /// Returns the token trees inside the delimiters.
- pub fn stream(&self) -> TokenStream {
- self.tts.clone().into()
- }
-}
-
/// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very
/// loose structure, such that all sorts of different AST-fragments can
/// A single token
Token(Span, token::Token),
/// A delimited sequence of token trees
- Delimited(DelimSpan, Delimited),
+ Delimited(DelimSpan, DelimToken, ThinTokenStream),
}
impl TokenTree {
pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
match (self, other) {
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
- (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
- dl.delim == dl2.delim &&
- dl.stream().eq_unspanned(&dl2.stream())
+ (&TokenTree::Delimited(_, delim, ref tts),
+ &TokenTree::Delimited(_, delim2, ref tts2)) => {
+ delim == delim2 &&
+ tts.stream().eq_unspanned(&tts2.stream())
}
(_, _) => false,
}
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => {
tk.probably_equal_for_proc_macro(tk2)
}
- (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
- dl.delim == dl2.delim &&
- dl.stream().probably_equal_for_proc_macro(&dl2.stream())
+ (&TokenTree::Delimited(_, delim, ref tts),
+ &TokenTree::Delimited(_, delim2, ref tts2)) => {
+ delim == delim2 &&
+ tts.stream().probably_equal_for_proc_macro(&tts2.stream())
}
(_, _) => false,
}
pub fn span(&self) -> Span {
match *self {
TokenTree::Token(sp, _) => sp,
- TokenTree::Delimited(sp, _) => sp.entire(),
+ TokenTree::Delimited(sp, ..) => sp.entire(),
}
}
pub fn set_span(&mut self, span: Span) {
match *self {
TokenTree::Token(ref mut sp, _) => *sp = span,
- TokenTree::Delimited(ref mut sp, _) => *sp = DelimSpan::from_single(span),
+ TokenTree::Delimited(ref mut sp, ..) => *sp = DelimSpan::from_single(span),
}
}
pub fn joint(self) -> TokenStream {
TokenStream { kind: TokenStreamKind::JointTree(self) }
}
+
+ /// Returns the opening delimiter as a token tree.
+ pub fn open_tt(span: Span, delim: DelimToken) -> TokenTree {
+ let open_span = if span.is_dummy() {
+ span
+ } else {
+ span.with_hi(span.lo() + BytePos(delim.len() as u32))
+ };
+ TokenTree::Token(open_span, token::OpenDelim(delim))
+ }
+
+ /// Returns the closing delimiter as a token tree.
+ pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
+ let close_span = if span.is_dummy() {
+ span
+ } else {
+ span.with_lo(span.hi() - BytePos(delim.len() as u32))
+ };
+ TokenTree::Token(close_span, token::CloseDelim(delim))
+ }
}
/// # Token Streams
kind: TokenStreamKind,
}
+// `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
+#[cfg(target_arch = "x86_64")]
+static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 40);
+
impl TokenStream {
/// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
/// separating the two arguments with a comma for diagnostic suggestions.
continue;
}
(TokenStreamKind::Tree(TokenTree::Token(sp, _)), _) => *sp,
- (TokenStreamKind::Tree(TokenTree::Delimited(sp, _)), _) => sp.entire(),
+ (TokenStreamKind::Tree(TokenTree::Delimited(sp, ..)), _) => sp.entire(),
_ => continue,
};
let sp = sp.shrink_to_hi();
#[derive(Debug, Clone)]
pub struct ThinTokenStream(Option<RcVec<TokenStream>>);
+impl ThinTokenStream {
+ pub fn stream(&self) -> TokenStream {
+ self.clone().into()
+ }
+}
+
impl From<TokenStream> for ThinTokenStream {
fn from(stream: TokenStream) -> ThinTokenStream {
ThinTokenStream(match stream.kind {
pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
match tt {
TokenTree::Token(_, tok) => visitor.visit_token(tok),
- TokenTree::Delimited(_, delimed) => visitor.visit_tts(delimed.stream()),
+ TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()),
}
}
let (tree, joint) = stream.as_tree();
let (span, token) = match tree {
- tokenstream::TokenTree::Delimited(span, delimed) => {
- let delimiter = Delimiter::from_internal(delimed.delim);
+ tokenstream::TokenTree::Delimited(span, delim, tts) => {
+ let delimiter = Delimiter::from_internal(delim);
return TokenTree::Group(Group {
delimiter,
- stream: delimed.tts.into(),
+ stream: tts.into(),
span,
});
}
}) => {
return tokenstream::TokenTree::Delimited(
span,
- tokenstream::Delimited {
- delim: delimiter.to_internal(),
- tts: stream.into(),
- },
+ delimiter.to_internal(),
+ stream.into(),
)
.into();
}