pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
with_parse_sess(move |sess| {
- let mut parser = parse::new_parser_from_ts(sess, stream.inner);
+ let mut parser = parse::stream_to_parser(sess, stream.inner);
let mut items = Vec::new();
while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
__internal::with_parse_sess(|sess| {
let src = src.to_string();
let name = "<proc-macro source code>".to_string();
- let tts = parse::parse_tts_from_source_str(name, src, sess);
-
- Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
+ let stream = parse::parse_stream_from_source_str(name, src, sess);
+ Ok(__internal::token_stream_wrap(stream))
})
}
}
use syntax_pos::DUMMY_SP;
use std::iter;
-use std::rc::Rc;
pub fn qquote<'cx>(stream: TokenStream) -> TokenStream {
stream.quote()
}
fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
- TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
- delim: delim,
- tts: stream.into_trees().collect(),
- })).into()
+ TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
}
macro_rules! quote {
}
}
-impl Quote for Vec<TokenTree> {
- fn quote(&self) -> TokenStream {
- let stream = self.iter().cloned().collect::<TokenStream>();
- quote!((quote stream).into_trees().collect::<::std::vec::Vec<_> >())
- }
-}
-
impl Quote for TokenTree {
fn quote(&self) -> TokenStream {
match *self {
}
}
-impl Quote for Rc<Delimited> {
+impl Quote for Delimited {
fn quote(&self) -> TokenStream {
- quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
+ quote!(::syntax::tokenstream::Delimited {
delim: (quote self.delim),
- tts: (quote self.tts),
- }))
+ tts: (quote self.stream()).into(),
+ })
}
}
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::TokenStream;
use syntax::util::ThinVec;
use std::collections::BTreeMap;
pub attrs: HirVec<Attribute>,
pub id: NodeId,
pub span: Span,
- pub body: HirVec<TokenTree>,
+ pub body: TokenStream,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
debug!("visit_macro_def: st={:?}", self.st);
SawMacroDef.hash(self.st);
hash_attrs!(self, &macro_def.attrs);
- for tt in &macro_def.body {
- self.hash_token_tree(tt);
+ for tt in macro_def.body.trees() {
+ self.hash_token_tree(&tt);
}
visit::walk_macro_def(self, macro_def)
}
}
tokenstream::TokenTree::Delimited(span, ref delimited) => {
hash_span!(self, span);
- let tokenstream::Delimited {
- ref delim,
- ref tts,
- } = **delimited;
-
- delim.hash(self.st);
- tts.len().hash(self.st);
- for sub_tt in tts {
- self.hash_token_tree(sub_tt);
+ delimited.delim.hash(self.st);
+ for sub_tt in delimited.stream().trees() {
+ self.hash_token_tree(&sub_tt);
}
}
}
use syntax::ast;
use syntax::attr;
-use syntax::parse::filemap_to_tts;
+use syntax::parse::filemap_to_stream;
use syntax::symbol::Symbol;
use syntax_pos::{mk_sp, Span};
use rustc::hir::svh::Svh;
let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
- let body = filemap_to_tts(&sess.parse_sess, filemap);
+ let body = filemap_to_stream(&sess.parse_sess, filemap);
// Mark the attrs as used
let attrs = data.get_item_attrs(id.index);
id: ast::DUMMY_NODE_ID,
span: local_span,
attrs: attrs,
- body: body,
+ body: body.into(),
})
}
/// Serialize the text of exported macros
fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> {
+ use syntax::print::pprust;
Entry {
kind: EntryKind::MacroDef(self.lazy(&MacroDef {
- body: ::syntax::print::pprust::tts_to_string(&macro_def.body)
+ body: pprust::tts_to_string(&macro_def.body.trees().collect::<Vec<_>>()),
})),
visibility: self.lazy(&ty::Visibility::Public),
span: self.lazy(&macro_def.span),
expansion: Cell::new(LegacyScope::Empty),
});
self.invocations.insert(mark, invocation);
- macro_rules.body = mark_tts(&macro_rules.body, mark);
+ macro_rules.body = mark_tts(macro_rules.stream(), mark).into();
let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, &macro_rules));
self.macro_map.insert(def_id, ext.clone());
ext
pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<'a>) {
let tts = match item.node {
- ast::ItemKind::Mac(ref mac) => &mac.node.tts,
+ ast::ItemKind::Mac(ref mac) => mac.node.stream(),
_ => unreachable!(),
};
attrs: item.attrs.clone(),
id: ast::DUMMY_NODE_ID,
span: item.span,
- body: mark_tts(tts, mark),
+ body: mark_tts(tts, mark).into(),
};
*legacy_scope = LegacyScope::Binding(self.arenas.alloc_legacy_binding(LegacyBinding {
pub fn signature_string_for_span(&self, span: Span) -> String {
let mut toks = self.retokenise_span(span);
toks.real_token();
- let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
+ let mut toks = toks.parse_all_token_trees().unwrap().trees();
let mut prev = toks.next().unwrap();
let first_span = prev.span();
};
// FIXME(jseyfried) merge with `self.visit_macro()`
- let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+ let tts = def.stream().trees().collect::<Vec<_>>();
+ let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
om.macros.push(Macro {
def_id: def_id,
attrs: def.attrs.clone().into(),
// convert each exported_macro into a doc item
fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
+ let tts = def.body.trees().collect::<Vec<_>>();
// Extract the spans of all matchers. They represent the "interface" of the macro.
- let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+ let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
Macro {
def_id: self.cx.tcx.hir.local_def_id(def.id),
use print::pprust;
use ptr::P;
use symbol::{Symbol, keywords};
-use tokenstream::{TokenTree};
+use tokenstream::{ThinTokenStream, TokenStream};
use std::collections::HashSet;
use std::fmt;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Mac_ {
pub path: Path,
- pub tts: Vec<TokenTree>,
+ pub tts: ThinTokenStream,
+}
+
+impl Mac_ {
+ pub fn stream(&self) -> TokenStream {
+ self.tts.clone().into()
+ }
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub attrs: Vec<Attribute>,
pub id: NodeId,
pub span: Span,
- pub body: Vec<TokenTree>,
+ pub body: ThinTokenStream,
+}
+
+impl MacroDef {
+ pub fn stream(&self) -> TokenStream {
+ self.body.clone().into()
+ }
}
#[cfg(test)]
/// Represents a thing that maps token trees to Macro Results
pub trait TTMacroExpander {
- fn expand<'cx>(&self,
- ecx: &'cx mut ExtCtxt,
- span: Span,
- token_tree: &[tokenstream::TokenTree])
+ fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
-> Box<MacResult+'cx>;
}
-> Box<MacResult+'cx>;
impl<F> TTMacroExpander for F
- where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
- -> Box<MacResult+'cx>
+ where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box<MacResult+'cx>
{
- fn expand<'cx>(&self,
- ecx: &'cx mut ExtCtxt,
- span: Span,
- token_tree: &[tokenstream::TokenTree])
+ fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
-> Box<MacResult+'cx> {
- (*self)(ecx, span, token_tree)
+ (*self)(ecx, span, &input.trees().collect::<Vec<_>>())
}
}
expand::MacroExpander::new(self, true)
}
- pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
- -> parser::Parser<'a> {
- parse::tts_to_parser(self.parse_sess, tts.to_vec())
+ pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
+ parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
}
pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use ast::{self, Block, Ident, Mac_, PatKind};
+use ast::{self, Block, Ident, PatKind};
use ast::{Name, MacStmtStyle, StmtKind, ItemKind};
use attr::{self, HasAttrs};
use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
use feature_gate::{self, Features, is_builtin_attr};
use fold;
use fold::*;
+use parse::{filemap_to_stream, ParseSess, DirectoryOwnership, PResult, token};
use parse::parser::Parser;
-use parse::token;
-use parse::{ParseSess, DirectoryOwnership, PResult, filemap_to_tts};
use print::pprust;
use ptr::P;
use std_inject;
use symbol::Symbol;
use symbol::keywords;
use syntax_pos::{self, Span, ExpnId};
-use tokenstream::{TokenTree, TokenStream};
+use tokenstream::TokenStream;
use util::small_vector::SmallVector;
use visit::Visitor;
kind.expect_from_annotatables(items)
}
SyntaxExtension::AttrProcMacro(ref mac) => {
- let attr_toks = tts_for_attr_args(&attr, &self.cx.parse_sess).into_iter().collect();
- let item_toks = tts_for_item(&item, &self.cx.parse_sess).into_iter().collect();
+ let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess);
+ let item_toks = stream_for_item(&item, &self.cx.parse_sess);
let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
self.parse_expansion(tok_result, kind, name, attr.span)
InvocationKind::Bang { mac, ident, span } => (mac, ident, span),
_ => unreachable!(),
};
- let Mac_ { path, tts, .. } = mac.node;
+ let path = &mac.node.path;
let extname = path.segments.last().unwrap().identifier.name;
let ident = ident.unwrap_or(keywords::Invalid.ident());
- let marked_tts = mark_tts(&tts, mark);
+ let marked_tts = mark_tts(mac.node.stream(), mark);
let opt_expanded = match *ext {
NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
if ident.name != keywords::Invalid.name() {
},
});
- kind.make_from(expandfun.expand(self.cx, span, &marked_tts))
+ kind.make_from(expandfun.expand(self.cx, span, marked_tts))
}
IdentTT(ref expander, tt_span, allow_internal_unstable) => {
}
});
- kind.make_from(expander.expand(self.cx, span, ident, marked_tts))
+ let input: Vec<_> = marked_tts.into_trees().collect();
+ kind.make_from(expander.expand(self.cx, span, ident, input))
}
MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
},
});
- let toks = marked_tts.into_iter().collect();
- let tok_result = expandfun.expand(self.cx, span, toks);
+ let tok_result = expandfun.expand(self.cx, span, marked_tts);
Some(self.parse_expansion(tok_result, kind, extname, span))
}
};
// Therefore, we must use the pretty printer (yuck) to turn the AST node into a
// string, which we then re-tokenise (double yuck), but first we have to patch
// the pretty-printed string on to the end of the existing codemap (infinity-yuck).
-fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
let text = match *item {
Annotatable::Item(ref i) => pprust::item_to_string(i),
Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
};
- string_to_tts(text, parse_sess)
+ string_to_stream(text, parse_sess)
}
-fn tts_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn stream_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> TokenStream {
use ast::MetaItemKind::*;
use print::pp::Breaks;
use print::pprust::PrintState;
let token_string = match attr.value.node {
// For `#[foo]`, an empty token
- Word => return vec![],
+ Word => return TokenStream::empty(),
// For `#[foo(bar, baz)]`, returns `(bar, baz)`
List(ref items) => pprust::to_string(|s| {
s.popen()?;
}),
};
- string_to_tts(token_string, parse_sess)
+ string_to_stream(token_string, parse_sess)
}
-fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
let filename = String::from("<macro expansion>");
- filemap_to_tts(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
+ filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
}
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
}
// apply a given mark to the given token trees. Used prior to expansion of a macro.
-pub fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec<TokenTree> {
+pub fn mark_tts(tts: TokenStream, m: Mark) -> TokenStream {
noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
}
use ext::base::ExtCtxt;
use ext::expand::{Expansion, ExpansionKind};
use ext::hygiene::Mark;
+use tokenstream::TokenStream;
use fold::*;
use ptr::P;
use symbol::keywords;
fn mac_placeholder() -> ast::Mac {
dummy_spanned(ast::Mac_ {
path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
- tts: Vec::new(),
+ tts: TokenStream::empty().into(),
})
}
use parse::parser::{Parser, PathStyle};
use parse::token;
use ptr::P;
-use tokenstream::TokenTree;
+use tokenstream::{TokenStream, TokenTree};
/// Quasiquoting works via token trees.
use std::rc::Rc;
use symbol::Symbol;
- use tokenstream::{self, TokenTree};
+ use tokenstream::{self, TokenTree, TokenStream};
pub use parse::new_parser_from_tts;
pub use syntax_pos::{BytePos, Span, DUMMY_SP};
if self.style == ast::AttrStyle::Inner {
r.push(TokenTree::Token(self.span, token::Not));
}
- r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
+ r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
delim: token::Bracket,
- tts: self.value.to_tokens(cx),
- })));
+ tts: self.value.to_tokens(cx).into_iter().collect::<TokenStream>().into(),
+ }));
r
}
}
impl ToTokens for () {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
+ vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
delim: token::Paren,
- tts: vec![],
- }))]
+ tts: TokenStream::empty().into(),
+ })]
}
}
}
fn parse_tts(&self, s: String) -> Vec<TokenTree> {
- parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
+ let source_name = "<quote expansion>".to_owned();
+ parse::parse_stream_from_source_str(source_name, s, self.parse_sess())
+ .into_trees().collect()
}
}
}
// Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
- use std::rc::Rc;
use tokenstream::Delimited;
let mut results = Vec::new();
results.push(::std::mem::replace(&mut result, Vec::new()));
}
TokenTree::Token(span, token::CloseDelim(delim)) => {
- let tree =
- TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
+ let tree = TokenTree::Delimited(span, Delimited {
+ delim: delim,
+ tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
+ });
result = results.pop().unwrap();
result.push(tree);
}
},
TokenTree::Delimited(span, ref delimed) => {
let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
- stmts.extend(statements_mk_tts(cx, &delimed.tts));
+ stmts.extend(statements_mk_tts(cx, delimed.stream()));
stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
stmts
}
vec![stmt_let_sp, stmt_let_tt]
}
-fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
+fn statements_mk_tts(cx: &ExtCtxt, tts: TokenStream) -> Vec<ast::Stmt> {
let mut ss = Vec::new();
let mut quoted = false;
- for tt in tts {
- quoted = match *tt {
+ for tt in tts.into_trees() {
+ quoted = match tt {
TokenTree::Token(_, token::Dollar) if !quoted => true,
_ => {
- ss.extend(statements_mk_tt(cx, tt, quoted));
+ ss.extend(statements_mk_tt(cx, &tt, quoted));
false
}
}
let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
let mut vector = mk_stmts_let(cx, sp);
- vector.extend(statements_mk_tts(cx, &tts[..]));
+ vector.extend(statements_mk_tts(cx, tts.iter().cloned().collect()));
vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
let block = cx.expr_block(cx.block(sp, vector));
let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];
use syntax_pos::{self, BytePos, mk_sp, Span};
use codemap::Spanned;
use errors::FatalError;
-use ext::tt::quoted;
+use ext::tt::quoted::{self, TokenTree};
use parse::{Directory, ParseSess};
use parse::parser::{PathStyle, Parser};
use parse::token::{self, DocComment, Token, Nonterminal};
use print::pprust;
use symbol::keywords;
-use tokenstream::TokenTree;
+use tokenstream::TokenStream;
use util::small_vector::SmallVector;
use std::mem;
#[derive(Clone)]
enum TokenTreeOrTokenTreeVec {
- Tt(quoted::TokenTree),
- TtSeq(Vec<quoted::TokenTree>),
+ Tt(TokenTree),
+ TtSeq(Vec<TokenTree>),
}
impl TokenTreeOrTokenTreeVec {
}
}
- fn get_tt(&self, index: usize) -> quoted::TokenTree {
+ fn get_tt(&self, index: usize) -> TokenTree {
match *self {
TtSeq(ref v) => v[index].clone(),
Tt(ref tt) => tt.get_tt(index),
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
-pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
- use self::quoted::TokenTree;
-
+pub fn count_names(ms: &[TokenTree]) -> usize {
ms.iter().fold(0, |count, elt| {
count + match *elt {
TokenTree::Sequence(_, ref seq) => {
})
}
-fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> {
+fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
let match_idx_hi = count_names(&ms[..]);
let matches = create_matches(match_idx_hi);
Box::new(MatcherPos {
MatchedNonterminal(Rc<Nonterminal>)
}
-fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[quoted::TokenTree], mut res: I)
+fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[TokenTree], mut res: I)
-> NamedParseResult {
- use self::quoted::TokenTree;
-
fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I,
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
-> Result<(), (syntax_pos::Span, String)> {
eof_eis: &mut SmallVector<Box<MatcherPos>>,
bb_eis: &mut SmallVector<Box<MatcherPos>>,
token: &Token,
- span: &syntax_pos::Span) -> ParseResult<()> {
- use self::quoted::TokenTree;
-
+ span: &syntax_pos::Span)
+ -> ParseResult<()> {
while let Some(mut ei) = cur_eis.pop() {
// When unzipped trees end, remove them
while ei.idx >= ei.top_elts.len() {
Success(())
}
-pub fn parse(sess: &ParseSess,
- tts: Vec<TokenTree>,
- ms: &[quoted::TokenTree],
- directory: Option<Directory>)
+pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Option<Directory>)
-> NamedParseResult {
- use self::quoted::TokenTree;
-
let mut parser = Parser::new(sess, tts, directory, true);
let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
let mut next_eis = Vec::new(); // or proceed normally
use parse::parser::Parser;
use parse::token::{self, NtTT};
use parse::token::Token::*;
-use print;
use symbol::Symbol;
-use tokenstream::TokenTree;
+use tokenstream::{TokenStream, TokenTree};
use std::collections::{HashMap};
use std::collections::hash_map::{Entry};
fn expand<'cx>(&self,
cx: &'cx mut ExtCtxt,
sp: Span,
- arg: &[TokenTree])
+ input: TokenStream)
-> Box<MacResult+'cx> {
if !self.valid {
return DummyResult::any(sp);
generic_extension(cx,
sp,
self.name,
- arg,
+ input,
&self.lhses,
&self.rhses)
}
fn generic_extension<'cx>(cx: &'cx ExtCtxt,
sp: Span,
name: ast::Ident,
- arg: &[TokenTree],
+ arg: TokenStream,
lhses: &[quoted::TokenTree],
rhses: &[quoted::TokenTree])
-> Box<MacResult+'cx> {
if cx.trace_macros() {
- println!("{}! {{ {} }}",
- name,
- print::pprust::tts_to_string(arg));
+ println!("{}! {{ {} }}", name, arg);
}
// Which arm's failure should we report? (the one furthest along)
_ => cx.span_bug(sp, "malformed macro lhs")
};
- match TokenTree::parse(cx, lhs_tt, arg) {
+ match TokenTree::parse(cx, lhs_tt, arg.clone()) {
Success(named_matches) => {
let rhs = match rhses[i] {
// ignore delimiters
];
// Parse the macro_rules! invocation
- let argument_map = match parse(sess, def.body.clone(), &argument_gram, None) {
+ let argument_map = match parse(sess, def.body.clone().into(), &argument_gram, None) {
Success(m) => m,
Failure(sp, tok) => {
let s = parse_failure_msg(tok);
s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m {
if let NtTT(ref tt) = **nt {
- let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap();
+ let tt = quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
valid &= check_lhs_nt_follows(sess, &tt);
return tt;
}
s.iter().map(|m| {
if let MatchedNonterminal(ref nt) = **m {
if let NtTT(ref tt) = **nt {
- return quoted::parse(&[tt.clone()], false, sess).pop().unwrap();
+ return quoted::parse(tt.clone().into(), false, sess).pop().unwrap();
}
}
sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
}
}
-pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess)
+pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess)
-> Vec<TokenTree> {
let mut result = Vec::new();
- let mut trees = input.iter().cloned();
+ let mut trees = input.trees();
while let Some(tree) = trees.next() {
let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
match tree {
{
match tree {
tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
- Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => {
+ Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
if delimited.delim != token::Paren {
let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
let msg = format!("expected `(`, found `{}`", tok);
sess.span_diagnostic.span_err(span, &msg);
}
- let sequence = parse(&delimited.tts, expect_matchers, sess);
+ let sequence = parse(delimited.tts.into(), expect_matchers, sess);
let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
let name_captures = macro_parser::count_names(&sequence);
TokenTree::Sequence(span, Rc::new(SequenceRepetition {
tokenstream::TokenTree::Delimited(span, delimited) => {
TokenTree::Delimited(span, Rc::new(Delimited {
delim: delimited.delim,
- tts: parse(&delimited.tts, expect_matchers, sess),
+ tts: parse(delimited.tts.into(), expect_matchers, sess),
}))
}
}
use ext::tt::quoted;
use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{TokenTree, Delimited};
+use tokenstream::{TokenStream, TokenTree, Delimited};
use util::small_vector::SmallVector;
use std::rc::Rc;
pub fn transcribe(sp_diag: &Handler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
src: Vec<quoted::TokenTree>)
- -> Vec<TokenTree> {
+ -> TokenStream {
let mut stack = SmallVector::one(Frame::new(src));
let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
let mut repeats = Vec::new();
- let mut result = Vec::new();
+ let mut result: Vec<TokenStream> = Vec::new();
let mut result_stack = Vec::new();
loop {
*idx = 0;
if let Some(sep) = sep.clone() {
// repeat same span, I guess
- let prev_span = result.last().map(TokenTree::span).unwrap_or(DUMMY_SP);
- result.push(TokenTree::Token(prev_span, sep));
+ let prev_span = match result.last() {
+ Some(stream) => stream.trees().next().unwrap().span(),
+ None => DUMMY_SP,
+ };
+ result.push(TokenTree::Token(prev_span, sep).into());
}
continue
}
}
Frame::Delimited { forest, span, .. } => {
if result_stack.is_empty() {
- return result;
+ return TokenStream::concat(result);
}
- let tree = TokenTree::Delimited(span, Rc::new(Delimited {
+ let tree = TokenTree::Delimited(span, Delimited {
delim: forest.delim,
- tts: result,
- }));
+ tts: TokenStream::concat(result).into(),
+ });
result = result_stack.pop().unwrap();
- result.push(tree);
+ result.push(tree.into());
}
}
continue
// FIXME #2887: think about span stuff here
quoted::TokenTree::Token(sp, SubstNt(ident)) => {
match lookup_cur_matched(ident, &interpolations, &repeats) {
- None => result.push(TokenTree::Token(sp, SubstNt(ident))),
+ None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
match **nt {
// sidestep the interpolation tricks for ident because
// (a) idents can be in lots of places, so it'd be a pain
// (b) we actually can, since it's a token.
NtIdent(ref sn) => {
- result.push(TokenTree::Token(sn.span, token::Ident(sn.node)));
+ let token = TokenTree::Token(sn.span, token::Ident(sn.node));
+ result.push(token.into());
}
- NtTT(ref tt) => result.push(tt.clone()),
+ NtTT(ref tt) => result.push(tt.clone().into()),
_ => {
- // FIXME(pcwalton): Bad copy
- result.push(TokenTree::Token(sp, token::Interpolated(nt.clone())));
+ let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+ result.push(token.into());
}
}
} else {
stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
result_stack.push(mem::replace(&mut result, Vec::new()));
}
- quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
+ quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok).into()),
quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
}
}
noop_fold_ty_params(tps, self)
}
- fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree {
+ fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
noop_fold_tt(tt, self)
}
- fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec<TokenTree> {
+ fn fold_tts(&mut self, tts: TokenStream) -> TokenStream {
noop_fold_tts(tts, self)
}
pub fn noop_fold_mac<T: Folder>(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
Spanned {
node: Mac_ {
+ tts: fld.fold_tts(node.stream()).into(),
path: fld.fold_path(node.path),
- tts: fld.fold_tts(&node.tts),
},
span: fld.new_span(span)
}
}
}
-pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
- match *tt {
- TokenTree::Token(span, ref tok) =>
- TokenTree::Token(fld.new_span(span), fld.fold_token(tok.clone())),
- TokenTree::Delimited(span, ref delimed) => {
- TokenTree::Delimited(fld.new_span(span), Rc::new(
- Delimited {
- delim: delimed.delim,
- tts: fld.fold_tts(&delimed.tts),
- }
- ))
- },
+pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
+ match tt {
+ TokenTree::Token(span, tok) =>
+ TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
+ TokenTree::Delimited(span, delimed) => TokenTree::Delimited(fld.new_span(span), Delimited {
+ tts: fld.fold_tts(delimed.stream()).into(),
+ delim: delimed.delim,
+ }),
}
}
-pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
- tts.iter().map(|tt| fld.fold_tt(tt)).collect()
+pub fn noop_fold_tts<T: Folder>(tts: TokenStream, fld: &mut T) -> TokenStream {
+ tts.trees().map(|tt| fld.fold_tt(tt)).collect()
}
// apply ident folder if it's an ident, apply other folds to interpolated nodes
token::NtIdent(id) => token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}),
token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
token::NtPath(path) => token::NtPath(fld.fold_path(path)),
- token::NtTT(tt) => token::NtTT(fld.fold_tt(&tt)),
+ token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
token::NtImplItem(item) =>
token::NtImplItem(fld.fold_impl_item(item)
use parse::lexer::StringReader;
use parse::{token, PResult};
use syntax_pos::Span;
-use tokenstream::{Delimited, TokenTree};
-
-use std::rc::Rc;
+use tokenstream::{Delimited, TokenStream, TokenTree};
impl<'a> StringReader<'a> {
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
- pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
+ pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
let mut tts = Vec::new();
while self.token != token::Eof {
- tts.push(self.parse_token_tree()?);
+ tts.push(self.parse_token_tree()?.into());
}
- Ok(tts)
+ Ok(TokenStream::concat(tts))
}
// Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
- fn parse_token_trees_until_close_delim(&mut self) -> Vec<TokenTree> {
+ fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
let mut tts = vec![];
loop {
if let token::CloseDelim(..) = self.token {
- return tts;
+ return TokenStream::concat(tts);
}
match self.parse_token_tree() {
- Ok(tt) => tts.push(tt),
+ Ok(tt) => tts.push(tt.into()),
Err(mut e) => {
e.emit();
- return tts;
+ return TokenStream::concat(tts);
}
}
}
_ => {}
}
- Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+ Ok(TokenTree::Delimited(span, Delimited {
delim: delim,
- tts: tts,
- })))
+ tts: tts.into(),
+ }))
},
token::CloseDelim(_) => {
// An unexpected closing delimiter (i.e., there is no
use ptr::P;
use str::char_at;
use symbol::Symbol;
-use tokenstream;
+use tokenstream::{TokenStream, TokenTree};
use std::cell::RefCell;
use std::collections::HashSet;
new_parser_from_source_str(sess, name, source).parse_stmt()
}
-pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
- -> Vec<tokenstream::TokenTree> {
- filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source))
+pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+ -> TokenStream {
+ filemap_to_stream(sess, sess.codemap().new_filemap(name, None, source))
}
// Create a new parser from a source string
/// Given a filemap and config, return a parser
pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
let end_pos = filemap.end_pos;
- let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap));
+ let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
parser.span = syntax_pos::mk_sp(end_pos, end_pos);
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>)
- -> Parser<'a> {
- tts_to_parser(sess, tts)
-}
-
-pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> {
- tts_to_parser(sess, ts.into_trees().collect())
+pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+ stream_to_parser(sess, tts.into_iter().collect())
}
}
/// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream::TokenTree> {
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream {
let mut srdr = lexer::StringReader::new(sess, filemap);
srdr.real_token();
panictry!(srdr.parse_all_token_trees())
}
-/// Given tts and the ParseSess, produce a parser
-pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
- let mut p = Parser::new(sess, tts, None, false);
+/// Given stream and the ParseSess, produce a parser
+pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+ let mut p = Parser::new(sess, stream, None, false);
p.check_unknown_macro_variable();
p
}
#[test]
fn string_to_tts_macro () {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
- let tts: &[tokenstream::TokenTree] = &tts[..];
+ let tts: &[TokenTree] = &tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
use print::pprust;
use ptr::P;
use parse::PResult;
-use tokenstream::{self, Delimited, TokenTree, TokenStream};
+use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream};
use symbol::{Symbol, keywords};
use util::ThinVec;
delim: delimited.delim,
span: sp,
open_delim: delimited.delim == token::NoDelim,
- tree_cursor: delimited.tts.iter().cloned().collect::<TokenStream>().into_trees(),
+ tree_cursor: delimited.stream().into_trees(),
close_delim: delimited.delim == token::NoDelim,
}
}
loop {
let tree = if !self.frame.open_delim {
self.frame.open_delim = true;
- Delimited { delim: self.frame.delim, tts: Vec::new() }.open_tt(self.frame.span)
+ Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
+ .open_tt(self.frame.span)
} else if let Some(tree) = self.frame.tree_cursor.next() {
tree
} else if !self.frame.close_delim {
self.frame.close_delim = true;
- Delimited { delim: self.frame.delim, tts: Vec::new() }.close_tt(self.frame.span)
+ Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
+ .close_tt(self.frame.span)
} else if let Some(frame) = self.stack.pop() {
self.frame = frame;
continue
num_of_hashes = cmp::max(num_of_hashes, count);
}
- let body = TokenTree::Delimited(sp, Rc::new(Delimited {
+ let body = TokenTree::Delimited(sp, Delimited {
delim: token::Bracket,
- tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
- TokenTree::Token(sp, token::Eq),
- TokenTree::Token(sp, token::Literal(
- token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
- }));
+ tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
+ TokenTree::Token(sp, token::Eq),
+ TokenTree::Token(sp, token::Literal(
+ token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
+ .iter().cloned().collect::<TokenStream>().into(),
+ });
self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
delim: token::NoDelim,
tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
[TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
- .iter().cloned().collect()
+ .iter().cloned().collect::<TokenStream>().into()
} else {
- [TokenTree::Token(sp, token::Pound), body].iter().cloned().collect()
+ [TokenTree::Token(sp, token::Pound), body]
+ .iter().cloned().collect::<TokenStream>().into()
},
})));
impl<'a> Parser<'a> {
pub fn new(sess: &'a ParseSess,
- tokens: Vec<TokenTree>,
+ tokens: TokenStream,
directory: Option<Directory>,
desugar_doc_comments: bool)
-> Self {
token_cursor: TokenCursor {
frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
delim: token::NoDelim,
- tts: tokens,
+ tts: tokens.into(),
}),
stack: Vec::new(),
},
})
}
- fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, Vec<TokenTree>)> {
+ fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> {
match self.token {
token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
- TokenTree::Delimited(_, delimited) => (delim, delimited.tts.clone()),
+ TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()),
_ => unreachable!(),
}),
_ => Err(self.fatal("expected open delimiter")),
self.token_cursor.stack.pop().unwrap());
self.span = frame.span;
self.bump();
- return Ok(TokenTree::Delimited(frame.span, Rc::new(Delimited {
+ return Ok(TokenTree::Delimited(frame.span, Delimited {
delim: frame.delim,
- tts: frame.tree_cursor.original_stream().trees().collect(),
- })));
+ tts: frame.tree_cursor.original_stream().into(),
+ }));
},
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
token::NtStmt(ref e) => stmt_to_string(&e),
token::NtPat(ref e) => pat_to_string(&e),
token::NtIdent(ref e) => ident_to_string(e.node),
- token::NtTT(ref e) => tt_to_string(&e),
+ token::NtTT(ref tree) => tt_to_string(tree.clone()),
token::NtArm(ref e) => arm_to_string(&e),
token::NtImplItem(ref e) => impl_item_to_string(&e),
token::NtTraitItem(ref e) => trait_item_to_string(&e),
to_string(|s| s.print_lifetime(e))
}
-pub fn tt_to_string(tt: &tokenstream::TokenTree) -> String {
+pub fn tt_to_string(tt: tokenstream::TokenTree) -> String {
to_string(|s| s.print_tt(tt))
}
pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String {
- to_string(|s| s.print_tts(tts))
+ to_string(|s| s.print_tts(tts.iter().cloned().collect()))
}
pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
self.print_ident(item.ident)?;
self.cbox(INDENT_UNIT)?;
self.popen()?;
- self.print_tts(&node.tts[..])?;
+ self.print_tts(node.stream())?;
self.pclose()?;
word(&mut self.s, ";")?;
self.end()?;
/// appropriate macro, transcribe back into the grammar we just parsed from,
/// and then pretty-print the resulting AST nodes (so, e.g., we print
/// expression arguments as expressions). It can be done! I think.
- pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> {
- match *tt {
+ pub fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
+ match tt {
TokenTree::Token(_, ref tk) => {
word(&mut self.s, &token_to_string(tk))?;
match *tk {
TokenTree::Delimited(_, ref delimed) => {
word(&mut self.s, &token_to_string(&delimed.open_token()))?;
space(&mut self.s)?;
- self.print_tts(&delimed.tts)?;
+ self.print_tts(delimed.stream())?;
space(&mut self.s)?;
word(&mut self.s, &token_to_string(&delimed.close_token()))
},
}
}
- pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> {
+ pub fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> {
self.ibox(0)?;
- for (i, tt) in tts.iter().enumerate() {
+ for (i, tt) in tts.into_trees().enumerate() {
if i != 0 {
space(&mut self.s)?;
}
word(&mut self.s, "! ")?;
self.cbox(INDENT_UNIT)?;
self.popen()?;
- self.print_tts(&node.tts[..])?;
+ self.print_tts(node.stream())?;
self.pclose()?;
word(&mut self.s, ";")?;
self.end()?
word(&mut self.s, "! ")?;
self.cbox(INDENT_UNIT)?;
self.popen()?;
- self.print_tts(&node.tts[..])?;
+ self.print_tts(node.stream())?;
self.pclose()?;
word(&mut self.s, ";")?;
self.end()?
}
token::NoDelim => {}
}
- self.print_tts(&m.node.tts)?;
+ self.print_tts(m.node.stream())?;
match delim {
token::Paren => self.pclose(),
token::Bracket => word(&mut self.s, "]"),
//! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
//! ownership of the original.
-use ast::{self, LitKind};
use syntax_pos::{BytePos, Span, DUMMY_SP};
-use codemap::Spanned;
use ext::base;
use ext::tt::{macro_parser, quoted};
-use parse::{self, Directory};
-use parse::token::{self, Token, Lit};
+use parse::Directory;
+use parse::token::{self, Token};
use print::pprust;
use serialize::{Decoder, Decodable, Encoder, Encodable};
-use symbol::Symbol;
use util::RcSlice;
use std::{fmt, iter, mem};
-use std::rc::Rc;
+use std::hash::{self, Hash};
/// A delimited sequence of token trees
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
/// The type of delimiter
pub delim: token::DelimToken,
/// The delimited sequence of token trees
- pub tts: Vec<TokenTree>,
+ pub tts: ThinTokenStream,
}
impl Delimited {
}
/// Returns the token trees inside the delimiters.
- pub fn subtrees(&self) -> &[TokenTree] {
- &self.tts
+ pub fn stream(&self) -> TokenStream {
+ self.tts.clone().into()
}
}
/// A single token
Token(Span, token::Token),
/// A delimited sequence of token trees
- Delimited(Span, Rc<Delimited>),
+ Delimited(Span, Delimited),
}
impl TokenTree {
/// Use this token tree as a matcher to parse given tts.
- pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree])
+ pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: TokenStream)
-> macro_parser::NamedParseResult {
// `None` is because we're not interpolating
let directory = Directory {
path: cx.current_expansion.module.directory.clone(),
ownership: cx.current_expansion.directory_ownership,
};
- macro_parser::parse(cx.parse_sess(), tts.iter().cloned().collect(), mtch, Some(directory))
+ macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory))
}
/// Check if this TokenTree is equal to the other, regardless of span information.
match (self, other) {
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
(&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
- (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
- {
- for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
- if !tt1.eq_unspanned(tt2) {
- return false;
- }
- }
- true
- }
+ dl.delim == dl2.delim &&
+ dl.stream().trees().count() == dl2.stream().trees().count() &&
+ dl.stream().trees().zip(dl2.stream().trees()).all(|(tt, tt2)| tt.eq_unspanned(&tt2))
}
(_, _) => false,
}
_ => false,
}
}
-
- /// Indicates if the token is an identifier.
- pub fn is_ident(&self) -> bool {
- self.maybe_ident().is_some()
- }
-
- /// Returns an identifier.
- pub fn maybe_ident(&self) -> Option<ast::Ident> {
- match *self {
- TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()),
- TokenTree::Delimited(_, ref dl) => {
- let tts = dl.subtrees();
- if tts.len() != 1 {
- return None;
- }
- tts[0].maybe_ident()
- }
- _ => None,
- }
- }
-
- /// Returns a Token literal.
- pub fn maybe_lit(&self) -> Option<token::Lit> {
- match *self {
- TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()),
- TokenTree::Delimited(_, ref dl) => {
- let tts = dl.subtrees();
- if tts.len() != 1 {
- return None;
- }
- tts[0].maybe_lit()
- }
- _ => None,
- }
- }
-
- /// Returns an AST string literal.
- pub fn maybe_str(&self) -> Option<ast::Lit> {
- match *self {
- TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
- let l = LitKind::Str(Symbol::intern(&parse::str_lit(&s.as_str())),
- ast::StrStyle::Cooked);
- Some(Spanned {
- node: l,
- span: sp,
- })
- }
- TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
- let l = LitKind::Str(Symbol::intern(&parse::raw_str_lit(&s.as_str())),
- ast::StrStyle::Raw(n));
- Some(Spanned {
- node: l,
- span: sp,
- })
- }
- _ => None,
- }
- }
}
/// # Token Streams
}
}
+/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation.
+/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
+/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
+#[derive(Debug, Clone)]
+pub struct ThinTokenStream(Option<RcSlice<TokenStream>>);
+
+impl From<TokenStream> for ThinTokenStream {
+ fn from(stream: TokenStream) -> ThinTokenStream {
+ ThinTokenStream(match stream.kind {
+ TokenStreamKind::Empty => None,
+ TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])),
+ TokenStreamKind::Stream(stream) => Some(stream),
+ })
+ }
+}
+
+impl From<ThinTokenStream> for TokenStream {
+ fn from(stream: ThinTokenStream) -> TokenStream {
+ stream.0.map(TokenStream::concat_rc_slice).unwrap_or_else(TokenStream::empty)
+ }
+}
+
+impl Eq for ThinTokenStream {}
+
+impl PartialEq<ThinTokenStream> for ThinTokenStream {
+ fn eq(&self, other: &ThinTokenStream) -> bool {
+ TokenStream::from(self.clone()) == TokenStream::from(other.clone())
+ }
+}
+
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&pprust::tts_to_string(&self.trees().collect::<Vec<_>>()))
}
}
+impl Hash for TokenStream {
+ fn hash<H: hash::Hasher>(&self, state: &mut H) {
+ for tree in self.trees() {
+ tree.hash(state);
+ }
+ }
+}
+
+impl Encodable for ThinTokenStream {
+ fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
+ TokenStream::from(self.clone()).encode(encoder)
+ }
+}
+
+impl Decodable for ThinTokenStream {
+ fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
+ TokenStream::decode(decoder).map(Into::into)
+ }
+}
+
+impl Hash for ThinTokenStream {
+ fn hash<H: hash::Hasher>(&self, state: &mut H) {
+ TokenStream::from(self.clone()).hash(state);
+ }
+}
+
#[cfg(test)]
mod tests {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::hash::{self, Hash};
use std::fmt;
use std::ops::Deref;
use std::rc::Rc;
}
}
-impl<T: Hash> Hash for RcSlice<T> {
- fn hash<H: hash::Hasher>(&self, state: &mut H) {
- self.deref().hash(state);
- }
-}
-
impl<T: fmt::Debug> fmt::Debug for RcSlice<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self.deref(), f)
if p2.token != token::Eof {
let mut extra_tts = panictry!(p2.parse_all_token_trees());
extra_tts.extend(tts[first_colon..].iter().cloned());
- p = parse::tts_to_parser(cx.parse_sess, extra_tts);
+ p = parse::stream_to_parser(cx.parse_sess, extra_tts.into_iter().collect());
}
asm = s;