git.lizzy.rs Git - rust.git/commitdiff
Integrate `TokenStream`.
author Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
Tue, 21 Feb 2017 05:05:59 +0000 (05:05 +0000)
committer Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
Fri, 3 Mar 2017 02:15:37 +0000 (02:15 +0000)
27 files changed:
src/libproc_macro/lib.rs
src/libproc_macro_plugin/qquote.rs
src/librustc/hir/mod.rs
src/librustc_incremental/calculate_svh/svh_visitor.rs
src/librustc_metadata/cstore_impl.rs
src/librustc_metadata/encoder.rs
src/librustc_resolve/build_reduced_graph.rs
src/librustc_resolve/macros.rs
src/librustc_save_analysis/span_utils.rs
src/librustdoc/visit_ast.rs
src/libsyntax/ast.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/placeholders.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/parse/lexer/tokentrees.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/print/pprust.rs
src/libsyntax/tokenstream.rs
src/libsyntax/util/rc_slice.rs
src/libsyntax_ext/asm.rs
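
For orientation before the per-file diffs: the core of this commit is replacing `Vec<TokenTree>` with the immutable, cheaply clonable `TokenStream` across libsyntax and its consumers. Below is a minimal, self-contained model of the two operations the new code leans on throughout — `concat`, which joins streams without copying, and `trees()`, which flattens the tree-shaped stream back into token trees. The types are simplified stand-ins, not the real libsyntax definitions.

```rust
// Toy model of the `TokenStream` shape used below -- an assumption-laden
// simplification, not the real libsyntax type. Streams are immutable,
// shared via `Rc` (so `clone()` is cheap), concatenated without copying,
// and flattened on demand.
use std::rc::Rc;

#[derive(Clone, Debug)]
enum TokenStream {
    Empty,
    Tree(&'static str),           // stand-in for a real `TokenTree`
    Stream(Rc<Vec<TokenStream>>), // shared subtrees; `clone()` is an `Rc` bump
}

impl TokenStream {
    fn concat(streams: Vec<TokenStream>) -> TokenStream {
        match streams.len() {
            0 => TokenStream::Empty,
            1 => streams.into_iter().next().unwrap(),
            _ => TokenStream::Stream(Rc::new(streams)),
        }
    }

    // Depth-first flattening, in the spirit of `TokenStream::trees()`.
    fn trees(&self) -> Vec<&'static str> {
        match *self {
            TokenStream::Empty => Vec::new(),
            TokenStream::Tree(tok) => vec![tok],
            TokenStream::Stream(ref subs) => subs.iter().flat_map(|s| s.trees()).collect(),
        }
    }
}

fn main() {
    let a = TokenStream::Tree("foo");
    let b = TokenStream::concat(vec![TokenStream::Tree("("), TokenStream::Tree(")")]);
    let joined = TokenStream::concat(vec![a, b]);
    assert_eq!(joined.trees(), ["foo", "(", ")"]);
}
```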

src/libproc_macro/lib.rs
index 0516e111be3b38002bd924e6a7ada7c107082e4b..8d7fe655c23b2a9b8cf4e30274160660aea1d51a 100644 (file)
@@ -101,7 +101,7 @@ pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
 
     pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
         with_parse_sess(move |sess| {
-            let mut parser = parse::new_parser_from_ts(sess, stream.inner);
+            let mut parser = parse::stream_to_parser(sess, stream.inner);
             let mut items = Vec::new();
 
             while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
@@ -177,9 +177,8 @@ fn from_str(src: &str) -> Result<TokenStream, LexError> {
         __internal::with_parse_sess(|sess| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let tts = parse::parse_tts_from_source_str(name, src, sess);
-
-            Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
+            let stream = parse::parse_stream_from_source_str(name, src, sess);
+            Ok(__internal::token_stream_wrap(stream))
         })
     }
 }
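
The `FromStr` impl reworked above is what backs `str::parse::<TokenStream>()` inside procedural macros. A hedged usage sketch — a hypothetical derive in a separate crate built with `crate-type = ["proc-macro"]`:

```rust
// Hypothetical custom derive exercising `TokenStream: FromStr`, which the
// hunk above reroutes through `parse_stream_from_source_str`. This is the
// crate root of a proc-macro crate, not part of the commit itself.
extern crate proc_macro;

use proc_macro::TokenStream;

#[proc_macro_derive(Answer)]
pub fn derive_answer(_input: TokenStream) -> TokenStream {
    // `parse` re-lexes the source text; a bad token yields a `LexError`.
    "fn answer() -> u32 { 42 }".parse().unwrap()
}
```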
src/libproc_macro_plugin/qquote.rs
index e3d85bca3e0d27fde2b0977b12122ebcefd04fd2..0276587ed52b1b7ca55179766580fba50c84cbbb 100644 (file)
@@ -18,7 +18,6 @@
 use syntax_pos::DUMMY_SP;
 
 use std::iter;
-use std::rc::Rc;
 
 pub fn qquote<'cx>(stream: TokenStream) -> TokenStream {
     stream.quote()
@@ -50,10 +49,7 @@ macro_rules! quote_tree {
 }
 
 fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
-    TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
-        delim: delim,
-        tts: stream.into_trees().collect(),
-    })).into()
+    TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
 }
 
 macro_rules! quote {
@@ -102,13 +98,6 @@ fn next(&mut self) -> Option<TokenStream> {
     }
 }
 
-impl Quote for Vec<TokenTree> {
-    fn quote(&self) -> TokenStream {
-        let stream = self.iter().cloned().collect::<TokenStream>();
-        quote!((quote stream).into_trees().collect::<::std::vec::Vec<_> >())
-    }
-}
-
 impl Quote for TokenTree {
     fn quote(&self) -> TokenStream {
         match *self {
@@ -124,12 +113,12 @@ fn quote(&self) -> TokenStream {
     }
 }
 
-impl Quote for Rc<Delimited> {
+impl Quote for Delimited {
     fn quote(&self) -> TokenStream {
-        quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
+        quote!(::syntax::tokenstream::Delimited {
             delim: (quote self.delim),
-            tts: (quote self.tts),
-        }))
+            tts: (quote self.stream()).into(),
+        })
     }
 }
 
src/librustc/hir/mod.rs
index 8b6c75886baa8121ff62903a2c773202766531f3..20b6e285daebe6e33ab5232ca3f696a6dec711ec 100644 (file)
@@ -40,7 +40,7 @@
 use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::TokenStream;
 use syntax::util::ThinVec;
 
 use std::collections::BTreeMap;
@@ -466,7 +466,7 @@ pub struct MacroDef {
     pub attrs: HirVec<Attribute>,
     pub id: NodeId,
     pub span: Span,
-    pub body: HirVec<TokenTree>,
+    pub body: TokenStream,
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
src/librustc_incremental/calculate_svh/svh_visitor.rs
index b075fa599924996650b2571afa356f312f2484c7..e113d0971c21b1fef566bdfd71d44d10ade3f4e3 100644 (file)
@@ -866,8 +866,8 @@ fn visit_macro_def(&mut self, macro_def: &'tcx MacroDef) {
         debug!("visit_macro_def: st={:?}", self.st);
         SawMacroDef.hash(self.st);
         hash_attrs!(self, &macro_def.attrs);
-        for tt in &macro_def.body {
-            self.hash_token_tree(tt);
+        for tt in macro_def.body.trees() {
+            self.hash_token_tree(&tt);
         }
         visit::walk_macro_def(self, macro_def)
     }
@@ -1033,15 +1033,9 @@ fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
             }
             tokenstream::TokenTree::Delimited(span, ref delimited) => {
                 hash_span!(self, span);
-                let tokenstream::Delimited {
-                    ref delim,
-                    ref tts,
-                } = **delimited;
-
-                delim.hash(self.st);
-                tts.len().hash(self.st);
-                for sub_tt in tts {
-                    self.hash_token_tree(sub_tt);
+                delimited.delim.hash(self.st);
+                for sub_tt in delimited.stream().trees() {
+                    self.hash_token_tree(&sub_tt);
                 }
             }
         }
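
The SVH visitor now walks a delimited body via `delimited.stream().trees()` instead of destructuring a stored `Vec`. A self-contained sketch of that recursive hashing pattern, with toy types standing in for the real ones:

```rust
// Sketch of the recursive hashing above; names and types are simplified
// stand-ins (the real code hashes into the incremental SVH state).
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Hash)]
enum Delim { Paren, Bracket }

enum TokenTree {
    Token(String),
    Delimited(Delim, Vec<TokenTree>), // the Vec stands in for `Delimited::stream()`
}

fn hash_token_tree<H: Hasher>(tt: &TokenTree, st: &mut H) {
    match *tt {
        TokenTree::Token(ref s) => s.hash(st),
        TokenTree::Delimited(ref delim, ref trees) => {
            delim.hash(st);
            // Walk the sub-stream tree by tree, as the visitor now does
            // with `delimited.stream().trees()`.
            for sub_tt in trees {
                hash_token_tree(sub_tt, st);
            }
        }
    }
}

fn main() {
    let tt = TokenTree::Delimited(Delim::Paren, vec![TokenTree::Token("x".into())]);
    let mut h = DefaultHasher::new();
    hash_token_tree(&tt, &mut h);
    println!("hash = {:x}", h.finish());
}
```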
src/librustc_metadata/cstore_impl.rs
index 7b02280ef904bda504369b93dd1bb07a42428fa4..274ea7094cb6bb6397f7c38c2d20a4b3fc37d40f 100644 (file)
@@ -34,7 +34,7 @@
 
 use syntax::ast;
 use syntax::attr;
-use syntax::parse::filemap_to_tts;
+use syntax::parse::filemap_to_stream;
 use syntax::symbol::Symbol;
 use syntax_pos::{mk_sp, Span};
 use rustc::hir::svh::Svh;
@@ -397,7 +397,7 @@ fn load_macro(&self, id: DefId, sess: &Session) -> LoadedMacro {
 
         let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
         let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
-        let body = filemap_to_tts(&sess.parse_sess, filemap);
+        let body = filemap_to_stream(&sess.parse_sess, filemap);
 
         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index);
@@ -415,7 +415,7 @@ fn load_macro(&self, id: DefId, sess: &Session) -> LoadedMacro {
             id: ast::DUMMY_NODE_ID,
             span: local_span,
             attrs: attrs,
-            body: body,
+            body: body.into(),
         })
     }
 
src/librustc_metadata/encoder.rs
index af0edab7a83bd9eee79893a85d7b8c3d40b9189b..8ddc1642d9e1c2c77451118c3c21887148f1ec74 100644 (file)
@@ -853,9 +853,10 @@ fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) ->
 
     /// Serialize the text of exported macros
     fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> {
+        use syntax::print::pprust;
         Entry {
             kind: EntryKind::MacroDef(self.lazy(&MacroDef {
-                body: ::syntax::print::pprust::tts_to_string(&macro_def.body)
+                body: pprust::tts_to_string(&macro_def.body.trees().collect::<Vec<_>>()),
             })),
             visibility: self.lazy(&ty::Visibility::Public),
             span: self.lazy(&macro_def.span),
src/librustc_resolve/build_reduced_graph.rs
index 89cff39c59e31317f3af47fe0fa81921c7962925..751f59d0290ac2b94e610e966846c1d75794bbd9 100644 (file)
@@ -516,7 +516,7 @@ pub fn get_macro(&mut self, def: Def) -> Rc<SyntaxExtension> {
             expansion: Cell::new(LegacyScope::Empty),
         });
         self.invocations.insert(mark, invocation);
-        macro_rules.body = mark_tts(&macro_rules.body, mark);
+        macro_rules.body = mark_tts(macro_rules.stream(), mark).into();
         let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, &macro_rules));
         self.macro_map.insert(def_id, ext.clone());
         ext
src/librustc_resolve/macros.rs
index b7068f4b09f5fcca3359b955d6e1d279736755c7..36645418d4f785fca47d0f3e4b30cdd3fee7a29b 100644 (file)
@@ -545,7 +545,7 @@ fn collect_def_ids(&mut self, invocation: &'a InvocationData<'a>, expansion: &Ex
 
     pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<'a>) {
         let tts = match item.node {
-            ast::ItemKind::Mac(ref mac) => &mac.node.tts,
+            ast::ItemKind::Mac(ref mac) => mac.node.stream(),
             _ => unreachable!(),
         };
 
@@ -562,7 +562,7 @@ pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<
             attrs: item.attrs.clone(),
             id: ast::DUMMY_NODE_ID,
             span: item.span,
-            body: mark_tts(tts, mark),
+            body: mark_tts(tts, mark).into(),
         };
 
         *legacy_scope = LegacyScope::Binding(self.arenas.alloc_legacy_binding(LegacyBinding {
src/librustc_save_analysis/span_utils.rs
index 6c93744f014a3c2b678be69ef78b351c56e8bc2f..34402742e6c3325c18bd749309c082785cd1e9f9 100644 (file)
@@ -284,7 +284,7 @@ pub fn spans_with_brackets(&self, span: Span, nesting: isize, limit: isize) -> V
     pub fn signature_string_for_span(&self, span: Span) -> String {
         let mut toks = self.retokenise_span(span);
         toks.real_token();
-        let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
+        let mut toks = toks.parse_all_token_trees().unwrap().trees();
         let mut prev = toks.next().unwrap();
 
         let first_span = prev.span();
src/librustdoc/visit_ast.rs
index 236d9f230b5d470718467b351ed6ce75d16fafa5..42928427233d780723a0996911e96dabfe6446e3 100644 (file)
@@ -211,7 +211,8 @@ pub fn visit_mod_contents(&mut self, span: Span, attrs: hir::HirVec<ast::Attribu
                     };
 
                     // FIXME(jseyfried) merge with `self.visit_macro()`
-                    let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+                    let tts = def.stream().trees().collect::<Vec<_>>();
+                    let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
                     om.macros.push(Macro {
                         def_id: def_id,
                         attrs: def.attrs.clone().into(),
@@ -520,8 +521,9 @@ pub fn visit_item(&mut self, item: &hir::Item,
 
     // convert each exported_macro into a doc item
     fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
+        let tts = def.body.trees().collect::<Vec<_>>();
         // Extract the spans of all matchers. They represent the "interface" of the macro.
-        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+        let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
 
         Macro {
             def_id: self.cx.tcx.hir.local_def_id(def.id),
src/libsyntax/ast.rs
index 09fb369cd3568b8f1a4d01507d7606b33f998bce..9cc754cbf4d1912590251b479d80716f69f35f68 100644 (file)
@@ -24,7 +24,7 @@
 use print::pprust;
 use ptr::P;
 use symbol::{Symbol, keywords};
-use tokenstream::{TokenTree};
+use tokenstream::{ThinTokenStream, TokenStream};
 
 use std::collections::HashSet;
 use std::fmt;
@@ -1033,7 +1033,13 @@ pub enum CaptureBy {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct Mac_ {
     pub path: Path,
-    pub tts: Vec<TokenTree>,
+    pub tts: ThinTokenStream,
+}
+
+impl Mac_ {
+    pub fn stream(&self) -> TokenStream {
+        self.tts.clone().into()
+    }
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -1915,7 +1921,13 @@ pub struct MacroDef {
     pub attrs: Vec<Attribute>,
     pub id: NodeId,
     pub span: Span,
-    pub body: Vec<TokenTree>,
+    pub body: ThinTokenStream,
+}
+
+impl MacroDef {
+    pub fn stream(&self) -> TokenStream {
+        self.body.clone().into()
+    }
 }
 
 #[cfg(test)]
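
`Mac_` and `MacroDef` now store a `ThinTokenStream` and expose the full stream through a cloning accessor. A hypothetical model of the idea (representation assumed, not the real types): keep the stored form one pointer wide and convert at the boundary, so `self.tts.clone().into()` is an `Rc` bump plus a conversion. The real conversion re-wraps without copying tokens; this toy version clones for simplicity.

```rust
// Toy model of `ThinTokenStream`: `Option<Rc<_>>` is a single machine word
// (null-pointer optimization), keeping the AST node small.
use std::rc::Rc;

#[derive(Clone, PartialEq, Debug)]
struct TokenStream(Vec<String>); // stand-in for the real tree-shaped stream

#[derive(Clone)]
struct ThinTokenStream(Option<Rc<TokenStream>>);

impl From<TokenStream> for ThinTokenStream {
    fn from(stream: TokenStream) -> ThinTokenStream {
        if stream.0.is_empty() {
            ThinTokenStream(None)
        } else {
            ThinTokenStream(Some(Rc::new(stream)))
        }
    }
}

impl From<ThinTokenStream> for TokenStream {
    fn from(thin: ThinTokenStream) -> TokenStream {
        // The real conversion re-wraps the shared contents; cloning here
        // keeps the toy model simple.
        thin.0.map(|rc| (*rc).clone()).unwrap_or(TokenStream(Vec::new()))
    }
}

struct Mac_ { tts: ThinTokenStream }

impl Mac_ {
    // Mirrors the accessor added above: `clone()` on the thin wrapper is
    // just an `Rc` bump, then the conversion produces the full stream.
    fn stream(&self) -> TokenStream { self.tts.clone().into() }
}

fn main() {
    let mac = Mac_ { tts: TokenStream(vec!["a".into(), "b".into()]).into() };
    assert_eq!(mac.stream().0, ["a", "b"]);
}
```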
src/libsyntax/ext/base.rs
index c7d2f0cd31dc609e2b044f38c88faa119f4e8e73..e242cf2777fe59dd00fbed82820a06848bb39c82 100644 (file)
@@ -188,10 +188,7 @@ fn expand<'cx>(&self,
 
 /// Represents a thing that maps token trees to Macro Results
 pub trait TTMacroExpander {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx>;
 }
 
@@ -200,15 +197,11 @@ fn expand<'cx>(&self,
                 -> Box<MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
-                          -> Box<MacResult+'cx>
+    where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box<MacResult+'cx>
 {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx> {
-        (*self)(ecx, span, token_tree)
+        (*self)(ecx, span, &input.trees().collect::<Vec<_>>())
     }
 }
 
@@ -654,9 +647,8 @@ pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
         expand::MacroExpander::new(self, true)
     }
 
-    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-        -> parser::Parser<'a> {
-        parse::tts_to_parser(self.parse_sess, tts.to_vec())
+    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
+        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
     }
     pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
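
`TTMacroExpander::expand` now takes its input as a whole `TokenStream`, while the blanket impl still accepts legacy closures over `&[TokenTree]` and adapts at the call site (`&input.trees().collect::<Vec<_>>()`). A toy version of that adapter layer, with simplified stand-in types:

```rust
// Toy adapter: the trait takes the stream by value, legacy closures keep
// the slice signature, and the blanket impl bridges the two. `TokenStream`
// here is a plain Vec; the real one is tree-shaped.
type TokenTree = String;
type TokenStream = Vec<TokenTree>;

trait TTMacroExpander {
    fn expand(&self, input: TokenStream) -> String;
}

impl<F> TTMacroExpander for F
where
    F: Fn(&[TokenTree]) -> String,
{
    fn expand(&self, input: TokenStream) -> String {
        // Collect the stream into a slice for the legacy callback, like
        // `(*self)(ecx, span, &input.trees().collect::<Vec<_>>())` above.
        (*self)(&input[..])
    }
}

fn main() {
    let expander = |tts: &[TokenTree]| tts.join(" ");
    assert_eq!(expander.expand(vec!["a".into(), "b".into()]), "a b");
}
```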
src/libsyntax/ext/expand.rs
index 8107696b8b920212e4ed42bf4f821d78baf9f36c..f1662284a88206657283952e9c444e4c5ea39397 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::{self, Block, Ident, Mac_, PatKind};
+use ast::{self, Block, Ident, PatKind};
 use ast::{Name, MacStmtStyle, StmtKind, ItemKind};
 use attr::{self, HasAttrs};
 use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute};
 use feature_gate::{self, Features, is_builtin_attr};
 use fold;
 use fold::*;
+use parse::{filemap_to_stream, ParseSess, DirectoryOwnership, PResult, token};
 use parse::parser::Parser;
-use parse::token;
-use parse::{ParseSess, DirectoryOwnership, PResult, filemap_to_tts};
 use print::pprust;
 use ptr::P;
 use std_inject;
 use symbol::Symbol;
 use symbol::keywords;
 use syntax_pos::{self, Span, ExpnId};
-use tokenstream::{TokenTree, TokenStream};
+use tokenstream::TokenStream;
 use util::small_vector::SmallVector;
 use visit::Visitor;
 
@@ -462,8 +461,8 @@ fn expand_attr_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let attr_toks = tts_for_attr_args(&attr, &self.cx.parse_sess).into_iter().collect();
-                let item_toks = tts_for_item(&item, &self.cx.parse_sess).into_iter().collect();
+                let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess);
+                let item_toks = stream_for_item(&item, &self.cx.parse_sess);
 
                 let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
                 self.parse_expansion(tok_result, kind, name, attr.span)
@@ -487,11 +486,11 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
             InvocationKind::Bang { mac, ident, span } => (mac, ident, span),
             _ => unreachable!(),
         };
-        let Mac_ { path, tts, .. } = mac.node;
+        let path = &mac.node.path;
 
         let extname = path.segments.last().unwrap().identifier.name;
         let ident = ident.unwrap_or(keywords::Invalid.ident());
-        let marked_tts = mark_tts(&tts, mark);
+        let marked_tts = mark_tts(mac.node.stream(), mark);
         let opt_expanded = match *ext {
             NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
                 if ident.name != keywords::Invalid.name() {
@@ -510,7 +509,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                     },
                 });
 
-                kind.make_from(expandfun.expand(self.cx, span, &marked_tts))
+                kind.make_from(expandfun.expand(self.cx, span, marked_tts))
             }
 
             IdentTT(ref expander, tt_span, allow_internal_unstable) => {
@@ -529,7 +528,8 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                     }
                 });
 
-                kind.make_from(expander.expand(self.cx, span, ident, marked_tts))
+                let input: Vec<_> = marked_tts.into_trees().collect();
+                kind.make_from(expander.expand(self.cx, span, ident, input))
             }
 
             MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => {
@@ -563,8 +563,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                     },
                 });
 
-                let toks = marked_tts.into_iter().collect();
-                let tok_result = expandfun.expand(self.cx, span, toks);
+                let tok_result = expandfun.expand(self.cx, span, marked_tts);
                 Some(self.parse_expansion(tok_result, kind, extname, span))
             }
         };
@@ -821,23 +820,23 @@ fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
 // Therefore, we must use the pretty printer (yuck) to turn the AST node into a
 // string, which we then re-tokenise (double yuck), but first we have to patch
 // the pretty-printed string on to the end of the existing codemap (infinity-yuck).
-fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
     let text = match *item {
         Annotatable::Item(ref i) => pprust::item_to_string(i),
         Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
         Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
     };
-    string_to_tts(text, parse_sess)
+    string_to_stream(text, parse_sess)
 }
 
-fn tts_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn stream_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> TokenStream {
     use ast::MetaItemKind::*;
     use print::pp::Breaks;
     use print::pprust::PrintState;
 
     let token_string = match attr.value.node {
         // For `#[foo]`, an empty token
-        Word => return vec![],
+        Word => return TokenStream::empty(),
         // For `#[foo(bar, baz)]`, returns `(bar, baz)`
         List(ref items) => pprust::to_string(|s| {
             s.popen()?;
@@ -853,12 +852,12 @@ fn tts_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec<Token
         }),
     };
 
-    string_to_tts(token_string, parse_sess)
+    string_to_stream(token_string, parse_sess)
 }
 
-fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
+fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
     let filename = String::from("<macro expansion>");
-    filemap_to_tts(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
+    filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
 }
 
 impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
@@ -1162,6 +1161,6 @@ fn new_span(&mut self, mut span: Span) -> Span {
 }
 
 // apply a given mark to the given token trees. Used prior to expansion of a macro.
-pub fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec<TokenTree> {
+pub fn mark_tts(tts: TokenStream, m: Mark) -> TokenStream {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
src/libsyntax/ext/placeholders.rs
index 0636a78b2152f11ac0ed6e868cc78d3f85d16cba..e2fb1946e90dbdd05b1299e1a6d6c33dae3a2882 100644 (file)
@@ -13,6 +13,7 @@
 use ext::base::ExtCtxt;
 use ext::expand::{Expansion, ExpansionKind};
 use ext::hygiene::Mark;
+use tokenstream::TokenStream;
 use fold::*;
 use ptr::P;
 use symbol::keywords;
@@ -26,7 +27,7 @@ pub fn placeholder(kind: ExpansionKind, id: ast::NodeId) -> Expansion {
     fn mac_placeholder() -> ast::Mac {
         dummy_spanned(ast::Mac_ {
             path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
-            tts: Vec::new(),
+            tts: TokenStream::empty().into(),
         })
     }
 
src/libsyntax/ext/quote.rs
index b1b69c80f4d0016c790c4554d0df6cc73350fd7b..69ff726e719a99fb98dae70f3a65aa09baee668c 100644 (file)
@@ -16,7 +16,7 @@
 use parse::parser::{Parser, PathStyle};
 use parse::token;
 use ptr::P;
-use tokenstream::TokenTree;
+use tokenstream::{TokenStream, TokenTree};
 
 
 /// Quasiquoting works via token trees.
@@ -35,7 +35,7 @@ pub mod rt {
     use std::rc::Rc;
     use symbol::Symbol;
 
-    use tokenstream::{self, TokenTree};
+    use tokenstream::{self, TokenTree, TokenStream};
 
     pub use parse::new_parser_from_tts;
     pub use syntax_pos::{BytePos, Span, DUMMY_SP};
@@ -227,10 +227,10 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
             if self.style == ast::AttrStyle::Inner {
                 r.push(TokenTree::Token(self.span, token::Not));
             }
-            r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
+            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
                 delim: token::Bracket,
-                tts: self.value.to_tokens(cx),
-            })));
+                tts: self.value.to_tokens(cx).into_iter().collect::<TokenStream>().into(),
+            }));
             r
         }
     }
@@ -244,10 +244,10 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
 
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
+            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
                 delim: token::Paren,
-                tts: vec![],
-            }))]
+                tts: TokenStream::empty().into(),
+            })]
         }
     }
 
@@ -355,14 +355,15 @@ fn parse_expr(&self, s: String) -> P<ast::Expr> {
         }
 
         fn parse_tts(&self, s: String) -> Vec<TokenTree> {
-            parse::parse_tts_from_source_str("<quote expansion>".to_string(), s, self.parse_sess())
+            let source_name = "<quote expansion>".to_owned();
+            parse::parse_stream_from_source_str(source_name, s, self.parse_sess())
+                .into_trees().collect()
         }
     }
 }
 
 // Replaces `Token::OpenDelim .. Token::CloseDelim` with `TokenTree::Delimited(..)`.
 pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
-    use std::rc::Rc;
     use tokenstream::Delimited;
 
     let mut results = Vec::new();
@@ -373,8 +374,10 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
                 results.push(::std::mem::replace(&mut result, Vec::new()));
             }
             TokenTree::Token(span, token::CloseDelim(delim)) => {
-                let tree =
-                    TokenTree::Delimited(span, Rc::new(Delimited { delim: delim, tts: result }));
+                let tree = TokenTree::Delimited(span, Delimited {
+                    delim: delim,
+                    tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
+                });
                 result = results.pop().unwrap();
                 result.push(tree);
             }
@@ -747,7 +750,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
         },
         TokenTree::Delimited(span, ref delimed) => {
             let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
-            stmts.extend(statements_mk_tts(cx, &delimed.tts));
+            stmts.extend(statements_mk_tts(cx, delimed.stream()));
             stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
             stmts
         }
@@ -810,14 +813,14 @@ fn mk_stmts_let(cx: &ExtCtxt, sp: Span) -> Vec<ast::Stmt> {
     vec![stmt_let_sp, stmt_let_tt]
 }
 
-fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree]) -> Vec<ast::Stmt> {
+fn statements_mk_tts(cx: &ExtCtxt, tts: TokenStream) -> Vec<ast::Stmt> {
     let mut ss = Vec::new();
     let mut quoted = false;
-    for tt in tts {
-        quoted = match *tt {
+    for tt in tts.into_trees() {
+        quoted = match tt {
             TokenTree::Token(_, token::Dollar) if !quoted => true,
             _ => {
-                ss.extend(statements_mk_tt(cx, tt, quoted));
+                ss.extend(statements_mk_tt(cx, &tt, quoted));
                 false
             }
         }
@@ -829,7 +832,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[TokenTree]) -> (P<ast::Expr>, P<ast
     let (cx_expr, tts) = parse_arguments_to_quote(cx, tts);
 
     let mut vector = mk_stmts_let(cx, sp);
-    vector.extend(statements_mk_tts(cx, &tts[..]));
+    vector.extend(statements_mk_tts(cx, tts.iter().cloned().collect()));
     vector.push(cx.stmt_expr(cx.expr_ident(sp, id_ext("tt"))));
     let block = cx.expr_block(cx.block(sp, vector));
     let unflatten = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext("unflatten")];
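
`unflatten` above rebuilds nested `Delimited` nodes from a flat open/close token sequence using a stack of partially built levels. The same algorithm in self-contained form, with toy token and tree types:

```rust
// Self-contained sketch of the `unflatten` algorithm: on an open delimiter,
// park the current level on a stack; on a close delimiter, wrap the level
// into a delimited tree and resume the parent. Like the original, it
// panics on unbalanced input.
#[derive(Debug, PartialEq, Clone)]
enum Tok { Open, Close, Atom(char) }

#[derive(Debug, PartialEq)]
enum Tree { Atom(char), Delimited(Vec<Tree>) }

fn unflatten(toks: Vec<Tok>) -> Vec<Tree> {
    let mut results = Vec::new();
    let mut result = Vec::new();
    for tok in toks {
        match tok {
            Tok::Open => {
                // Start a fresh level; the current one waits on the stack.
                results.push(std::mem::replace(&mut result, Vec::new()));
            }
            Tok::Close => {
                let tree = Tree::Delimited(result);
                result = results.pop().unwrap();
                result.push(tree);
            }
            Tok::Atom(c) => result.push(Tree::Atom(c)),
        }
    }
    result
}

fn main() {
    let toks = vec![Tok::Atom('a'), Tok::Open, Tok::Atom('b'), Tok::Close];
    assert_eq!(
        unflatten(toks),
        vec![Tree::Atom('a'), Tree::Delimited(vec![Tree::Atom('b')])]
    );
}
```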
src/libsyntax/ext/tt/macro_parser.rs
index 6ab5123bc87b16b799aa6630843dfd08accd9af1..b9cb3d82d4f7c172a779f9d42007768b580a06b9 100644 (file)
 use syntax_pos::{self, BytePos, mk_sp, Span};
 use codemap::Spanned;
 use errors::FatalError;
-use ext::tt::quoted;
+use ext::tt::quoted::{self, TokenTree};
 use parse::{Directory, ParseSess};
 use parse::parser::{PathStyle, Parser};
 use parse::token::{self, DocComment, Token, Nonterminal};
 use print::pprust;
 use symbol::keywords;
-use tokenstream::TokenTree;
+use tokenstream::TokenStream;
 use util::small_vector::SmallVector;
 
 use std::mem;
 
 #[derive(Clone)]
 enum TokenTreeOrTokenTreeVec {
-    Tt(quoted::TokenTree),
-    TtSeq(Vec<quoted::TokenTree>),
+    Tt(TokenTree),
+    TtSeq(Vec<TokenTree>),
 }
 
 impl TokenTreeOrTokenTreeVec {
@@ -113,7 +113,7 @@ fn len(&self) -> usize {
         }
     }
 
-    fn get_tt(&self, index: usize) -> quoted::TokenTree {
+    fn get_tt(&self, index: usize) -> TokenTree {
         match *self {
             TtSeq(ref v) => v[index].clone(),
             Tt(ref tt) => tt.get_tt(index),
@@ -144,9 +144,7 @@ struct MatcherPos {
 
 pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
 
-pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
-    use self::quoted::TokenTree;
-
+pub fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count + match *elt {
             TokenTree::Sequence(_, ref seq) => {
@@ -163,7 +161,7 @@ pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
     })
 }
 
-fn initial_matcher_pos(ms: Vec<quoted::TokenTree>, lo: BytePos) -> Box<MatcherPos> {
+fn initial_matcher_pos(ms: Vec<TokenTree>, lo: BytePos) -> Box<MatcherPos> {
     let match_idx_hi = count_names(&ms[..]);
     let matches = create_matches(match_idx_hi);
     Box::new(MatcherPos {
@@ -202,10 +200,8 @@ pub enum NamedMatch {
     MatchedNonterminal(Rc<Nonterminal>)
 }
 
-fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[quoted::TokenTree], mut res: I)
+fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, ms: &[TokenTree], mut res: I)
                                              -> NamedParseResult {
-    use self::quoted::TokenTree;
-
     fn n_rec<I: Iterator<Item=Rc<NamedMatch>>>(sess: &ParseSess, m: &TokenTree, mut res: &mut I,
              ret_val: &mut HashMap<Ident, Rc<NamedMatch>>)
              -> Result<(), (syntax_pos::Span, String)> {
@@ -289,9 +285,8 @@ fn inner_parse_loop(sess: &ParseSess,
                     eof_eis: &mut SmallVector<Box<MatcherPos>>,
                     bb_eis: &mut SmallVector<Box<MatcherPos>>,
                     token: &Token,
-                    span: &syntax_pos::Span) -> ParseResult<()> {
-    use self::quoted::TokenTree;
-
+                    span: &syntax_pos::Span)
+                    -> ParseResult<()> {
     while let Some(mut ei) = cur_eis.pop() {
         // When unzipped trees end, remove them
         while ei.idx >= ei.top_elts.len() {
@@ -419,13 +414,8 @@ fn inner_parse_loop(sess: &ParseSess,
     Success(())
 }
 
-pub fn parse(sess: &ParseSess,
-             tts: Vec<TokenTree>,
-             ms: &[quoted::TokenTree],
-             directory: Option<Directory>)
+pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Option<Directory>)
              -> NamedParseResult {
-    use self::quoted::TokenTree;
-
     let mut parser = Parser::new(sess, tts, directory, true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
     let mut next_eis = Vec::new(); // or proceed normally
src/libsyntax/ext/tt/macro_rules.rs
index 193c06707c7a6860a2bb794aa01946f2fcc94442..1d386c1a3ac930f494630fcefc44039feb0be40b 100644 (file)
@@ -22,9 +22,8 @@
 use parse::parser::Parser;
 use parse::token::{self, NtTT};
 use parse::token::Token::*;
-use print;
 use symbol::Symbol;
-use tokenstream::TokenTree;
+use tokenstream::{TokenStream, TokenTree};
 
 use std::collections::{HashMap};
 use std::collections::hash_map::{Entry};
@@ -68,7 +67,7 @@ impl TTMacroExpander for MacroRulesMacroExpander {
     fn expand<'cx>(&self,
                    cx: &'cx mut ExtCtxt,
                    sp: Span,
-                   arg: &[TokenTree])
+                   input: TokenStream)
                    -> Box<MacResult+'cx> {
         if !self.valid {
             return DummyResult::any(sp);
@@ -76,7 +75,7 @@ fn expand<'cx>(&self,
         generic_extension(cx,
                           sp,
                           self.name,
-                          arg,
+                          input,
                           &self.lhses,
                           &self.rhses)
     }
@@ -86,14 +85,12 @@ fn expand<'cx>(&self,
 fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                           sp: Span,
                           name: ast::Ident,
-                          arg: &[TokenTree],
+                          arg: TokenStream,
                           lhses: &[quoted::TokenTree],
                           rhses: &[quoted::TokenTree])
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
-        println!("{}! {{ {} }}",
-                 name,
-                 print::pprust::tts_to_string(arg));
+        println!("{}! {{ {} }}", name, arg);
     }
 
     // Which arm's failure should we report? (the one furthest along)
@@ -106,7 +103,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
             _ => cx.span_bug(sp, "malformed macro lhs")
         };
 
-        match TokenTree::parse(cx, lhs_tt, arg) {
+        match TokenTree::parse(cx, lhs_tt, arg.clone()) {
             Success(named_matches) => {
                 let rhs = match rhses[i] {
                     // ignore delimiters
@@ -186,7 +183,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     ];
 
     // Parse the macro_rules! invocation
-    let argument_map = match parse(sess, def.body.clone(), &argument_gram, None) {
+    let argument_map = match parse(sess, def.body.clone().into(), &argument_gram, None) {
         Success(m) => m,
         Failure(sp, tok) => {
             let s = parse_failure_msg(tok);
@@ -205,7 +202,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        let tt = quoted::parse(&[tt.clone()], true, sess).pop().unwrap();
+                        let tt = quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
                         valid &= check_lhs_nt_follows(sess, &tt);
                         return tt;
                     }
@@ -221,7 +218,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
             s.iter().map(|m| {
                 if let MatchedNonterminal(ref nt) = **m {
                     if let NtTT(ref tt) = **nt {
-                        return quoted::parse(&[tt.clone()], false, sess).pop().unwrap();
+                        return quoted::parse(tt.clone().into(), false, sess).pop().unwrap();
                     }
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
src/libsyntax/ext/tt/quoted.rs
index 530824b28348a175fcb7c07ea8af1bce7f8b4006..d56859d805c878c44951b992a7e9858a500a0a87 100644 (file)
@@ -124,10 +124,10 @@ pub fn span(&self) -> Span {
     }
 }
 
-pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &ParseSess)
+pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess)
              -> Vec<TokenTree> {
     let mut result = Vec::new();
-    let mut trees = input.iter().cloned();
+    let mut trees = input.trees();
     while let Some(tree) = trees.next() {
         let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
         match tree {
@@ -161,13 +161,13 @@ fn parse_tree<I>(tree: tokenstream::TokenTree,
 {
     match tree {
         tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() {
-            Some(tokenstream::TokenTree::Delimited(span, ref delimited)) => {
+            Some(tokenstream::TokenTree::Delimited(span, delimited)) => {
                 if delimited.delim != token::Paren {
                     let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                     let msg = format!("expected `(`, found `{}`", tok);
                     sess.span_diagnostic.span_err(span, &msg);
                 }
-                let sequence = parse(&delimited.tts, expect_matchers, sess);
+                let sequence = parse(delimited.tts.into(), expect_matchers, sess);
                 let (separator, op) = parse_sep_and_kleene_op(trees, span, sess);
                 let name_captures = macro_parser::count_names(&sequence);
                 TokenTree::Sequence(span, Rc::new(SequenceRepetition {
@@ -197,7 +197,7 @@ fn parse_tree<I>(tree: tokenstream::TokenTree,
         tokenstream::TokenTree::Delimited(span, delimited) => {
             TokenTree::Delimited(span, Rc::new(Delimited {
                 delim: delimited.delim,
-                tts: parse(&delimited.tts, expect_matchers, sess),
+                tts: parse(delimited.tts.into(), expect_matchers, sess),
             }))
         }
     }
src/libsyntax/ext/tt/transcribe.rs
index 90f64a5208f75cc006830fc74b5abf51a68f2dff..24004492be2a0835d98c175d1b2063319f6a02c0 100644 (file)
@@ -14,7 +14,7 @@
 use ext::tt::quoted;
 use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{TokenTree, Delimited};
+use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::small_vector::SmallVector;
 
 use std::rc::Rc;
@@ -66,11 +66,11 @@ fn next(&mut self) -> Option<quoted::TokenTree> {
 pub fn transcribe(sp_diag: &Handler,
                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                   src: Vec<quoted::TokenTree>)
-                  -> Vec<TokenTree> {
+                  -> TokenStream {
     let mut stack = SmallVector::one(Frame::new(src));
     let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
     let mut repeats = Vec::new();
-    let mut result = Vec::new();
+    let mut result: Vec<TokenStream> = Vec::new();
     let mut result_stack = Vec::new();
 
     loop {
@@ -84,8 +84,11 @@ pub fn transcribe(sp_diag: &Handler,
                     *idx = 0;
                     if let Some(sep) = sep.clone() {
                         // repeat same span, I guess
-                        let prev_span = result.last().map(TokenTree::span).unwrap_or(DUMMY_SP);
-                        result.push(TokenTree::Token(prev_span, sep));
+                        let prev_span = match result.last() {
+                            Some(stream) => stream.trees().next().unwrap().span(),
+                            None => DUMMY_SP,
+                        };
+                        result.push(TokenTree::Token(prev_span, sep).into());
                     }
                     continue
                 }
@@ -97,14 +100,14 @@ pub fn transcribe(sp_diag: &Handler,
                 }
                 Frame::Delimited { forest, span, .. } => {
                     if result_stack.is_empty() {
-                        return result;
+                        return TokenStream::concat(result);
                     }
-                    let tree = TokenTree::Delimited(span, Rc::new(Delimited {
+                    let tree = TokenTree::Delimited(span, Delimited {
                         delim: forest.delim,
-                        tts: result,
-                    }));
+                        tts: TokenStream::concat(result).into(),
+                    });
                     result = result_stack.pop().unwrap();
-                    result.push(tree);
+                    result.push(tree.into());
                 }
             }
             continue
@@ -148,19 +151,20 @@ pub fn transcribe(sp_diag: &Handler,
             // FIXME #2887: think about span stuff here
             quoted::TokenTree::Token(sp, SubstNt(ident)) => {
                 match lookup_cur_matched(ident, &interpolations, &repeats) {
-                    None => result.push(TokenTree::Token(sp, SubstNt(ident))),
+                    None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
                     Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                         match **nt {
                             // sidestep the interpolation tricks for ident because
                             // (a) idents can be in lots of places, so it'd be a pain
                             // (b) we actually can, since it's a token.
                             NtIdent(ref sn) => {
-                                result.push(TokenTree::Token(sn.span, token::Ident(sn.node)));
+                                let token = TokenTree::Token(sn.span, token::Ident(sn.node));
+                                result.push(token.into());
                             }
-                            NtTT(ref tt) => result.push(tt.clone()),
+                            NtTT(ref tt) => result.push(tt.clone().into()),
                             _ => {
-                                // FIXME(pcwalton): Bad copy
-                                result.push(TokenTree::Token(sp, token::Interpolated(nt.clone())));
+                                let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+                                result.push(token.into());
                             }
                         }
                     } else {
@@ -174,7 +178,7 @@ pub fn transcribe(sp_diag: &Handler,
                 stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
                 result_stack.push(mem::replace(&mut result, Vec::new()));
             }
-            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
+            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok).into()),
             quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
         }
     }
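
`transcribe` now accumulates its output as `Vec<TokenStream>`, pushing each produced tree as a one-element stream via `.into()` and joining everything once with `TokenStream::concat` when a frame finishes. A minimal sketch of that accumulation pattern (toy `TokenStream`, not the real type):

```rust
// Sketch of the accumulation pattern above: pieces are pushed as
// single-tree streams and joined once at the end with `concat`, rather
// than growing a flat `Vec<TokenTree>`.
#[derive(Clone, Debug, PartialEq)]
enum TokenStream {
    Tree(&'static str),
    Stream(Vec<TokenStream>),
}

impl TokenStream {
    fn concat(streams: Vec<TokenStream>) -> TokenStream {
        TokenStream::Stream(streams)
    }
}

impl From<&'static str> for TokenStream {
    fn from(tok: &'static str) -> TokenStream {
        TokenStream::Tree(tok)
    }
}

fn transcribe(toks: &[&'static str]) -> TokenStream {
    let mut result: Vec<TokenStream> = Vec::new();
    for &tok in toks {
        result.push(tok.into()); // mirrors `result.push(token.into())` above
    }
    TokenStream::concat(result)
}

fn main() {
    assert_eq!(
        transcribe(&["a", "b"]),
        TokenStream::Stream(vec![TokenStream::Tree("a"), TokenStream::Tree("b")])
    );
}
```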
src/libsyntax/fold.rs
index 257b7efba5c8e8d2f49cf96eef59d93a56a125f1..4242b0f8b9803d17df7b06e2c18b55987a1ce671 100644 (file)
@@ -233,11 +233,11 @@ fn fold_ty_params(&mut self, tps: Vec<TyParam>) -> Vec<TyParam> {
         noop_fold_ty_params(tps, self)
     }
 
-    fn fold_tt(&mut self, tt: &TokenTree) -> TokenTree {
+    fn fold_tt(&mut self, tt: TokenTree) -> TokenTree {
         noop_fold_tt(tt, self)
     }
 
-    fn fold_tts(&mut self, tts: &[TokenTree]) -> Vec<TokenTree> {
+    fn fold_tts(&mut self, tts: TokenStream) -> TokenStream {
         noop_fold_tts(tts, self)
     }
 
@@ -497,8 +497,8 @@ pub fn noop_fold_attribute<T: Folder>(attr: Attribute, fld: &mut T) -> Option<At
 pub fn noop_fold_mac<T: Folder>(Spanned {node, span}: Mac, fld: &mut T) -> Mac {
     Spanned {
         node: Mac_ {
+            tts: fld.fold_tts(node.stream()).into(),
             path: fld.fold_path(node.path),
-            tts: fld.fold_tts(&node.tts),
         },
         span: fld.new_span(span)
     }
@@ -539,23 +539,19 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
     }
 }
 
-pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
-    match *tt {
-        TokenTree::Token(span, ref tok) =>
-            TokenTree::Token(fld.new_span(span), fld.fold_token(tok.clone())),
-        TokenTree::Delimited(span, ref delimed) => {
-            TokenTree::Delimited(fld.new_span(span), Rc::new(
-                            Delimited {
-                                delim: delimed.delim,
-                                tts: fld.fold_tts(&delimed.tts),
-                            }
-                        ))
-        },
+pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
+    match tt {
+        TokenTree::Token(span, tok) =>
+            TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
+        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(fld.new_span(span), Delimited {
+            tts: fld.fold_tts(delimed.stream()).into(),
+            delim: delimed.delim,
+        }),
     }
 }
 
-pub fn noop_fold_tts<T: Folder>(tts: &[TokenTree], fld: &mut T) -> Vec<TokenTree> {
-    tts.iter().map(|tt| fld.fold_tt(tt)).collect()
+pub fn noop_fold_tts<T: Folder>(tts: TokenStream, fld: &mut T) -> TokenStream {
+    tts.trees().map(|tt| fld.fold_tt(tt)).collect()
 }
 
 // apply ident folder if it's an ident, apply other folds to interpolated nodes
@@ -617,7 +613,7 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
         token::NtIdent(id) => token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
         token::NtPath(path) => token::NtPath(fld.fold_path(path)),
-        token::NtTT(tt) => token::NtTT(fld.fold_tt(&tt)),
+        token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
         token::NtImplItem(item) =>
             token::NtImplItem(fld.fold_impl_item(item)
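
The fold hunks above move token-tree folding to by-value: `fold_tt` consumes and rebuilds each tree, and `fold_tts` maps that over a whole stream, dropping the `Rc::new` and `clone()` calls of the by-reference version. A minimal sketch of the shape, with toy types:

```rust
// Toy by-value fold: trees are consumed and rebuilt, nested bodies are
// folded recursively, exactly the shape of `noop_fold_tt`/`noop_fold_tts`.
#[derive(Debug, PartialEq)]
enum TokenTree {
    Token(String),
    Delimited(Vec<TokenTree>), // stands in for `Delimited` + its stream
}

fn fold_tt<F: FnMut(String) -> String>(tt: TokenTree, f: &mut F) -> TokenTree {
    match tt {
        // Tokens move through by value; no `tok.clone()` as in the old
        // by-reference version.
        TokenTree::Token(tok) => TokenTree::Token(f(tok)),
        TokenTree::Delimited(tts) => TokenTree::Delimited(fold_tts(tts, f)),
    }
}

fn fold_tts<F: FnMut(String) -> String>(tts: Vec<TokenTree>, f: &mut F) -> Vec<TokenTree> {
    tts.into_iter().map(|tt| fold_tt(tt, f)).collect()
}

fn main() {
    let tts = vec![TokenTree::Delimited(vec![TokenTree::Token("x".into())])];
    let out = fold_tts(tts, &mut |t| t.to_uppercase());
    assert_eq!(out, vec![TokenTree::Delimited(vec![TokenTree::Token("X".into())])]);
}
```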
src/libsyntax/parse/lexer/tokentrees.rs
index eafc3f77ab05227bbe40a6f20ab9eec74f559371..554a1fcfc71a6beb3497510e05c3a4aba10160a8 100644 (file)
 use parse::lexer::StringReader;
 use parse::{token, PResult};
 use syntax_pos::Span;
-use tokenstream::{Delimited, TokenTree};
-
-use std::rc::Rc;
+use tokenstream::{Delimited, TokenStream, TokenTree};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
-    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
+    pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
         let mut tts = Vec::new();
         while self.token != token::Eof {
-            tts.push(self.parse_token_tree()?);
+            tts.push(self.parse_token_tree()?.into());
         }
-        Ok(tts)
+        Ok(TokenStream::concat(tts))
     }
 
     // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
-    fn parse_token_trees_until_close_delim(&mut self) -> Vec<TokenTree> {
+    fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         let mut tts = vec![];
         loop {
             if let token::CloseDelim(..) = self.token {
-                return tts;
+                return TokenStream::concat(tts);
             }
             match self.parse_token_tree() {
-                Ok(tt) => tts.push(tt),
+                Ok(tt) => tts.push(tt.into()),
                 Err(mut e) => {
                     e.emit();
-                    return tts;
+                    return TokenStream::concat(tts);
                 }
             }
         }
@@ -111,10 +109,10 @@ fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+                Ok(TokenTree::Delimited(span, Delimited {
                     delim: delim,
-                    tts: tts,
-                })))
+                    tts: tts.into(),
+                }))
             },
             token::CloseDelim(_) => {
                 // An unexpected closing delimiter (i.e., there is no
src/libsyntax/parse/mod.rs
index f783e32d621040f02fb38682dc06ec38a5f47b26..7207463e1b9ab0c81df1fb00319164817fe519b4 100644 (file)
@@ -19,7 +19,7 @@
 use ptr::P;
 use str::char_at;
 use symbol::Symbol;
-use tokenstream;
+use tokenstream::{TokenStream, TokenTree};
 
 use std::cell::RefCell;
 use std::collections::HashSet;
@@ -141,9 +141,9 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-                                     -> Vec<tokenstream::TokenTree> {
-    filemap_to_tts(sess, sess.codemap().new_filemap(name, None, source))
+pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
+                                        -> TokenStream {
+    filemap_to_stream(sess, sess.codemap().new_filemap(name, None, source))
 }
 
 // Create a new parser from a source string
@@ -175,7 +175,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Parser<'a> {
     let end_pos = filemap.end_pos;
-    let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap));
+    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
 
     if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
         parser.span = syntax_pos::mk_sp(end_pos, end_pos);
@@ -186,13 +186,8 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc<FileMap>, ) -> Par
 
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
-pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>)
-                               -> Parser<'a> {
-    tts_to_parser(sess, tts)
-}
-
-pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> {
-    tts_to_parser(sess, ts.into_trees().collect())
+pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
+    stream_to_parser(sess, tts.into_iter().collect())
 }
 
 
@@ -215,15 +210,15 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream::TokenTree> {
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream {
     let mut srdr = lexer::StringReader::new(sess, filemap);
     srdr.real_token();
     panictry!(srdr.parse_all_token_trees())
 }
 
-/// Given tts and the ParseSess, produce a parser
-pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
-    let mut p = Parser::new(sess, tts, None, false);
+/// Given stream and the ParseSess, produce a parser
+pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
+    let mut p = Parser::new(sess, stream, None, false);
     p.check_unknown_macro_variable();
     p
 }
@@ -660,7 +655,7 @@ fn sp(a: u32, b: u32) -> Span {
     #[test]
     fn string_to_tts_macro () {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
-        let tts: &[tokenstream::TokenTree] = &tts[..];
+        let tts: &[TokenTree] = &tts[..];
 
         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
             (
src/libsyntax/parse/parser.rs
index b12b0c03267010e30b2050a101451934bc551f7c..c88b859e036d45c88ffbb31ab5feb24b6c8ec9d4 100644 (file)
@@ -53,7 +53,7 @@
 use print::pprust;
 use ptr::P;
 use parse::PResult;
-use tokenstream::{self, Delimited, TokenTree, TokenStream};
+use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};
 use util::ThinVec;
 
@@ -200,7 +200,7 @@ fn new(sp: Span, delimited: &Delimited) -> Self {
             delim: delimited.delim,
             span: sp,
             open_delim: delimited.delim == token::NoDelim,
-            tree_cursor: delimited.tts.iter().cloned().collect::<TokenStream>().into_trees(),
+            tree_cursor: delimited.stream().into_trees(),
             close_delim: delimited.delim == token::NoDelim,
         }
     }
@@ -211,12 +211,14 @@ fn next(&mut self) -> TokenAndSpan {
         loop {
             let tree = if !self.frame.open_delim {
                 self.frame.open_delim = true;
-                Delimited { delim: self.frame.delim, tts: Vec::new() }.open_tt(self.frame.span)
+                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
+                    .open_tt(self.frame.span)
             } else if let Some(tree) = self.frame.tree_cursor.next() {
                 tree
             } else if !self.frame.close_delim {
                 self.frame.close_delim = true;
-                Delimited { delim: self.frame.delim, tts: Vec::new() }.close_tt(self.frame.span)
+                Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
+                    .close_tt(self.frame.span)
             } else if let Some(frame) = self.stack.pop() {
                 self.frame = frame;
                 continue
@@ -255,21 +257,23 @@ fn next_desugared(&mut self) -> TokenAndSpan {
             num_of_hashes = cmp::max(num_of_hashes, count);
         }
 
-        let body = TokenTree::Delimited(sp, Rc::new(Delimited {
+        let body = TokenTree::Delimited(sp, Delimited {
             delim: token::Bracket,
-            tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
-                      TokenTree::Token(sp, token::Eq),
-                      TokenTree::Token(sp, token::Literal(
-                          token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
-        }));
+            tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
+                  TokenTree::Token(sp, token::Eq),
+                  TokenTree::Token(sp, token::Literal(
+                      token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
+                .iter().cloned().collect::<TokenStream>().into(),
+        });
 
         self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
             delim: token::NoDelim,
             tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
-                    .iter().cloned().collect()
+                    .iter().cloned().collect::<TokenStream>().into()
             } else {
-                [TokenTree::Token(sp, token::Pound), body].iter().cloned().collect()
+                [TokenTree::Token(sp, token::Pound), body]
+                    .iter().cloned().collect::<TokenStream>().into()
             },
         })));
 
@@ -405,7 +409,7 @@ fn from(expr: P<Expr>) -> Self {
 
 impl<'a> Parser<'a> {
     pub fn new(sess: &'a ParseSess,
-               tokens: Vec<TokenTree>,
+               tokens: TokenStream,
                directory: Option<Directory>,
                desugar_doc_comments: bool)
                -> Self {
@@ -423,7 +427,7 @@ pub fn new(sess: &'a ParseSess,
             token_cursor: TokenCursor {
                 frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
                     delim: token::NoDelim,
-                    tts: tokens,
+                    tts: tokens.into(),
                 }),
                 stack: Vec::new(),
             },
@@ -2098,10 +2102,10 @@ pub fn mk_lit_u32(&mut self, i: u32, attrs: ThinVec<Attribute>) -> P<Expr> {
         })
     }
 
-    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, Vec<TokenTree>)> {
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> {
         match self.token {
             token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree {
-                TokenTree::Delimited(_, delimited) => (delim, delimited.tts.clone()),
+                TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()),
                 _ => unreachable!(),
             }),
             _ => Err(self.fatal("expected open delimiter")),
@@ -2649,10 +2653,10 @@ pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
                                          self.token_cursor.stack.pop().unwrap());
                 self.span = frame.span;
                 self.bump();
-                return Ok(TokenTree::Delimited(frame.span, Rc::new(Delimited {
+                return Ok(TokenTree::Delimited(frame.span, Delimited {
                     delim: frame.delim,
-                    tts: frame.tree_cursor.original_stream().trees().collect(),
-                })));
+                    tts: frame.tree_cursor.original_stream().into(),
+                }));
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
src/libsyntax/print/pprust.rs
index ec962d03458d1753c905e7cffd90269efb092f7d..53ef8e8dfa49c3c225a8abfacd59827f6c1da906 100644 (file)
@@ -286,7 +286,7 @@ pub fn token_to_string(tok: &Token) -> String {
             token::NtStmt(ref e)        => stmt_to_string(&e),
             token::NtPat(ref e)         => pat_to_string(&e),
             token::NtIdent(ref e)       => ident_to_string(e.node),
-            token::NtTT(ref e)          => tt_to_string(&e),
+            token::NtTT(ref tree)       => tt_to_string(tree.clone()),
             token::NtArm(ref e)         => arm_to_string(&e),
             token::NtImplItem(ref e)    => impl_item_to_string(&e),
             token::NtTraitItem(ref e)   => trait_item_to_string(&e),
@@ -321,12 +321,12 @@ pub fn lifetime_to_string(e: &ast::Lifetime) -> String {
     to_string(|s| s.print_lifetime(e))
 }
 
-pub fn tt_to_string(tt: &tokenstream::TokenTree) -> String {
+pub fn tt_to_string(tt: tokenstream::TokenTree) -> String {
     to_string(|s| s.print_tt(tt))
 }
 
 pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String {
-    to_string(|s| s.print_tts(tts))
+    to_string(|s| s.print_tts(tts.iter().cloned().collect()))
 }
 
 pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
@@ -1324,7 +1324,7 @@ pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
                 self.print_ident(item.ident)?;
                 self.cbox(INDENT_UNIT)?;
                 self.popen()?;
-                self.print_tts(&node.tts[..])?;
+                self.print_tts(node.stream())?;
                 self.pclose()?;
                 word(&mut self.s, ";")?;
                 self.end()?;
@@ -1456,8 +1456,8 @@ pub fn print_struct(&mut self,
     /// appropriate macro, transcribe back into the grammar we just parsed from,
     /// and then pretty-print the resulting AST nodes (so, e.g., we print
     /// expression arguments as expressions). It can be done! I think.
-    pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> {
-        match *tt {
+    pub fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
+        match tt {
             TokenTree::Token(_, ref tk) => {
                 word(&mut self.s, &token_to_string(tk))?;
                 match *tk {
@@ -1470,16 +1470,16 @@ pub fn print_tt(&mut self, tt: &tokenstream::TokenTree) -> io::Result<()> {
             TokenTree::Delimited(_, ref delimed) => {
                 word(&mut self.s, &token_to_string(&delimed.open_token()))?;
                 space(&mut self.s)?;
-                self.print_tts(&delimed.tts)?;
+                self.print_tts(delimed.stream())?;
                 space(&mut self.s)?;
                 word(&mut self.s, &token_to_string(&delimed.close_token()))
             },
         }
     }
 
-    pub fn print_tts(&mut self, tts: &[tokenstream::TokenTree]) -> io::Result<()> {
+    pub fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> {
         self.ibox(0)?;
-        for (i, tt) in tts.iter().enumerate() {
+        for (i, tt) in tts.into_trees().enumerate() {
             if i != 0 {
                 space(&mut self.s)?;
             }
@@ -1550,7 +1550,7 @@ pub fn print_trait_item(&mut self, ti: &ast::TraitItem)
                 word(&mut self.s, "! ")?;
                 self.cbox(INDENT_UNIT)?;
                 self.popen()?;
-                self.print_tts(&node.tts[..])?;
+                self.print_tts(node.stream())?;
                 self.pclose()?;
                 word(&mut self.s, ";")?;
                 self.end()?
@@ -1586,7 +1586,7 @@ pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> {
                 word(&mut self.s, "! ")?;
                 self.cbox(INDENT_UNIT)?;
                 self.popen()?;
-                self.print_tts(&node.tts[..])?;
+                self.print_tts(node.stream())?;
                 self.pclose()?;
                 word(&mut self.s, ";")?;
                 self.end()?
@@ -1779,7 +1779,7 @@ pub fn print_mac(&mut self, m: &ast::Mac, delim: token::DelimToken)
             }
             token::NoDelim => {}
         }
-        self.print_tts(&m.node.tts)?;
+        self.print_tts(m.node.stream())?;
         match delim {
             token::Paren => self.pclose(),
             token::Bracket => word(&mut self.s, "]"),
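The pretty-printer hunks above move `print_tt` and `print_tts` from borrowed slices to by-value `TokenTree`/`TokenStream` arguments (streams are cheap to pass around, since their storage is reference-counted), while the public `tts_to_string(&[tokenstream::TokenTree])` signature survives by collecting the slice into a stream at the boundary. A sketch of that adapter, relying on the same `FromIterator` conversion this diff uses elsewhere:

    use syntax::tokenstream::{TokenStream, TokenTree};

    // The boundary conversion `tts_to_string` now performs: a borrowed
    // slice of trees is cloned and collected into an owned TokenStream.
    fn slice_to_stream(tts: &[TokenTree]) -> TokenStream {
        tts.iter().cloned().collect()
    }
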
index 083435a04336201542a00469119d5fde4846c3e6..b7728609acaa533a5a74e419520b8400ce884776 100644 (file)
 //! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
 //! ownership of the original.
 
-use ast::{self, LitKind};
 use syntax_pos::{BytePos, Span, DUMMY_SP};
-use codemap::Spanned;
 use ext::base;
 use ext::tt::{macro_parser, quoted};
-use parse::{self, Directory};
-use parse::token::{self, Token, Lit};
+use parse::Directory;
+use parse::token::{self, Token};
 use print::pprust;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
-use symbol::Symbol;
 use util::RcSlice;
 
 use std::{fmt, iter, mem};
-use std::rc::Rc;
+use std::hash::{self, Hash};
 
 /// A delimited sequence of token trees
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
@@ -43,7 +40,7 @@ pub struct Delimited {
     /// The type of delimiter
     pub delim: token::DelimToken,
     /// The delimited sequence of token trees
-    pub tts: Vec<TokenTree>,
+    pub tts: ThinTokenStream,
 }
 
 impl Delimited {
@@ -76,8 +73,8 @@ pub fn close_tt(&self, span: Span) -> TokenTree {
     }
 
     /// Returns the token trees inside the delimiters.
-    pub fn subtrees(&self) -> &[TokenTree] {
-        &self.tts
+    pub fn stream(&self) -> TokenStream {
+        self.tts.clone().into()
     }
 }
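With `tts` now a `ThinTokenStream`, `Delimited` no longer holds a `Vec<TokenTree>` to borrow from, so the slice accessor `subtrees()` gives way to the owning `stream()`. The clone inside is shallow (a reference-count bump), so this stays cheap. Callers iterate rather than index, for example (a sketch, not from this diff):

    use syntax::tokenstream::Delimited;

    // Count the top-level token trees inside a delimited group.
    // `stream()` converts the Rc-backed ThinTokenStream back into a
    // TokenStream; `trees()` iterates its top-level trees.
    fn top_level_len(delimited: &Delimited) -> usize {
        delimited.stream().trees().count()
    }
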
 
@@ -98,19 +95,19 @@ pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(Span, Rc<Delimited>),
+    Delimited(Span, Delimited),
 }
 
 impl TokenTree {
     /// Use this token tree as a matcher to parse given tts.
-    pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree])
+    pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: TokenStream)
                  -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let directory = Directory {
             path: cx.current_expansion.module.directory.clone(),
             ownership: cx.current_expansion.directory_ownership,
         };
-        macro_parser::parse(cx.parse_sess(), tts.iter().cloned().collect(), mtch, Some(directory))
+        macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory))
     }
 
     /// Check if this TokenTree is equal to the other, regardless of span information.
@@ -118,15 +115,8 @@ pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
             (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
             (&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
-                (*dl).delim == (*dl2).delim && dl.tts.len() == dl2.tts.len() &&
-                {
-                    for (tt1, tt2) in dl.tts.iter().zip(dl2.tts.iter()) {
-                        if !tt1.eq_unspanned(tt2) {
-                            return false;
-                        }
-                    }
-                    true
-                }
+                dl.delim == dl2.delim &&
+                dl.stream().trees().zip(dl2.stream().trees()).all(|(tt, tt2)| tt.eq_unspanned(&tt2))
             }
             (_, _) => false,
         }
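The rewritten `eq_unspanned` swaps the manual loop for iterator combinators. One behavioral nuance: the old code required `dl.tts.len() == dl2.tts.len()`, whereas `zip(..).all(..)` only inspects the common prefix, so delimited groups of different lengths now compare equal whenever the shorter one matches. The intended span-insensitive semantics, illustrated with a sketch (not from this diff):

    use syntax::parse::token::Token;
    use syntax::tokenstream::TokenTree;
    use syntax_pos::{Span, DUMMY_SP};

    // Two identical tokens at different spans are `eq_unspanned`,
    // even though they would differ under plain `==` on the trees.
    fn same_modulo_spans(tok: Token, sp: Span) -> bool {
        let a = TokenTree::Token(sp, tok.clone());
        let b = TokenTree::Token(DUMMY_SP, tok);
        a.eq_unspanned(&b) // true: only the token itself is compared
    }
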
@@ -146,64 +136,6 @@ pub fn eq_token(&self, t: Token) -> bool {
             _ => false,
         }
     }
-
-    /// Indicates if the token is an identifier.
-    pub fn is_ident(&self) -> bool {
-        self.maybe_ident().is_some()
-    }
-
-    /// Returns an identifier.
-    pub fn maybe_ident(&self) -> Option<ast::Ident> {
-        match *self {
-            TokenTree::Token(_, Token::Ident(t)) => Some(t.clone()),
-            TokenTree::Delimited(_, ref dl) => {
-                let tts = dl.subtrees();
-                if tts.len() != 1 {
-                    return None;
-                }
-                tts[0].maybe_ident()
-            }
-            _ => None,
-        }
-    }
-
-    /// Returns a Token literal.
-    pub fn maybe_lit(&self) -> Option<token::Lit> {
-        match *self {
-            TokenTree::Token(_, Token::Literal(l, _)) => Some(l.clone()),
-            TokenTree::Delimited(_, ref dl) => {
-                let tts = dl.subtrees();
-                if tts.len() != 1 {
-                    return None;
-                }
-                tts[0].maybe_lit()
-            }
-            _ => None,
-        }
-    }
-
-    /// Returns an AST string literal.
-    pub fn maybe_str(&self) -> Option<ast::Lit> {
-        match *self {
-            TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
-                let l = LitKind::Str(Symbol::intern(&parse::str_lit(&s.as_str())),
-                                     ast::StrStyle::Cooked);
-                Some(Spanned {
-                    node: l,
-                    span: sp,
-                })
-            }
-            TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
-                let l = LitKind::Str(Symbol::intern(&parse::raw_str_lit(&s.as_str())),
-                                     ast::StrStyle::Raw(n));
-                Some(Spanned {
-                    node: l,
-                    span: sp,
-                })
-            }
-            _ => None,
-        }
-    }
 }
 
 /// # Token Streams
@@ -396,6 +328,36 @@ fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize>
     }
 }
 
+/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation.
+/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
+/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
+#[derive(Debug, Clone)]
+pub struct ThinTokenStream(Option<RcSlice<TokenStream>>);
+
+impl From<TokenStream> for ThinTokenStream {
+    fn from(stream: TokenStream) -> ThinTokenStream {
+        ThinTokenStream(match stream.kind {
+            TokenStreamKind::Empty => None,
+            TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])),
+            TokenStreamKind::Stream(stream) => Some(stream),
+        })
+    }
+}
+
+impl From<ThinTokenStream> for TokenStream {
+    fn from(stream: ThinTokenStream) -> TokenStream {
+        stream.0.map(TokenStream::concat_rc_slice).unwrap_or_else(TokenStream::empty)
+    }
+}
+
+impl Eq for ThinTokenStream {}
+
+impl PartialEq<ThinTokenStream> for ThinTokenStream {
+    fn eq(&self, other: &ThinTokenStream) -> bool {
+        TokenStream::from(self.clone()) == TokenStream::from(other.clone())
+    }
+}
+
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.write_str(&pprust::tts_to_string(&self.trees().collect::<Vec<_>>()))
@@ -414,6 +376,32 @@ fn decode<D: Decoder>(decoder: &mut D) -> Result<TokenStream, D::Error> {
     }
 }
 
+impl Hash for TokenStream {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) {
+        for tree in self.trees() {
+            tree.hash(state);
+        }
+    }
+}
+
+impl Encodable for ThinTokenStream {
+    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
+        TokenStream::from(self.clone()).encode(encoder)
+    }
+}
+
+impl Decodable for ThinTokenStream {
+    fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
+        TokenStream::decode(decoder).map(Into::into)
+    }
+}
+
+impl Hash for ThinTokenStream {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) {
+        TokenStream::from(self.clone()).hash(state);
+    }
+}
+
 
 #[cfg(test)]
 mod tests {
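The `Eq`, `Hash`, `Encodable`, and `Decodable` impls added above all route through the conversions between `ThinTokenStream` and `TokenStream`, which keeps equality, hashing, and serialization mutually consistent. The round trip preserves the value, though not necessarily the internal representation (a single tree comes back as a one-element stream). A sketch, assuming the conversions above:

    use syntax::tokenstream::{ThinTokenStream, TokenStream};

    // Round-trip a stream through its thin representation. An empty
    // stream maps to `None` and back; anything else is Rc-backed, so
    // both conversions are cheap.
    fn round_trip(stream: TokenStream) -> TokenStream {
        let thin: ThinTokenStream = stream.into();
        thin.into()
    }
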
index cb3becf83f6828e7ab747fe982b2fdf225cc0ede..195fb23f9d8c75ed8b8c3f0e4ce1042f207b259b 100644 (file)
@@ -8,7 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use std::hash::{self, Hash};
 use std::fmt;
 use std::ops::Deref;
 use std::rc::Rc;
@@ -37,12 +36,6 @@ fn deref(&self) -> &[T] {
     }
 }
 
-impl<T: Hash> Hash for RcSlice<T> {
-    fn hash<H: hash::Hasher>(&self, state: &mut H) {
-        self.deref().hash(state);
-    }
-}
-
 impl<T: fmt::Debug> fmt::Debug for RcSlice<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Debug::fmt(self.deref(), f)
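This removal follows from the tokenstream.rs changes above: `Hash` is now implemented directly on `TokenStream` and `ThinTokenStream` by walking the trees, so the generic `Hash for RcSlice<T>` is no longer needed.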
index a5e083f926a07ad97184355a2604f2c73ee4726a..767ec94a0ce61955123341be14a3ee161eea9739 100644 (file)
@@ -107,7 +107,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
                 if p2.token != token::Eof {
                     let mut extra_tts = panictry!(p2.parse_all_token_trees());
                     extra_tts.extend(tts[first_colon..].iter().cloned());
-                    p = parse::tts_to_parser(cx.parse_sess, extra_tts);
+                    p = parse::stream_to_parser(cx.parse_sess, extra_tts.into_iter().collect());
                 }
 
                 asm = s;
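The asm.rs hunk shows the call-site pattern for the renamed parser entry point: `tts_to_parser` taking a `Vec<TokenTree>` becomes `stream_to_parser` taking a `TokenStream`, with the vector collected into a stream at the boundary. A sketch of that adapter, assuming `stream_to_parser`'s signature matches its uses in this diff:

    use syntax::parse::{self, ParseSess};
    use syntax::parse::parser::Parser;
    use syntax::tokenstream::TokenTree;

    // Build a Parser from a Vec<TokenTree> by first collecting the
    // trees into a TokenStream, then handing it to stream_to_parser.
    fn parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<TokenTree>) -> Parser<'a> {
        parse::stream_to_parser(sess, tts.into_iter().collect())
    }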