Reverse fixups

diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 037881cef6e051d549c996f87dc61b4b5db3db57..d3489813e175e6742405f63840b3d978fd14d538 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -1,39 +1,42 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
 
-use parser::{ParseError, TreeSink};
 use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::{always, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
-    tokenize, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
+    AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, WalkEvent,
-    T,
+    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::{
-    subtree_source::SubtreeTokenSource, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
-};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
 
-/// Convert the syntax node to a `TokenTree` (what macro
-/// will consume).
+/// Convert the syntax node to a `TokenTree` (what macros will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
-    syntax_node_to_token_tree_censored(node, &Default::default())
+    syntax_node_to_token_tree_censored(node, Default::default(), Default::default())
 }
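 
 // A minimal usage sketch, assuming `node` is any parsed `SyntaxNode` (for
 // example, the token tree of a macro call):
 //
 //     let (subtree, token_map) = syntax_node_to_token_tree(&node);
 //     // `token_map` links each allocated `tt::TokenId` back to a text range
 //     // in `node`, which is what lets expansion errors point at source code.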
 
+// FIXME: rename
-/// Convert the syntax node to a `TokenTree` (what macro will consume)
-/// with the censored range excluded.
+/// Convert the syntax node to a `TokenTree` (what macros will consume),
+/// replacing the tokens of the nodes in `replace` and appending the synthetic
+/// tokens in `append` after their nodes.
 pub fn syntax_node_to_token_tree_censored(
     node: &SyntaxNode,
-    censor: &FxHashSet<SyntaxNode>,
+    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
 ) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
-    let mut c = Convertor::new(node, global_offset, censor);
+    let mut c = Convertor::new(node, global_offset, replace, append);
     let subtree = convert_tokens(&mut c);
     c.id_alloc.map.shrink_to_fit();
+    always!(c.replace.is_empty());
+    always!(c.append.is_empty());
     (subtree, c.id_alloc.map)
 }
 
+pub type SyntheticToken = (SyntaxKind, SmolStr);
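+
+// A sketch of the replace/append API above, assuming `body` is the
+// `SyntaxNode` of some expression that should gain a trailing semicolon
+// during expansion:
+//
+//     let mut append = FxHashMap::default();
+//     append.insert(body.clone(), vec![(SyntaxKind::SEMICOLON, SmolStr::new(";"))]);
+//     let (subtree, map) =
+//         syntax_node_to_token_tree_censored(&body, Default::default(), append);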
+
 // The following items are what a `rustc` macro can be parsed into:
 // link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
 // * Expr(P<ast::Expr>)                     -> token_tree_to_expr
@@ -48,36 +51,41 @@ pub fn syntax_node_to_token_tree_censored(
 
 pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
-    entry_point: ParserEntryPoint,
-) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
+    entry_point: parser::TopEntryPoint,
+) -> (Parse<SyntaxNode>, TokenMap) {
     let buffer = match tt {
         tt::Subtree { delimiter: None, token_trees } => {
             TokenBuffer::from_tokens(token_trees.as_slice())
         }
         _ => TokenBuffer::from_subtree(tt),
     };
-    let mut token_source = SubtreeTokenSource::new(&buffer);
+    let parser_input = to_parser_input(&buffer);
+    let parser_output = entry_point.parse(&parser_input);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
-    parser::parse(&mut token_source, &mut tree_sink, entry_point);
-    if tree_sink.roots.len() != 1 {
-        return Err(ExpandError::ConversionError);
+    for event in parser_output.iter() {
+        match event {
+            parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
+                tree_sink.token(kind, n_raw_tokens)
+            }
+            parser::Step::Enter { kind } => tree_sink.start_node(kind),
+            parser::Step::Exit => tree_sink.finish_node(),
+            parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
+        }
     }
-    //FIXME: would be cool to report errors
     let (parse, range_map) = tree_sink.finish();
-    Ok((parse, range_map))
+    (parse, range_map)
 }
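 
 // Round-trip sketch, assuming `tt` was produced from an expression and that
 // `Expr` is among the `parser::TopEntryPoint` variants:
 //
 //     let (parse, map) = token_tree_to_syntax_node(&tt, parser::TopEntryPoint::Expr);
 //     let node = parse.syntax_node(); // parse errors are recorded in the tree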
 
 /// Convert a string to a `TokenTree`
 pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
-    let (tokens, errors) = tokenize(text);
-    if !errors.is_empty() {
+    let lexed = parser::LexedStr::new(text);
+    if lexed.errors().next().is_some() {
         return None;
     }
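 
     // Lexing failures reject the whole input, e.g.:
     //
     //     assert!(parse_to_token_tree(r#""unterminated"#).is_none());
     //     assert!(parse_to_token_tree("struct S;").is_some());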
 
     let mut conv = RawConvertor {
-        text,
-        offset: TextSize::default(),
-        inner: tokens.iter(),
+        lexed,
+        pos: 0,
         id_alloc: TokenIdAlloc {
             map: Default::default(),
             global_offset: TextSize::default(),
@@ -99,7 +107,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     let mut res = Vec::new();
 
     while iter.peek_n(0).is_some() {
-        let expanded = iter.expect_fragment(ParserEntryPoint::Expr);
+        let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);
 
         res.push(match expanded.value {
             None => break,
@@ -136,25 +144,26 @@ struct StackEntry {
         idx: !0,
         open_range: TextRange::empty(TextSize::of('.')),
     };
-    let mut stack = vec![entry];
+    let mut stack = NonEmptyVec::new(entry);
 
     loop {
-        let entry = stack.last_mut().unwrap();
-        let result = &mut entry.subtree.token_trees;
+        let StackEntry { subtree, .. } = stack.last_mut();
+        let result = &mut subtree.token_trees;
         let (token, range) = match conv.bump() {
-            None => break,
             Some(it) => it,
+            None => break,
         };
 
-        let k: SyntaxKind = token.kind();
-        if k == COMMENT {
+        let kind = token.kind(&conv);
+        if kind == COMMENT {
             if let Some(tokens) = conv.convert_doc_comment(&token) {
                 // FIXME: There has to be a better way to do this
                 // Add the comment's token id to the converted doc string
                 let id = conv.id_alloc().alloc(range);
                 result.extend(tokens.into_iter().map(|mut tt| {
                     if let tt::TokenTree::Subtree(sub) = &mut tt {
-                        if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = &mut sub.token_trees[2]
+                        if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
+                            sub.token_trees.get_mut(2)
                         {
                             lit.id = id
                         }
@@ -164,26 +173,26 @@ struct StackEntry {
             }
             continue;
         }
-
-        result.push(if k.is_punct() && k != UNDERSCORE {
+        let tt = if kind.is_punct() && kind != UNDERSCORE {
             assert_eq!(range.len(), TextSize::of('.'));
 
-            if let Some(delim) = entry.subtree.delimiter {
+            if let Some(delim) = subtree.delimiter {
                 let expected = match delim.kind {
                     tt::DelimiterKind::Parenthesis => T![')'],
                     tt::DelimiterKind::Brace => T!['}'],
                     tt::DelimiterKind::Bracket => T![']'],
                 };
 
-                if k == expected {
-                    let entry = stack.pop().unwrap();
-                    conv.id_alloc().close_delim(entry.idx, Some(range));
-                    stack.last_mut().unwrap().subtree.token_trees.push(entry.subtree.into());
+                if kind == expected {
+                    if let Some(entry) = stack.pop() {
+                        conv.id_alloc().close_delim(entry.idx, Some(range));
+                        stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+                    }
                     continue;
                 }
             }
 
-            let delim = match k {
+            let delim = match kind {
                 T!['('] => Some(tt::DelimiterKind::Parenthesis),
                 T!['{'] => Some(tt::DelimiterKind::Brace),
                 T!['['] => Some(tt::DelimiterKind::Bracket),
@@ -196,36 +205,35 @@ struct StackEntry {
                 subtree.delimiter = Some(tt::Delimiter { id, kind });
                 stack.push(StackEntry { subtree, idx, open_range: range });
                 continue;
-            } else {
-                let spacing = match conv.peek() {
-                    Some(next)
-                        if next.kind().is_trivia()
-                            || next.kind() == T!['[']
-                            || next.kind() == T!['{']
-                            || next.kind() == T!['('] =>
-                    {
-                        tt::Spacing::Alone
-                    }
-                    Some(next) if next.kind().is_punct() && next.kind() != UNDERSCORE => {
-                        tt::Spacing::Joint
-                    }
-                    _ => tt::Spacing::Alone,
-                };
-                let char = match token.to_char() {
-                    Some(c) => c,
-                    None => {
-                        panic!("Token from lexer must be single char: token = {:#?}", token);
-                    }
-                };
-                tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
             }
+
+            let spacing = match conv.peek().map(|next| next.kind(&conv)) {
+                Some(kind)
+                    if !kind.is_trivia()
+                        && kind.is_punct()
+                        && kind != T!['[']
+                        && kind != T!['{']
+                        && kind != T!['(']
+                        && kind != UNDERSCORE =>
+                {
+                    tt::Spacing::Joint
+                }
+                _ => tt::Spacing::Alone,
+            };
+            let char = match token.to_char(&conv) {
+                Some(c) => c,
+                None => {
+                    panic!("Token from lexer must be single char: token = {:#?}", token);
+                }
+            };
+            tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
         } else {
             macro_rules! make_leaf {
                 ($i:ident) => {
-                    tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text() }.into()
+                    tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text(conv) }.into()
                 };
             }
-            let leaf: tt::Leaf = match k {
+            let leaf: tt::Leaf = match kind {
                 T![true] | T![false] => make_leaf!(Ident),
                 IDENT => make_leaf!(Ident),
                 UNDERSCORE => make_leaf!(Ident),
@@ -243,7 +251,7 @@ macro_rules! make_leaf {
 
                     let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                     let ident = tt::Leaf::from(tt::Ident {
-                        text: SmolStr::new(&token.to_text()[1..]),
+                        text: SmolStr::new(&token.to_text(conv)[1..]),
                         id: conv.id_alloc().alloc(r),
                     });
                     result.push(ident.into());
@@ -253,15 +261,15 @@ macro_rules! make_leaf {
             };
 
             leaf.into()
-        });
+        };
+        result.push(tt);
     }
 
     // If we get here, we've consumed all input tokens.
     // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
     // Merge them so we're left with one.
-    while stack.len() > 1 {
-        let entry = stack.pop().unwrap();
-        let parent = stack.last_mut().unwrap();
+    while let Some(entry) = stack.pop() {
+        let parent = stack.last_mut();
 
         conv.id_alloc().close_delim(entry.idx, None);
         let leaf: tt::Leaf = tt::Punct {
@@ -278,13 +286,12 @@ macro_rules! make_leaf {
         parent.subtree.token_trees.extend(entry.subtree.token_trees);
     }
 
-    let subtree = stack.pop().unwrap().subtree;
-    if subtree.token_trees.len() == 1 {
-        if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
-            return first.clone();
-        }
+    let subtree = stack.into_last().subtree;
+    if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
+        first.clone()
+    } else {
+        subtree
     }
-    subtree
 }
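 
 // Unbalanced input does not fail: any still-open delimiter is flushed as a
 // plain punct and its children are merged into the parent. Assuming `unclosed`
 // is the `ast::TokenTree` parsed from `m!(x`:
 //
 //     let tt = syntax_node_to_token_tree(unclosed.syntax()).0;
 //     assert_eq!(tt.to_string(), "( x");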
 
 /// Returns the textual content of a doc comment block as a quoted string
@@ -307,6 +314,7 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
 }
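 
 // For example, `/// hello` has its `///` prefix stripped and the remainder
 // re-quoted and escaped, giving a literal usable as the value of `doc = ...`.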
 
 fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
+    cov_mark::hit!(test_meta_doc_comments);
     let comment = ast::Comment::cast(token.clone())?;
     let doc = comment.kind().doc?;
 
@@ -314,7 +322,8 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>
     let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
 
     // Make `#![]`
-    let mut token_trees = vec![mk_punct('#')];
+    let mut token_trees = Vec::with_capacity(3);
+    token_trees.push(mk_punct('#'));
     if let ast::CommentPlacement::Inner = doc {
         token_trees.push(mk_punct('!'));
     }
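 
     // `token_trees` now starts with `#` (outer docs) or `# !` (inner docs);
     // the bracketed `[doc = <quoted comment>]` subtree built from `meta_tkns`
     // is appended next.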
@@ -389,24 +398,23 @@ fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
     }
 }
 
-/// A Raw Token (straightly from lexer) convertor
+/// A raw token (straight from lexer) convertor
 struct RawConvertor<'a> {
-    text: &'a str,
-    offset: TextSize,
+    lexed: parser::LexedStr<'a>,
+    pos: usize,
     id_alloc: TokenIdAlloc,
-    inner: std::slice::Iter<'a, RawToken>,
 }
 
-trait SrcToken: std::fmt::Debug {
-    fn kind(&self) -> SyntaxKind;
+trait SrcToken<Ctx>: std::fmt::Debug {
+    fn kind(&self, ctx: &Ctx) -> SyntaxKind;
 
-    fn to_char(&self) -> Option<char>;
+    fn to_char(&self, ctx: &Ctx) -> Option<char>;
 
-    fn to_text(&self) -> SmolStr;
+    fn to_text(&self, ctx: &Ctx) -> SmolStr;
 }
 
-trait TokenConvertor {
-    type Token: SrcToken;
+trait TokenConvertor: Sized {
+    type Token: SrcToken<Self>;
 
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
 
@@ -417,42 +425,45 @@ trait TokenConvertor {
     fn id_alloc(&mut self) -> &mut TokenIdAlloc;
 }
 
-impl<'a> SrcToken for (&'a RawToken, &'a str) {
-    fn kind(&self) -> SyntaxKind {
-        self.0.kind
+impl<'a> SrcToken<RawConvertor<'a>> for usize {
+    fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
+        ctx.lexed.kind(*self)
     }
 
-    fn to_char(&self) -> Option<char> {
-        self.1.chars().next()
+    fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
+        ctx.lexed.text(*self).chars().next()
     }
 
-    fn to_text(&self) -> SmolStr {
-        self.1.into()
+    fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
+        ctx.lexed.text(*self).into()
     }
 }
 
 impl<'a> TokenConvertor for RawConvertor<'a> {
-    type Token = (&'a RawToken, &'a str);
+    type Token = usize;
 
-    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
-        convert_doc_comment(&doc_comment(token.1))
+    fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
+        let text = self.lexed.text(token);
+        convert_doc_comment(&doc_comment(text))
     }
 
     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
-        let token = self.inner.next()?;
-        let range = TextRange::at(self.offset, token.len);
-        self.offset += token.len;
+        if self.pos == self.lexed.len() {
+            return None;
+        }
+        let token = self.pos;
+        self.pos += 1;
+        let range = self.lexed.text_range(token);
+        let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
 
-        Some(((token, &self.text[range]), range))
+        Some((token, range))
     }
 
     fn peek(&self) -> Option<Self::Token> {
-        let token = self.inner.as_slice().get(0);
-
-        token.map(|it| {
-            let range = TextRange::at(self.offset, it.len);
-            (it, &self.text[range])
-        })
+        if self.pos == self.lexed.len() {
+            return None;
+        }
+        Some(self.pos)
     }
 
     fn id_alloc(&mut self) -> &mut TokenIdAlloc {
@@ -460,87 +471,122 @@ fn id_alloc(&mut self) -> &mut TokenIdAlloc {
     }
 }
 
-struct Convertor<'c> {
+struct Convertor {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
+    current_synthetic: Vec<SyntheticToken>,
     preorder: PreorderWithTokens,
-    censor: &'c FxHashSet<SyntaxNode>,
+    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }
 
-impl<'c> Convertor<'c> {
+impl Convertor {
     fn new(
         node: &SyntaxNode,
         global_offset: TextSize,
-        censor: &'c FxHashSet<SyntaxNode>,
-    ) -> Convertor<'c> {
+        mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    ) -> Convertor {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
-        let first = Self::next_token(&mut preorder, censor);
+        let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
         Convertor {
             id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
             current: first,
+            current_synthetic: synthetic,
             preorder,
             range,
-            censor,
+            replace,
+            append,
             punct_offset: None,
         }
     }
 
     fn next_token(
         preorder: &mut PreorderWithTokens,
-        censor: &FxHashSet<SyntaxNode>,
-    ) -> Option<SyntaxToken> {
+        replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
         while let Some(ev) = preorder.next() {
             let ele = match ev {
                 WalkEvent::Enter(ele) => ele,
+                WalkEvent::Leave(SyntaxElement::Node(node)) => {
+                    if let Some(mut v) = append.remove(&node) {
+                        eprintln!("after {:?}, appending {:?}", node, v);
+                        if !v.is_empty() {
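+                            // bump() pops `current_synthetic` from the back,
+                            // so store the tokens in reverse source order.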
+                            v.reverse();
+                            return (None, v);
+                        }
+                    }
+                    continue;
+                }
                 _ => continue,
             };
             match ele {
-                SyntaxElement::Token(t) => return Some(t),
-                SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
-                SyntaxElement::Node(_) => (),
+                SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+                SyntaxElement::Node(node) => {
+                    if let Some(mut v) = replace.remove(&node) {
+                        preorder.skip_subtree();
+                        eprintln!("replacing {:?} by {:?}", node, v);
+                        if !v.is_empty() {
+                            v.reverse();
+                            return (None, v);
+                        }
+                    }
+                }
             }
         }
-        None
+        (None, Vec::new())
     }
 }
 
 #[derive(Debug)]
 enum SynToken {
     Ordinary(SyntaxToken),
+    // FIXME is this supposed to be `Punct`?
     Punch(SyntaxToken, TextSize),
+    Synthetic(SyntheticToken),
 }
 
 impl SynToken {
-    fn token(&self) -> &SyntaxToken {
+    fn token(&self) -> Option<&SyntaxToken> {
         match self {
-            SynToken::Ordinary(it) => it,
-            SynToken::Punch(it, _) => it,
+            SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
+            SynToken::Synthetic(_) => None,
         }
     }
 }
 
-impl SrcToken for SynToken {
-    fn kind(&self) -> SyntaxKind {
-        self.token().kind()
+impl SrcToken<Convertor> for SynToken {
+    fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+        match self {
+            SynToken::Ordinary(token) => token.kind(),
+            SynToken::Punch(token, _) => token.kind(),
+            SynToken::Synthetic((kind, _)) => *kind,
+        }
     }
-    fn to_char(&self) -> Option<char> {
+    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+            SynToken::Synthetic(_) => None,
         }
     }
-    fn to_text(&self) -> SmolStr {
-        self.token().text().into()
+    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+        match self {
+            SynToken::Ordinary(token) => token.text().into(),
+            SynToken::Punch(token, _) => token.text().into(),
+            SynToken::Synthetic((_, text)) => text.clone(),
+        }
     }
 }
 
-impl TokenConvertor for Convertor<'_> {
+impl TokenConvertor for Convertor {
     type Token = SynToken;
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
-        convert_doc_comment(token.token())
+        convert_doc_comment(token.token()?)
     }
 
     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -554,15 +600,29 @@ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
             }
         }
 
+        if let Some(synth_token) = self.current_synthetic.pop() {
+            if self.current_synthetic.is_empty() {
+                let (new_current, new_synth) =
+                    Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+                self.current = new_current;
+                self.current_synthetic = new_synth;
+            }
+            // FIXME: fix range?
+            return Some((SynToken::Synthetic(synth_token), self.range));
+        }
+
         let curr = self.current.clone()?;
         if !self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = Self::next_token(&mut self.preorder, self.censor);
+        let (new_current, new_synth) =
+            Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+        self.current = new_current;
+        self.current_synthetic = new_synth;
         let token = if curr.kind().is_punct() {
+            self.punct_offset = Some((curr.clone(), 0.into()));
             let range = curr.text_range();
             let range = TextRange::at(range.start(), TextSize::of('.'));
-            self.punct_offset = Some((curr.clone(), 0.into()));
             (SynToken::Punch(curr, 0.into()), range)
         } else {
             self.punct_offset = None;
@@ -581,6 +641,11 @@ fn peek(&self) -> Option<Self::Token> {
             }
         }
 
+        if let Some(synth_token) = self.current_synthetic.last() {
+            // FIXME: fix range?
+            return Some(SynToken::Synthetic(synth_token.clone()));
+        }
+
         let curr = self.current.clone()?;
         if !self.range.contains_range(curr.text_range()) {
             return None;
@@ -606,10 +671,6 @@ struct TtTreeSink<'a> {
     text_pos: TextSize,
     inner: SyntaxTreeBuilder,
     token_map: TokenMap,
-
-    // Number of roots
-    // Use for detect ill-form tree which is not single root
-    roots: smallvec::SmallVec<[usize; 1]>,
 }
 
 impl<'a> TtTreeSink<'a> {
@@ -620,7 +681,6 @@ fn new(cursor: Cursor<'a>) -> Self {
             open_delims: FxHashMap::default(),
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
-            roots: smallvec::SmallVec::new(),
             token_map: TokenMap::default(),
         }
     }
@@ -631,77 +691,75 @@ fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
     }
 }
 
-fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> &'static str {
+fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
     let texts = match d {
-        Some(tt::DelimiterKind::Parenthesis) => "()",
-        Some(tt::DelimiterKind::Brace) => "{}",
-        Some(tt::DelimiterKind::Bracket) => "[]",
-        None => return "",
+        tt::DelimiterKind::Parenthesis => "()",
+        tt::DelimiterKind::Brace => "{}",
+        tt::DelimiterKind::Bracket => "[]",
     };
 
     let idx = closing as usize;
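     // `texts` is always two ASCII bytes ("()", "{}" or "[]"): byte 0 is the
     // opening delimiter, byte 1 the closing one.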
     &texts[idx..texts.len() - (1 - idx)]
 }
 
-impl<'a> TreeSink for TtTreeSink<'a> {
+impl<'a> TtTreeSink<'a> {
     fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
-        if kind == L_DOLLAR || kind == R_DOLLAR {
-            self.cursor = self.cursor.bump_subtree();
-            return;
-        }
         if kind == LIFETIME_IDENT {
             n_tokens = 2;
         }
 
         let mut last = self.cursor;
         for _ in 0..n_tokens {
-            let tmp_str: SmolStr;
+            let tmp: u8;
             if self.cursor.eof() {
                 break;
             }
             last = self.cursor;
-            let text: &str = match self.cursor.token_tree() {
-                Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
-                    // Mark the range if needed
-                    let (text, id) = match leaf {
-                        tt::Leaf::Ident(ident) => (&ident.text, ident.id),
-                        tt::Leaf::Punct(punct) => {
-                            assert!(punct.char.is_ascii());
-                            let char = &(punct.char as u8);
-                            tmp_str = SmolStr::new_inline(
-                                std::str::from_utf8(std::slice::from_ref(char)).unwrap(),
-                            );
-                            (&tmp_str, punct.id)
+            let text: &str = loop {
+                break match self.cursor.token_tree() {
+                    Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
+                        // Mark the range if needed
+                        let (text, id) = match leaf {
+                            tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id),
+                            tt::Leaf::Punct(punct) => {
+                                assert!(punct.char.is_ascii());
+                                tmp = punct.char as u8;
+                                (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id)
+                            }
+                            tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id),
+                        };
+                        let range = TextRange::at(self.text_pos, TextSize::of(text));
+                        self.token_map.insert(id, range);
+                        self.cursor = self.cursor.bump();
+                        text
+                    }
+                    Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
+                        self.cursor = self.cursor.subtree().unwrap();
+                        match subtree.delimiter {
+                            Some(d) => {
+                                self.open_delims.insert(d.id, self.text_pos);
+                                delim_to_str(d.kind, false)
+                            }
+                            None => continue,
                         }
-                        tt::Leaf::Literal(lit) => (&lit.text, lit.id),
-                    };
-                    let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
-                    self.token_map.insert(id, range);
-                    self.cursor = self.cursor.bump();
-                    text
-                }
-                Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
-                    self.cursor = self.cursor.subtree().unwrap();
-                    if let Some(id) = subtree.delimiter.map(|it| it.id) {
-                        self.open_delims.insert(id, self.text_pos);
                     }
-                    delim_to_str(subtree.delimiter_kind(), false)
-                }
-                None => {
-                    if let Some(parent) = self.cursor.end() {
+                    None => {
+                        let parent = self.cursor.end().unwrap();
                         self.cursor = self.cursor.bump();
-                        if let Some(id) = parent.delimiter.map(|it| it.id) {
-                            if let Some(open_delim) = self.open_delims.get(&id) {
-                                let open_range = TextRange::at(*open_delim, TextSize::of('('));
-                                let close_range = TextRange::at(self.text_pos, TextSize::of('('));
-                                self.token_map.insert_delim(id, open_range, close_range);
+                        match parent.delimiter {
+                            Some(d) => {
+                                if let Some(open_delim) = self.open_delims.get(&d.id) {
+                                    let open_range = TextRange::at(*open_delim, TextSize::of('('));
+                                    let close_range =
+                                        TextRange::at(self.text_pos, TextSize::of('('));
+                                    self.token_map.insert_delim(d.id, open_range, close_range);
+                                }
+                                delim_to_str(d.kind, true)
                             }
+                            None => continue,
                         }
-                        delim_to_str(parent.delimiter_kind(), true)
-                    } else {
-                        continue;
                     }
-                }
+                };
             };
             self.buf += text;
             self.text_pos += TextSize::of(text);
@@ -727,106 +785,13 @@ fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
 
     fn start_node(&mut self, kind: SyntaxKind) {
         self.inner.start_node(kind);
-
-        match self.roots.last_mut() {
-            None | Some(0) => self.roots.push(1),
-            Some(ref mut n) => **n += 1,
-        };
     }
 
     fn finish_node(&mut self) {
         self.inner.finish_node();
-        *self.roots.last_mut().unwrap() -= 1;
     }
 
-    fn error(&mut self, error: ParseError) {
+    fn error(&mut self, error: String) {
         self.inner.error(error, self.text_pos)
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::tests::parse_macro;
-    use parser::TokenSource;
-    use syntax::{
-        ast::{make, AstNode},
-        ted,
-    };
-    use test_utils::assert_eq_text;
-
-    #[test]
-    fn convert_tt_token_source() {
-        let expansion = parse_macro(
-            r#"
-            macro_rules! literals {
-                ($i:ident) => {
-                    {
-                        let a = 'c';
-                        let c = 1000;
-                        let f = 12E+99_f64;
-                        let s = "rust1";
-                    }
-                }
-            }
-            "#,
-        )
-        .expand_tt("literals!(foo);");
-        let tts = &[expansion.into()];
-        let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
-        let mut tt_src = SubtreeTokenSource::new(&buffer);
-        let mut tokens = vec![];
-        while tt_src.current().kind != EOF {
-            tokens.push((tt_src.current().kind, tt_src.text()));
-            tt_src.bump();
-        }
-
-        // [${]
-        // [let] [a] [=] ['c'] [;]
-        assert_eq!(tokens[2 + 3].1, "'c'");
-        assert_eq!(tokens[2 + 3].0, CHAR);
-        // [let] [c] [=] [1000] [;]
-        assert_eq!(tokens[2 + 5 + 3].1, "1000");
-        assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER);
-        // [let] [f] [=] [12E+99_f64] [;]
-        assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64");
-        assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER);
-
-        // [let] [s] [=] ["rust1"] [;]
-        assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\"");
-        assert_eq!(tokens[2 + 15 + 3].0, STRING);
-    }
-
-    #[test]
-    fn test_token_tree_last_child_is_white_space() {
-        let source_file = ast::SourceFile::parse("f!{}").ok().unwrap();
-        let macro_call = source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
-        let token_tree = macro_call.token_tree().unwrap();
-
-        // Token Tree now is :
-        // TokenTree
-        // - TokenTree
-        //   - T!['{']
-        //   - T!['}']
-
-        let token_tree = token_tree.clone_for_update();
-        ted::append_child(token_tree.syntax(), make::tokens::single_space());
-        let token_tree = token_tree.clone_subtree();
-        // Token Tree now is :
-        // TokenTree
-        // - T!['{']
-        // - T!['}']
-        // - WHITE_SPACE
-
-        let tt = syntax_node_to_token_tree(token_tree.syntax()).0;
-        assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
-    }
-
-    #[test]
-    fn test_missing_closing_delim() {
-        let source_file = ast::SourceFile::parse("m!(x").tree();
-        let node = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        let tt = syntax_node_to_token_tree(node.syntax()).0.to_string();
-        assert_eq_text!(&*tt, "( x");
-    }
-}