diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index 0b65fa171f4..7e12647cd86 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -1,37 +1,57 @@
 //! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].

-use parser::{ParseError, TreeSink};
-use rustc_hash::{FxHashMap, FxHashSet};
+use rustc_hash::FxHashMap;
+use stdx::{always, non_empty_vec::NonEmptyVec};
 use syntax::{
     ast::{self, make::tokens::doc_comment},
-    tokenize, AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
+    AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
     SyntaxKind::*,
-    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, Token as RawToken, WalkEvent,
-    T,
+    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
 };
 use tt::buffer::{Cursor, TokenBuffer};

-use crate::{
-    subtree_source::SubtreeTokenSource, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
-};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};

 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
-    syntax_node_to_token_tree_censored(node, &Default::default())
+    let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
+        node,
+        Default::default(),
+        0,
+        Default::default(),
+        Default::default(),
+    );
+    (subtree, token_map)
 }

 /// Convert the syntax node to a `TokenTree` (what macro will consume)
 /// with the censored range excluded.
-pub fn syntax_node_to_token_tree_censored(
+pub fn syntax_node_to_token_tree_with_modifications(
     node: &SyntaxNode,
-    censor: &FxHashSet<SyntaxNode>,
-) -> (tt::Subtree, TokenMap) {
+    existing_token_map: TokenMap,
+    next_id: u32,
+    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+) -> (tt::Subtree, TokenMap, u32) {
     let global_offset = node.text_range().start();
-    let mut c = Convertor::new(node, global_offset, censor);
+    let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
     let subtree = convert_tokens(&mut c);
     c.id_alloc.map.shrink_to_fit();
-    (subtree, c.id_alloc.map)
+    always!(c.replace.is_empty(), "replace: {:?}", c.replace);
+    always!(c.append.is_empty(), "append: {:?}", c.append);
+    (subtree, c.id_alloc.map, c.id_alloc.next_id)
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct SyntheticTokenId(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct SyntheticToken {
+    pub kind: SyntaxKind,
+    pub text: SmolStr,
+    pub range: TextRange,
+    pub id: SyntheticTokenId,
 }

 // The following items are what `rustc` macro can be parsed into :
@@ -48,36 +68,41 @@ pub fn syntax_node_to_token_tree_censored(

 pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
-    entry_point: ParserEntryPoint,
-) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
+    entry_point: parser::TopEntryPoint,
+) -> (Parse<SyntaxNode>, TokenMap) {
     let buffer = match tt {
         tt::Subtree { delimiter: None, token_trees } => {
             TokenBuffer::from_tokens(token_trees.as_slice())
         }
         _ => TokenBuffer::from_subtree(tt),
     };
-    let mut token_source = SubtreeTokenSource::new(&buffer);
+    let parser_input = to_parser_input(&buffer);
+    let parser_output = entry_point.parse(&parser_input);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
-    parser::parse(&mut token_source, &mut tree_sink, entry_point);
-    if tree_sink.roots.len() != 1 {
-        return Err(ExpandError::ConversionError);
+    for event in parser_output.iter() {
+        match event {
+            parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
+                tree_sink.token(kind, n_raw_tokens)
+            }
+            parser::Step::Enter { kind } => tree_sink.start_node(kind),
+            parser::Step::Exit => tree_sink.finish_node(),
+            parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
+        }
     }
-    //FIXME: would be cool to report errors
     let (parse, range_map) = tree_sink.finish();
-    Ok((parse, range_map))
+    (parse, range_map)
 }

 /// Convert a string to a `TokenTree`
 pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
-    let (tokens, errors) = tokenize(text);
-    if !errors.is_empty() {
+    let lexed = parser::LexedStr::new(text);
+    if lexed.errors().next().is_some() {
         return None;
     }

     let mut conv = RawConvertor {
-        text,
-        offset: TextSize::default(),
-        inner: tokens.iter(),
+        lexed,
+        pos: 0,
         id_alloc: TokenIdAlloc {
             map: Default::default(),
             global_offset: TextSize::default(),
@@ -99,7 +124,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     let mut res = Vec::new();

     while iter.peek_n(0).is_some() {
-        let expanded = iter.expect_fragment(ParserEntryPoint::Expr);
+        let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);

         res.push(match expanded.value {
             None => break,
@@ -136,25 +161,27 @@ struct StackEntry {
         idx: !0,
         open_range: TextRange::empty(TextSize::of('.')),
     };
-    let mut stack = vec![entry];
+    let mut stack = NonEmptyVec::new(entry);

     loop {
-        let entry = stack.last_mut().unwrap();
-        let result = &mut entry.subtree.token_trees;
+        let StackEntry { subtree, .. } = stack.last_mut();
+        let result = &mut subtree.token_trees;

         let (token, range) = match conv.bump() {
-            None => break,
             Some(it) => it,
+            None => break,
         };
+        let synth_id = token.synthetic_id(&conv);

-        let k: SyntaxKind = token.kind();
-        if k == COMMENT {
+        let kind = token.kind(&conv);
+        if kind == COMMENT {
             if let Some(tokens) = conv.convert_doc_comment(&token) {
                 // FIXME: There has to be a better way to do this
                 // Add the comments token id to the converted doc string
-                let id = conv.id_alloc().alloc(range);
+                let id = conv.id_alloc().alloc(range, synth_id);
                 result.extend(tokens.into_iter().map(|mut tt| {
                     if let tt::TokenTree::Subtree(sub) = &mut tt {
-                        if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = &mut sub.token_trees[2]
+                        if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
+                            sub.token_trees.get_mut(2)
                         {
                             lit.id = id
                         }
@@ -164,26 +191,28 @@ struct StackEntry {
             }
             continue;
         }
+        let tt = if kind.is_punct() && kind != UNDERSCORE {
+            if synth_id.is_none() {
+                assert_eq!(range.len(), TextSize::of('.'));
+            }

-        result.push(if k.is_punct() && k != UNDERSCORE {
-            assert_eq!(range.len(), TextSize::of('.'));
-
-            if let Some(delim) = entry.subtree.delimiter {
+            if let Some(delim) = subtree.delimiter {
                 let expected = match delim.kind {
                     tt::DelimiterKind::Parenthesis => T![')'],
                     tt::DelimiterKind::Brace => T!['}'],
                     tt::DelimiterKind::Bracket => T![']'],
                 };

-                if k == expected {
-                    let entry = stack.pop().unwrap();
-                    conv.id_alloc().close_delim(entry.idx, Some(range));
-                    stack.last_mut().unwrap().subtree.token_trees.push(entry.subtree.into());
+                if kind == expected {
+                    if let Some(entry) = stack.pop() {
+                        conv.id_alloc().close_delim(entry.idx, Some(range));
+                        stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+                    }
                     continue;
                 }
             }

-            let delim = match k {
+            let delim = match kind {
                 T!['('] => Some(tt::DelimiterKind::Parenthesis),
                 T!['{'] => Some(tt::DelimiterKind::Brace),
                 T!['['] => Some(tt::DelimiterKind::Bracket),
@@ -196,36 +225,37 @@ struct StackEntry {
                 subtree.delimiter = Some(tt::Delimiter { id, kind });
                 stack.push(StackEntry { subtree, idx, open_range: range });
                 continue;
-            } else {
-                let spacing = match conv.peek() {
-                    Some(next)
-                        if next.kind().is_trivia()
-                            || next.kind() == T!['[']
-                            || next.kind() == T!['{']
-                            || next.kind() == T!['('] =>
-                    {
-                        tt::Spacing::Alone
-                    }
-                    Some(next) if next.kind().is_punct() && next.kind() != UNDERSCORE => {
-                        tt::Spacing::Joint
-                    }
-                    _ => tt::Spacing::Alone,
-                };
-                let char = match token.to_char() {
-                    Some(c) => c,
-                    None => {
-                        panic!("Token from lexer must be single char: token = {:#?}", token);
-                    }
-                };
-                tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range) }).into()
             }
+
+            let spacing = match conv.peek().map(|next| next.kind(&conv)) {
+                Some(kind)
+                    if !kind.is_trivia()
+                        && kind.is_punct()
+                        && kind != T!['[']
+                        && kind != T!['{']
+                        && kind != T!['(']
+                        && kind != UNDERSCORE =>
+                {
+                    tt::Spacing::Joint
+                }
+                _ => tt::Spacing::Alone,
+            };
+            let char = match token.to_char(&conv) {
+                Some(c) => c,
+                None => {
+                    panic!("Token from lexer must be single char: token = {:#?}", token);
+                }
+            };
+            tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
+                .into()
         } else {
             macro_rules! make_leaf {
                ($i:ident) => {
-                    tt::$i { id: conv.id_alloc().alloc(range), text: token.to_text() }.into()
+                    tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
+                        .into()
                 };
             }
-            let leaf: tt::Leaf = match k {
+            let leaf: tt::Leaf = match kind {
                 T![true] | T![false] => make_leaf!(Ident),
                 IDENT => make_leaf!(Ident),
                 UNDERSCORE => make_leaf!(Ident),
@@ -237,14 +267,14 @@ macro_rules! make_leaf {
                     let apostrophe = tt::Leaf::from(tt::Punct {
                         char: '\'',
                         spacing: tt::Spacing::Joint,
-                        id: conv.id_alloc().alloc(r),
+                        id: conv.id_alloc().alloc(r, synth_id),
                     });
                     result.push(apostrophe.into());

                     let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                     let ident = tt::Leaf::from(tt::Ident {
-                        text: SmolStr::new(&token.to_text()[1..]),
-                        id: conv.id_alloc().alloc(r),
+                        text: SmolStr::new(&token.to_text(conv)[1..]),
+                        id: conv.id_alloc().alloc(r, synth_id),
                     });
                     result.push(ident.into());
                     continue;
@@ -253,19 +283,19 @@ macro_rules! make_leaf {
             };

             leaf.into()
-        });
+        };
+        result.push(tt);
     }

     // If we get here, we've consumed all input tokens.
     // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
     // Merge them so we're left with one.
-    while stack.len() > 1 {
-        let entry = stack.pop().unwrap();
-        let parent = stack.last_mut().unwrap();
+    while let Some(entry) = stack.pop() {
+        let parent = stack.last_mut();

         conv.id_alloc().close_delim(entry.idx, None);
         let leaf: tt::Leaf = tt::Punct {
-            id: conv.id_alloc().alloc(entry.open_range),
+            id: conv.id_alloc().alloc(entry.open_range, None),
             char: match entry.subtree.delimiter.unwrap().kind {
                 tt::DelimiterKind::Parenthesis => '(',
                 tt::DelimiterKind::Brace => '{',
@@ -278,13 +308,12 @@ macro_rules! make_leaf {
         parent.subtree.token_trees.extend(entry.subtree.token_trees);
     }

-    let subtree = stack.pop().unwrap().subtree;
-    if subtree.token_trees.len() == 1 {
-        if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
-            return first.clone();
-        }
+    let subtree = stack.into_last().subtree;
+    if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
+        first.clone()
+    } else {
+        subtree
     }
-    subtree
 }

 /// Returns the textual content of a doc comment block as a quoted string
@@ -315,7 +344,8 @@ fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
     let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];

     // Make `#![]`
-    let mut token_trees = vec![mk_punct('#')];
+    let mut token_trees = Vec::with_capacity(3);
+    token_trees.push(mk_punct('#'));
     if let ast::CommentPlacement::Inner = doc {
         token_trees.push(mk_punct('!'));
     }
@@ -359,11 +389,18 @@ struct TokenIdAlloc {
 }

 impl TokenIdAlloc {
-    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+    fn alloc(
+        &mut self,
+        absolute_range: TextRange,
+        synthetic_id: Option<SyntheticTokenId>,
+    ) -> tt::TokenId {
         let relative_range = absolute_range - self.global_offset;
         let token_id = tt::TokenId(self.next_id);
         self.next_id += 1;
         self.map.insert(token_id, relative_range);
+        if let Some(id) = synthetic_id {
+            self.map.insert_synthetic(token_id, id);
+        }
         token_id
     }

@@ -390,24 +427,25 @@ fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
     }
 }

-/// A Raw Token (straightly from lexer) convertor
+/// A raw token (straight from lexer) convertor
 struct RawConvertor<'a> {
-    text: &'a str,
-    offset: TextSize,
+    lexed: parser::LexedStr<'a>,
+    pos: usize,
     id_alloc: TokenIdAlloc,
-    inner: std::slice::Iter<'a, RawToken>,
 }

-trait SrcToken: std::fmt::Debug {
-    fn kind(&self) -> SyntaxKind;
+trait SrcToken<Ctx>: std::fmt::Debug {
+    fn kind(&self, ctx: &Ctx) -> SyntaxKind;
+
+    fn to_char(&self, ctx: &Ctx) -> Option<char>;

-    fn to_char(&self) -> Option<char>;
+    fn to_text(&self, ctx: &Ctx) -> SmolStr;

-    fn to_text(&self) -> SmolStr;
+    fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
 }

-trait TokenConvertor {
-    type Token: SrcToken;
+trait TokenConvertor: Sized {
+    type Token: SrcToken<Self>;

     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;

@@ -418,42 +456,49 @@ trait TokenConvertor {
     fn id_alloc(&mut self) -> &mut TokenIdAlloc;
 }

-impl<'a> SrcToken for (&'a RawToken, &'a str) {
-    fn kind(&self) -> SyntaxKind {
-        self.0.kind
+impl<'a> SrcToken<RawConvertor<'a>> for usize {
+    fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
+        ctx.lexed.kind(*self)
     }

-    fn to_char(&self) -> Option<char> {
-        self.1.chars().next()
+    fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
+        ctx.lexed.text(*self).chars().next()
     }

-    fn to_text(&self) -> SmolStr {
-        self.1.into()
+    fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
+        ctx.lexed.text(*self).into()
+    }
+
+    fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
+        None
     }
 }

 impl<'a> TokenConvertor for RawConvertor<'a> {
-    type Token = (&'a RawToken, &'a str);
+    type Token = usize;

-    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
-        convert_doc_comment(&doc_comment(token.1))
+    fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
+        let text = self.lexed.text(token);
+        convert_doc_comment(&doc_comment(text))
     }

     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
-        let token = self.inner.next()?;
-        let range = TextRange::at(self.offset, token.len);
-        self.offset += token.len;
+        if self.pos == self.lexed.len() {
+            return None;
+        }
+        let token = self.pos;
+        self.pos += 1;
+        let range = self.lexed.text_range(token);
+        let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());

-        Some(((token, &self.text[range]), range))
+        Some((token, range))
     }

     fn peek(&self) -> Option<Self::Token> {
-        let token = self.inner.as_slice().get(0);
-
-        token.map(|it| {
-            let range = TextRange::at(self.offset, it.len);
-            (it, &self.text[range])
-        })
+        if self.pos == self.lexed.len() {
+            return None;
+        }
+        Some(self.pos)
     }

     fn id_alloc(&mut self) -> &mut TokenIdAlloc {
@@ -461,87 +506,130 @@ fn id_alloc(&mut self) -> &mut TokenIdAlloc {
     }
 }

-struct Convertor<'c> {
+struct Convertor {
     id_alloc: TokenIdAlloc,
     current: Option<SyntaxToken>,
+    current_synthetic: Vec<SyntheticToken>,
     preorder: PreorderWithTokens,
-    censor: &'c FxHashSet<SyntaxNode>,
+    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
     range: TextRange,
     punct_offset: Option<(SyntaxToken, TextSize)>,
 }

-impl<'c> Convertor<'c> {
+impl Convertor {
     fn new(
         node: &SyntaxNode,
         global_offset: TextSize,
-        censor: &'c FxHashSet<SyntaxNode>,
-    ) -> Convertor<'c> {
+        existing_token_map: TokenMap,
+        next_id: u32,
+        mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    ) -> Convertor {
         let range = node.text_range();
         let mut preorder = node.preorder_with_tokens();
-        let first = Self::next_token(&mut preorder, censor);
+        let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
         Convertor {
-            id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
+            id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
             current: first,
+            current_synthetic: synthetic,
             preorder,
             range,
-            censor,
+            replace,
+            append,
             punct_offset: None,
         }
     }

     fn next_token(
         preorder: &mut PreorderWithTokens,
-        censor: &FxHashSet<SyntaxNode>,
-    ) -> Option<SyntaxToken> {
+        replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+        append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
+    ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
        while let Some(ev) = preorder.next() {
             let ele = match ev {
                 WalkEvent::Enter(ele) => ele,
+                WalkEvent::Leave(SyntaxElement::Node(node)) => {
+                    if let Some(mut v) = append.remove(&node) {
+                        if !v.is_empty() {
+                            v.reverse();
+                            return (None, v);
+                        }
+                    }
+                    continue;
+                }
                 _ => continue,
             };
             match ele {
-                SyntaxElement::Token(t) => return Some(t),
-                SyntaxElement::Node(node) if censor.contains(&node) => preorder.skip_subtree(),
-                SyntaxElement::Node(_) => (),
+                SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+                SyntaxElement::Node(node) => {
+                    if let Some(mut v) = replace.remove(&node) {
+                        preorder.skip_subtree();
+                        if !v.is_empty() {
+                            v.reverse();
+                            return (None, v);
+                        }
+                    }
+                }
             }
         }
-        None
+        (None, Vec::new())
     }
 }

 #[derive(Debug)]
 enum SynToken {
     Ordinary(SyntaxToken),
+    // FIXME is this supposed to be `Punct`?
     Punch(SyntaxToken, TextSize),
+    Synthetic(SyntheticToken),
 }

 impl SynToken {
-    fn token(&self) -> &SyntaxToken {
+    fn token(&self) -> Option<&SyntaxToken> {
         match self {
-            SynToken::Ordinary(it) => it,
-            SynToken::Punch(it, _) => it,
+            SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
+            SynToken::Synthetic(_) => None,
         }
     }
 }

-impl SrcToken for SynToken {
-    fn kind(&self) -> SyntaxKind {
-        self.token().kind()
+impl SrcToken<Convertor> for SynToken {
+    fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
+        match self {
+            SynToken::Ordinary(token) => token.kind(),
+            SynToken::Punch(token, _) => token.kind(),
+            SynToken::Synthetic(token) => token.kind,
+        }
     }
-    fn to_char(&self) -> Option<char> {
+    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
         match self {
             SynToken::Ordinary(_) => None,
             SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+            SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
+            SynToken::Synthetic(_) => None,
+        }
+    }
+    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
+        match self {
+            SynToken::Ordinary(token) => token.text().into(),
+            SynToken::Punch(token, _) => token.text().into(),
+            SynToken::Synthetic(token) => token.text.clone(),
         }
     }
-    fn to_text(&self) -> SmolStr {
-        self.token().text().into()
+
+    fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
+        match self {
+            SynToken::Synthetic(token) => Some(token.id),
+            _ => None,
+        }
     }
 }

-impl TokenConvertor for Convertor<'_> {
+impl TokenConvertor for Convertor {
     type Token = SynToken;
     fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
-        convert_doc_comment(token.token())
+        convert_doc_comment(token.token()?)
     }

     fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
@@ -555,15 +643,29 @@ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
             }
         }

+        if let Some(synth_token) = self.current_synthetic.pop() {
+            if self.current_synthetic.is_empty() {
+                let (new_current, new_synth) =
+                    Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+                self.current = new_current;
+                self.current_synthetic = new_synth;
+            }
+            let range = synth_token.range;
+            return Some((SynToken::Synthetic(synth_token), range));
+        }
+
         let curr = self.current.clone()?;
         if !&self.range.contains_range(curr.text_range()) {
             return None;
         }
-        self.current = Self::next_token(&mut self.preorder, self.censor);
+        let (new_current, new_synth) =
+            Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+        self.current = new_current;
+        self.current_synthetic = new_synth;

         let token = if curr.kind().is_punct() {
+            self.punct_offset = Some((curr.clone(), 0.into()));
             let range = curr.text_range();
             let range = TextRange::at(range.start(), TextSize::of('.'));
-            self.punct_offset = Some((curr.clone(), 0.into()));
             (SynToken::Punch(curr, 0.into()), range)
         } else {
             self.punct_offset = None;
@@ -582,6 +684,10 @@ fn peek(&self) -> Option<Self::Token> {
             }
         }

+        if let Some(synth_token) = self.current_synthetic.last() {
+            return Some(SynToken::Synthetic(synth_token.clone()));
+        }
+
         let curr = self.current.clone()?;
         if !self.range.contains_range(curr.text_range()) {
             return None;
@@ -607,10 +713,6 @@ struct TtTreeSink<'a> {
     text_pos: TextSize,
     inner: SyntaxTreeBuilder,
     token_map: TokenMap,
-
-    // Number of roots
-    // Use for detect ill-form tree which is not single root
-    roots: smallvec::SmallVec<[usize; 1]>,
 }

 impl<'a> TtTreeSink<'a> {
@@ -621,7 +723,6 @@ fn new(cursor: Cursor<'a>) -> Self {
             open_delims: FxHashMap::default(),
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
-            roots: smallvec::SmallVec::new(),
             token_map: TokenMap::default(),
         }
     }
@@ -643,7 +744,7 @@ fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
     &texts[idx..texts.len() - (1 - idx)]
 }

-impl<'a> TreeSink for TtTreeSink<'a> {
+impl<'a> TtTreeSink<'a> {
     fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
         if kind == LIFETIME_IDENT {
             n_tokens = 2;
@@ -651,7 +752,7 @@ fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
         let mut last = self.cursor;
         for _ in 0..n_tokens {
-            let tmp_str: SmolStr;
+            let tmp: u8;
             if self.cursor.eof() {
                 break;
             }
@@ -661,18 +762,15 @@ fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
                 Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                     // Mark the range if needed
                     let (text, id) = match leaf {
-                        tt::Leaf::Ident(ident) => (&ident.text, ident.id),
+                        tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id),
                         tt::Leaf::Punct(punct) => {
                             assert!(punct.char.is_ascii());
-                            let char = &(punct.char as u8);
-                            tmp_str = SmolStr::new_inline(
-                                std::str::from_utf8(std::slice::from_ref(char)).unwrap(),
-                            );
-                            (&tmp_str, punct.id)
+                            tmp = punct.char as u8;
+                            (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id)
                         }
-                        tt::Leaf::Literal(lit) => (&lit.text, lit.id),
+                        tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id),
                     };
-                    let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
+                    let range = TextRange::at(self.text_pos, TextSize::of(text));
                     self.token_map.insert(id, range);
                     self.cursor = self.cursor.bump();
                     text
@@ -729,19 +827,13 @@ fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {

     fn start_node(&mut self, kind: SyntaxKind) {
         self.inner.start_node(kind);
-
-        match self.roots.last_mut() {
-            None | Some(0) => self.roots.push(1),
-            Some(ref mut n) => **n += 1,
-        };
     }

     fn finish_node(&mut self) {
         self.inner.finish_node();
-        *self.roots.last_mut().unwrap() -= 1;
     }

-    fn error(&mut self, error: ParseError) {
+    fn error(&mut self, error: String) {
         self.inner.error(error, self.text_pos)
     }
 }
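
The notes below are not part of the patch; they sketch how the reworked entry points fit together. First, a round trip through the two conversions: `token_tree_to_syntax_node` now feeds the parser's `Step` events into `TtTreeSink` itself and is infallible, so the `Result` handling callers needed before is gone. A minimal test module one might add at the bottom of `syntax_bridge.rs`; the module and test names are invented for illustration:

#[cfg(test)]
mod round_trip_sketch {
    use super::*;
    use syntax::{ast, AstNode};

    #[test]
    fn syntax_to_token_tree_and_back() {
        // Parse ordinary source text with the `syntax` crate.
        let file = ast::SourceFile::parse("fn f() { 1 + 1; }").tree();

        // Syntax tree -> token tree; the `TokenMap` links allocated token
        // ids back to the text ranges they were produced from.
        let (subtree, _token_map) = syntax_node_to_token_tree(file.syntax());
        assert!(!subtree.token_trees.is_empty());

        // Token tree -> syntax tree again. After this patch the conversion
        // is infallible: no `Result`; problems surface via `parse.errors()`.
        let (parse, _range_map) =
            token_tree_to_syntax_node(&subtree, parser::TopEntryPoint::SourceFile);
        assert!(parse.errors().is_empty());
    }
}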
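
Second, the `replace`/`append` maps that subsume the old `censor` set. Replacing a node with an empty token list reproduces censoring (`next_token` skips the subtree and emits nothing), while `append` splices synthetic tokens in when the node is left, i.e. after the node's own tokens. A sketch under the same caveats; the node choices, the `__sketch` identifier, and the names are illustrative only:

#[cfg(test)]
mod modifications_sketch {
    use super::*;
    use rustc_hash::FxHashMap;
    use syntax::{ast, AstNode};

    #[test]
    fn censor_and_append() {
        let file = ast::SourceFile::parse("fn f() {} fn g() {}").tree();
        let root = file.syntax().clone();

        // Censoring is now "replace the node with no synthetic tokens":
        // the node's subtree is skipped and nothing is emitted for it.
        let first_fn = root.first_child().unwrap();
        let mut replace = FxHashMap::default();
        replace.insert(first_fn, Vec::new());

        // Appending emits the synthetic tokens on the node's Leave event,
        // so they land right after the node's own tokens.
        let last_fn = root.last_child().unwrap();
        let mut append = FxHashMap::default();
        append.insert(
            last_fn,
            vec![SyntheticToken {
                kind: IDENT,
                text: "__sketch".into(),
                range: TextRange::empty(0.into()),
                id: SyntheticTokenId(0),
            }],
        );

        let (subtree, _token_map, next_id) = syntax_node_to_token_tree_with_modifications(
            &root,
            TokenMap::default(), // no pre-existing ids to build on
            0,                   // first fresh token id to allocate
            replace,
            append,
        );
        assert!(!subtree.token_trees.is_empty());
        assert!(next_id > 0);
    }
}

Appending on the Leave event is what keeps synthetic tokens ordered after the node's real ones, and the returned `next_id` lets a later conversion keep allocating fresh token ids without colliding with this one.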