//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].

use rustc_hash::FxHashMap;
use stdx::{always, non_empty_vec::NonEmptyVec};
use syntax::{
    ast::{self, make::tokens::doc_comment},
    AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
    SyntaxKind::*,
    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
use tt::buffer::{Cursor, TokenBuffer};

use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};

/// Convert the syntax node to a `TokenTree` (what the macro will consume).
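///
/// A minimal usage sketch (illustrative, not from this crate's tests; assumes
/// a tree parsed by the `syntax` crate):
///
/// ```ignore
/// let parse = syntax::SourceFile::parse("fn f() { 1 + 1; }");
/// let (subtree, token_map) = syntax_node_to_token_tree(&parse.syntax_node());
/// // `subtree` is the macro-facing token tree; `token_map` maps each
/// // `tt::TokenId` back to the text range it was produced from.
/// ```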
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
    syntax_node_to_token_tree_censored(node, Default::default(), Default::default())
}

/// Convert the syntax node to a `TokenTree` (what the macro will consume),
/// with tokens from `replace` substituted for the given nodes and tokens from
/// `append` appended after them.
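///
/// A hedged sketch of the `append` map (illustrative; the node, range, and id
/// are made up for the example):
///
/// ```ignore
/// let mut append = FxHashMap::default();
/// append.insert(
///     node_missing_semi.clone(),
///     vec![SyntheticToken {
///         kind: SyntaxKind::SEMICOLON,
///         text: ";".into(),
///         range: node_missing_semi.text_range(),
///         id: SyntheticTokenId(0),
///     }],
/// );
/// let (subtree, map) = syntax_node_to_token_tree_censored(&node, Default::default(), append);
/// ```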
pub fn syntax_node_to_token_tree_censored(
    node: &SyntaxNode,
    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
) -> (tt::Subtree, TokenMap) {
    let global_offset = node.text_range().start();
    let mut c = Convertor::new(node, global_offset, replace, append);
    let subtree = convert_tokens(&mut c);
    c.id_alloc.map.shrink_to_fit();
    always!(c.replace.is_empty(), "replace: {:?}", c.replace);
    always!(c.append.is_empty(), "append: {:?}", c.append);
    (subtree, c.id_alloc.map)
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct SyntheticTokenId(pub u32);

#[derive(Debug, Clone)]
pub struct SyntheticToken {
    pub kind: SyntaxKind,
    pub text: SmolStr,
    pub range: TextRange,
    pub id: SyntheticTokenId,
}

// The following items are what a `rustc` macro can be parsed into:
// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
// * Expr(P<ast::Expr>)                     -> token_tree_to_expr
// * Pat(P<ast::Pat>)                       -> token_tree_to_pat
// * Ty(P<ast::Ty>)                         -> token_tree_to_ty
// * Stmts(SmallVec<[ast::Stmt; 1]>)        -> token_tree_to_stmts
// * Items(SmallVec<[P<ast::Item>; 1]>)     -> token_tree_to_items
//
// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>)
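
/// Parse a [`tt::Subtree`] back into a syntax tree, with `entry_point`
/// selecting the grammar production the tokens are parsed as.
///
/// A round-trip sketch (illustrative; `TopEntryPoint` lives in the `parser`
/// crate):
///
/// ```ignore
/// let (tt, _) = syntax_node_to_token_tree(&node);
/// let (parse, _map) = token_tree_to_syntax_node(&tt, parser::TopEntryPoint::SourceFile);
/// // Note: trivia (whitespace, comments) is not preserved by the round-trip.
/// ```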
pub fn token_tree_to_syntax_node(
    tt: &tt::Subtree,
    entry_point: parser::TopEntryPoint,
) -> (Parse<SyntaxNode>, TokenMap) {
    let buffer = match tt {
        tt::Subtree { delimiter: None, token_trees } => {
            TokenBuffer::from_tokens(token_trees.as_slice())
        }
        _ => TokenBuffer::from_subtree(tt),
    };
    let parser_input = to_parser_input(&buffer);
    let parser_output = entry_point.parse(&parser_input);
    let mut tree_sink = TtTreeSink::new(buffer.begin());
    for event in parser_output.iter() {
        match event {
            parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
                tree_sink.token(kind, n_raw_tokens)
            }
            parser::Step::Enter { kind } => tree_sink.start_node(kind),
            parser::Step::Exit => tree_sink.finish_node(),
            parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
        }
    }
    let (parse, range_map) = tree_sink.finish();
    (parse, range_map)
}

/// Convert a string to a `TokenTree`.
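///
/// A small usage sketch (illustrative):
///
/// ```ignore
/// let (tt, _map) = parse_to_token_tree("struct S;").unwrap();
/// // Trivia is skipped, so this yields three leaves: `struct`, `S`, and `;`.
/// assert_eq!(tt.token_trees.len(), 3);
/// ```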
pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
    let lexed = parser::LexedStr::new(text);
    if lexed.errors().next().is_some() {
        return None;
    }

    let mut conv = RawConvertor {
        lexed,
        pos: 0,
        id_alloc: TokenIdAlloc {
            map: Default::default(),
            global_offset: TextSize::default(),
            next_id: 0,
        },
    };

    let subtree = convert_tokens(&mut conv);
    Some((subtree, conv.id_alloc.map))
}

/// Split a token tree matching `$($e:expr)SEP*` into its separate expressions.
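///
/// For example (illustrative), for a subtree holding the tokens `1, 2 + 2, 3`:
///
/// ```ignore
/// let exprs = parse_exprs_with_sep(&tt, ',');
/// // Yields three subtrees: `1`, `2 + 2`, and `3`.
/// assert_eq!(exprs.len(), 3);
/// ```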
pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
    if tt.token_trees.is_empty() {
        return Vec::new();
    }

    let mut iter = TtIter::new(tt);
    let mut res = Vec::new();

    while iter.peek_n(0).is_some() {
        let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);

        res.push(match expanded.value {
            None => break,
            Some(tt @ tt::TokenTree::Leaf(_)) => {
                tt::Subtree { delimiter: None, token_trees: vec![tt] }
            }
            Some(tt::TokenTree::Subtree(tt)) => tt,
        });

        let mut fork = iter.clone();
        if fork.expect_char(sep).is_err() {
            break;
        }
        iter = fork;
    }

    if iter.peek_n(0).is_some() {
        res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
    }

    res
}

fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
    struct StackEntry {
        subtree: tt::Subtree,
        idx: usize,
        open_range: TextRange,
    }

    let entry = StackEntry {
        subtree: tt::Subtree { delimiter: None, ..Default::default() },
        // never used (delimiter is `None`)
        idx: !0,
        open_range: TextRange::empty(TextSize::of('.')),
    };
    let mut stack = NonEmptyVec::new(entry);
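
    // Strategy: walk the token stream once; every opening delimiter pushes a
    // fresh `StackEntry`, and the matching closing delimiter pops it and
    // attaches the finished subtree to its parent entry.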
    loop {
        let StackEntry { subtree, .. } = stack.last_mut();
        let result = &mut subtree.token_trees;
        let (token, range) = match conv.bump() {
            Some(it) => it,
            None => break,
        };
        let synth_id = token.synthetic_id(&conv);

        let kind = token.kind(&conv);
        if kind == COMMENT {
            if let Some(tokens) = conv.convert_doc_comment(&token) {
                // FIXME: There has to be a better way to do this
                // Add the comment's token id to the converted doc string
                let id = conv.id_alloc().alloc(range, synth_id);
                result.extend(tokens.into_iter().map(|mut tt| {
                    if let tt::TokenTree::Subtree(sub) = &mut tt {
                        if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
                            sub.token_trees.get_mut(2)
                        {
                            lit.id = id
                        }
                    }
                    tt
                }));
            }
            continue;
        }
        let tt = if kind.is_punct() && kind != UNDERSCORE {
            // assert_eq!(range.len(), TextSize::of('.'));

            if let Some(delim) = subtree.delimiter {
                let expected = match delim.kind {
                    tt::DelimiterKind::Parenthesis => T![')'],
                    tt::DelimiterKind::Brace => T!['}'],
                    tt::DelimiterKind::Bracket => T![']'],
                };

                if kind == expected {
                    if let Some(entry) = stack.pop() {
                        conv.id_alloc().close_delim(entry.idx, Some(range));
                        stack.last_mut().subtree.token_trees.push(entry.subtree.into());
                    }
                    continue;
                }
            }
            let delim = match kind {
                T!['('] => Some(tt::DelimiterKind::Parenthesis),
                T!['{'] => Some(tt::DelimiterKind::Brace),
                T!['['] => Some(tt::DelimiterKind::Bracket),
                _ => None,
            };

            if let Some(kind) = delim {
                let mut subtree = tt::Subtree::default();
                let (id, idx) = conv.id_alloc().open_delim(range);
                subtree.delimiter = Some(tt::Delimiter { id, kind });
                stack.push(StackEntry { subtree, idx, open_range: range });
                continue;
            }
            let spacing = match conv.peek().map(|next| next.kind(&conv)) {
                Some(kind)
                    if !kind.is_trivia()
                        && kind.is_punct()
                        && kind != T!['[']
                        && kind != T!['{']
                        && kind != T!['(']
                        && kind != UNDERSCORE =>
                {
                    tt::Spacing::Joint
                }
                _ => tt::Spacing::Alone,
            };
            let char = match token.to_char(&conv) {
                Some(c) => c,
                None => {
                    panic!("Token from lexer must be single char: token = {:#?}", token);
                }
            };
            tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
                .into()
        } else {
            macro_rules! make_leaf {
                ($i:ident) => {
                    tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
                        .into()
                };
            }
            let leaf: tt::Leaf = match kind {
                T![true] | T![false] => make_leaf!(Ident),
                IDENT => make_leaf!(Ident),
                UNDERSCORE => make_leaf!(Ident),
                k if k.is_keyword() => make_leaf!(Ident),
                k if k.is_literal() => make_leaf!(Literal),
                LIFETIME_IDENT => {
                    let char_unit = TextSize::of('\'');
                    let r = TextRange::at(range.start(), char_unit);
                    let apostrophe = tt::Leaf::from(tt::Punct {
                        char: '\'',
                        spacing: tt::Spacing::Joint,
                        id: conv.id_alloc().alloc(r, synth_id),
                    });
                    result.push(apostrophe.into());

                    let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                    let ident = tt::Leaf::from(tt::Ident {
                        text: SmolStr::new(&token.to_text(conv)[1..]),
                        id: conv.id_alloc().alloc(r, synth_id),
                    });
                    result.push(ident.into());
                    continue;
                }
                _ => continue,
            };

            leaf.into()
        };

        result.push(tt);
    }

    // If we get here, we've consumed all input tokens.
    // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
    // Merge them so we're left with one.
    while let Some(entry) = stack.pop() {
        let parent = stack.last_mut();

        conv.id_alloc().close_delim(entry.idx, None);
        let leaf: tt::Leaf = tt::Punct {
            id: conv.id_alloc().alloc(entry.open_range, None),
            char: match entry.subtree.delimiter.unwrap().kind {
                tt::DelimiterKind::Parenthesis => '(',
                tt::DelimiterKind::Brace => '{',
                tt::DelimiterKind::Bracket => '[',
            },
            spacing: tt::Spacing::Alone,
        }
        .into();
        parent.subtree.token_trees.push(leaf.into());
        parent.subtree.token_trees.extend(entry.subtree.token_trees);
    }

    let subtree = stack.into_last().subtree;
    if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
        first.clone()
    } else {
        subtree
    }
}

/// Returns the textual content of a doc comment block as a quoted string.
/// That is, strips the leading `///` (or `/**`, etc.), strips the trailing
/// `*/` of block comments, and then quotes the result, as required to convert
/// it to a `tt::Literal`.
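///
/// For example (illustrative), the line comment `/// Hello` yields the string
/// `"\" Hello\""` (quotes included, leading space preserved).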
fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
    let prefix_len = comment.prefix().len();
    let mut text = &comment.text()[prefix_len..];

    // Remove the ending "*/" of block comments
    if comment.kind().shape == ast::CommentShape::Block {
        text = &text[0..text.len() - 2];
    }

    // Quote the string; note that `tt::Literal` expects an escaped string
    let text = format!("\"{}\"", text.escape_debug());
    text.into()
}

fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
    cov_mark::hit!(test_meta_doc_comments);
    let comment = ast::Comment::cast(token.clone())?;
    let doc = comment.kind().doc?;

    // Make `doc = "\" Comments\""`
    let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];

    // Make `#![]`
    let mut token_trees = Vec::with_capacity(3);
    token_trees.push(mk_punct('#'));
    if let ast::CommentPlacement::Inner = doc {
        token_trees.push(mk_punct('!'));
    }
    token_trees.push(tt::TokenTree::from(tt::Subtree {
        delimiter: Some(tt::Delimiter {
            kind: tt::DelimiterKind::Bracket,
            id: tt::TokenId::unspecified(),
        }),
        token_trees: meta_tkns,
    }));

    return Some(token_trees);

    fn mk_ident(s: &str) -> tt::TokenTree {
        tt::TokenTree::from(tt::Leaf::from(tt::Ident {
            text: s.into(),
            id: tt::TokenId::unspecified(),
        }))
    }

    fn mk_punct(c: char) -> tt::TokenTree {
        tt::TokenTree::from(tt::Leaf::from(tt::Punct {
            char: c,
            spacing: tt::Spacing::Alone,
            id: tt::TokenId::unspecified(),
        }))
    }

    fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
        let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };

        tt::TokenTree::from(tt::Leaf::from(lit))
    }
}

/// Allocates `tt::TokenId`s, remembering in a `TokenMap` the text range each
/// id was created from.
struct TokenIdAlloc {
    map: TokenMap,
    global_offset: TextSize,
    next_id: u32,
}

impl TokenIdAlloc {
    fn alloc(
        &mut self,
        absolute_range: TextRange,
        synthetic_id: Option<SyntheticTokenId>,
    ) -> tt::TokenId {
        let relative_range = absolute_range - self.global_offset;
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        self.map.insert(token_id, relative_range);
        if let Some(id) = synthetic_id {
            self.map.insert_synthetic(token_id, id);
        }
        token_id
    }

    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        let idx = self.map.insert_delim(
            token_id,
            open_abs_range - self.global_offset,
            open_abs_range - self.global_offset,
        );
        (token_id, idx)
    }

    fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
        match close_abs_range {
            None => {
                self.map.remove_delim(idx);
            }
            Some(close) => {
                self.map.update_close_delim(idx, close - self.global_offset);
            }
        }
    }
}

/// A convertor for raw tokens, straight from the lexer.
struct RawConvertor<'a> {
    lexed: parser::LexedStr<'a>,
    pos: usize,
    id_alloc: TokenIdAlloc,
}

trait SrcToken<Ctx>: std::fmt::Debug {
    fn kind(&self, ctx: &Ctx) -> SyntaxKind;

    fn to_char(&self, ctx: &Ctx) -> Option<char>;

    fn to_text(&self, ctx: &Ctx) -> SmolStr;

    fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
}

trait TokenConvertor: Sized {
    type Token: SrcToken<Self>;

    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;

    fn bump(&mut self) -> Option<(Self::Token, TextRange)>;

    fn peek(&self) -> Option<Self::Token>;

    fn id_alloc(&mut self) -> &mut TokenIdAlloc;
}
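
// Two convertors drive `convert_tokens`: `RawConvertor` walks raw lexer
// tokens (plain indices into a `parser::LexedStr`), while `Convertor` walks
// an existing syntax tree and can splice in synthetic tokens along the way.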

impl<'a> SrcToken<RawConvertor<'a>> for usize {
    fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
        ctx.lexed.kind(*self)
    }

    fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
        ctx.lexed.text(*self).chars().next()
    }

    fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
        ctx.lexed.text(*self).into()
    }

    fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
        None
    }
}

impl<'a> TokenConvertor for RawConvertor<'a> {
    type Token = usize;

    fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
        let text = self.lexed.text(token);
        convert_doc_comment(&doc_comment(text))
    }

    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
        if self.pos == self.lexed.len() {
            return None;
        }
        let token = self.pos;
        self.pos += 1;
        let range = self.lexed.text_range(token);
        let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());

        Some((token, range))
    }

    fn peek(&self) -> Option<Self::Token> {
        if self.pos == self.lexed.len() {
            return None;
        }
        Some(self.pos)
    }

    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
        &mut self.id_alloc
    }
}

struct Convertor {
    id_alloc: TokenIdAlloc,
    current: Option<SyntaxToken>,
    current_synthetic: Vec<SyntheticToken>,
    preorder: PreorderWithTokens,
    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    range: TextRange,
    punct_offset: Option<(SyntaxToken, TextSize)>,
}

impl Convertor {
    fn new(
        node: &SyntaxNode,
        global_offset: TextSize,
        mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
        mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    ) -> Convertor {
        let range = node.text_range();
        let mut preorder = node.preorder_with_tokens();
        let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
        Convertor {
            id_alloc: TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 },
            current: first,
            current_synthetic: synthetic,
            preorder,
            range,
            replace,
            append,
            punct_offset: None,
        }
    }

    fn next_token(
        preorder: &mut PreorderWithTokens,
        replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
        append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
        while let Some(ev) = preorder.next() {
            let ele = match ev {
                WalkEvent::Enter(ele) => ele,
                WalkEvent::Leave(SyntaxElement::Node(node)) => {
                    if let Some(mut v) = append.remove(&node) {
                        if !v.is_empty() {
                            v.reverse();
                            return (None, v);
                        }
                    }
                    continue;
                }
                _ => continue,
            };
            match ele {
                SyntaxElement::Token(t) => return (Some(t), Vec::new()),
                SyntaxElement::Node(node) => {
                    if let Some(mut v) = replace.remove(&node) {
                        preorder.skip_subtree();
                        if !v.is_empty() {
                            v.reverse();
                            return (None, v);
                        }
                    }
                }
            }
        }
        (None, Vec::new())
    }
}

#[derive(Debug)]
enum SynToken {
    Ordinary(SyntaxToken),
    // FIXME: is this supposed to be `Punct`?
    Punch(SyntaxToken, TextSize),
    Synthetic(SyntheticToken),
}

impl SynToken {
    fn token(&self) -> Option<&SyntaxToken> {
        match self {
            SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
            SynToken::Synthetic(_) => None,
        }
    }
}

impl SrcToken<Convertor> for SynToken {
    fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
        match self {
            SynToken::Ordinary(token) => token.kind(),
            SynToken::Punch(token, _) => token.kind(),
            SynToken::Synthetic(token) => token.kind,
        }
    }

    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
        match self {
            SynToken::Ordinary(_) => None,
            SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
            SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
            SynToken::Synthetic(_) => None,
        }
    }

    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
        match self {
            SynToken::Ordinary(token) => token.text().into(),
            SynToken::Punch(token, _) => token.text().into(),
            SynToken::Synthetic(token) => token.text.clone(),
        }
    }

    fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
        match self {
            SynToken::Synthetic(token) => Some(token.id),
            _ => None,
        }
    }
}

impl TokenConvertor for Convertor {
    type Token = SynToken;

    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
        convert_doc_comment(token.token()?)
    }

    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
        if let Some((punct, offset)) = self.punct_offset.clone() {
            if usize::from(offset) + 1 < punct.text().len() {
                let offset = offset + TextSize::of('.');
                let range = punct.text_range();
                self.punct_offset = Some((punct.clone(), offset));
                let range = TextRange::at(range.start() + offset, TextSize::of('.'));
                return Some((SynToken::Punch(punct, offset), range));
            }
        }

        if let Some(synth_token) = self.current_synthetic.pop() {
            if self.current_synthetic.is_empty() {
                let (new_current, new_synth) =
                    Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
                self.current = new_current;
                self.current_synthetic = new_synth;
            }
            let range = synth_token.range;
            return Some((SynToken::Synthetic(synth_token), range));
        }

        let curr = self.current.clone()?;
        if !self.range.contains_range(curr.text_range()) {
            return None;
        }
        let (new_current, new_synth) =
            Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
        self.current = new_current;
        self.current_synthetic = new_synth;
        let token = if curr.kind().is_punct() {
            self.punct_offset = Some((curr.clone(), 0.into()));
            let range = curr.text_range();
            let range = TextRange::at(range.start(), TextSize::of('.'));
            (SynToken::Punch(curr, 0.into()), range)
        } else {
            self.punct_offset = None;
            let range = curr.text_range();
            (SynToken::Ordinary(curr), range)
        };

        Some(token)
    }

    fn peek(&self) -> Option<Self::Token> {
        if let Some((punct, mut offset)) = self.punct_offset.clone() {
            offset += TextSize::of('.');
            if usize::from(offset) < punct.text().len() {
                return Some(SynToken::Punch(punct, offset));
            }
        }

        if let Some(synth_token) = self.current_synthetic.last() {
            return Some(SynToken::Synthetic(synth_token.clone()));
        }

        let curr = self.current.clone()?;
        if !self.range.contains_range(curr.text_range()) {
            return None;
        }

        let token = if curr.kind().is_punct() {
            SynToken::Punch(curr, 0.into())
        } else {
            SynToken::Ordinary(curr)
        };
        Some(token)
    }

    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
        &mut self.id_alloc
    }
}

/// Consumes a token-tree `Cursor` and feeds the tokens to a
/// `SyntaxTreeBuilder`, recording the emitted ranges in a `TokenMap`.
struct TtTreeSink<'a> {
    buf: String,
    cursor: Cursor<'a>,
    open_delims: FxHashMap<tt::TokenId, TextSize>,
    text_pos: TextSize,
    inner: SyntaxTreeBuilder,
    token_map: TokenMap,
}

impl<'a> TtTreeSink<'a> {
    fn new(cursor: Cursor<'a>) -> Self {
        TtTreeSink {
            buf: String::new(),
            cursor,
            open_delims: FxHashMap::default(),
            text_pos: 0.into(),
            inner: SyntaxTreeBuilder::default(),
            token_map: TokenMap::default(),
        }
    }

    fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
        self.token_map.shrink_to_fit();
        (self.inner.finish(), self.token_map)
    }
}
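
// A worked example (illustrative): for `tt::DelimiterKind::Brace` the text is
// "{}", so `delim_to_str(d, false)` slices out "{" and `delim_to_str(d, true)`
// slices out "}".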
fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
    let texts = match d {
        tt::DelimiterKind::Parenthesis => "()",
        tt::DelimiterKind::Brace => "{}",
        tt::DelimiterKind::Bracket => "[]",
    };

    let idx = closing as usize;
    &texts[idx..texts.len() - (1 - idx)]
}

impl<'a> TtTreeSink<'a> {
    fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
        if kind == LIFETIME_IDENT {
            n_tokens = 2;
        }

        let mut last = self.cursor;
        for _ in 0..n_tokens {
            let tmp: u8;
            if self.cursor.eof() {
                break;
            }
            last = self.cursor;
            let text: &str = loop {
                break match self.cursor.token_tree() {
                    Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                        // Mark the range if needed
                        let (text, id) = match leaf {
                            tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id),
                            tt::Leaf::Punct(punct) => {
                                assert!(punct.char.is_ascii());
                                tmp = punct.char as u8;
                                (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id)
                            }
                            tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id),
                        };
                        let range = TextRange::at(self.text_pos, TextSize::of(text));
                        self.token_map.insert(id, range);
                        self.cursor = self.cursor.bump();
                        text
                    }
                    Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                        self.cursor = self.cursor.subtree().unwrap();
                        match subtree.delimiter {
                            Some(d) => {
                                self.open_delims.insert(d.id, self.text_pos);
                                delim_to_str(d.kind, false)
                            }
                            None => continue,
                        }
                    }
                    None => {
                        let parent = self.cursor.end().unwrap();
                        self.cursor = self.cursor.bump();
                        match parent.delimiter {
                            Some(d) => {
                                if let Some(open_delim) = self.open_delims.get(&d.id) {
                                    let open_range = TextRange::at(*open_delim, TextSize::of('('));
                                    let close_range =
                                        TextRange::at(self.text_pos, TextSize::of('('));
                                    self.token_map.insert_delim(d.id, open_range, close_range);
                                }
                                delim_to_str(d.kind, true)
                            }
                            None => continue,
                        }
                    }
                };
            };
            self.buf += text;
            self.text_pos += TextSize::of(text);
        }

        self.inner.token(kind, self.buf.as_str());
        self.buf.clear();
        // Add whitespace between adjacent puncts
        let next = last.bump();
        if let (
            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
        ) = (last.token_tree(), next.token_tree())
        {
            // Note: the rest of rust-analyzer assumes that a semicolon is the
            // last token of its expression/statement, so don't add whitespace
            // after it here.
            if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
                self.inner.token(WHITESPACE, " ");
                self.text_pos += TextSize::of(' ');
            }
        }
    }

    fn start_node(&mut self, kind: SyntaxKind) {
        self.inner.start_node(kind);
    }

    fn finish_node(&mut self) {
        self.inner.finish_node();
    }

    fn error(&mut self, error: String) {
        self.inner.error(error, self.text_pos)
    }
}