//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].

use rustc_hash::FxHashMap;
use stdx::{always, non_empty_vec::NonEmptyVec};
use syntax::{
    ast::{self, make::tokens::doc_comment},
    AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
    SyntaxKind::*,
    SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
};
use tt::buffer::{Cursor, TokenBuffer};

use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};

/// Convert the syntax node to a `TokenTree` (what a macro will consume).
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
    syntax_node_to_token_tree_with_modifications(node, Default::default(), Default::default())
}
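
// Usage sketch (illustrative; `node` is any parsed `SyntaxNode`):
//     let (subtree, token_map) = syntax_node_to_token_tree(&node);
// The returned `TokenMap` maps each allocated `tt::TokenId` back to a text
// range relative to the start of `node`.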

/// Convert the syntax node to a `TokenTree` (what a macro will consume),
/// applying the given `replace` and `append` modifications (this is also how
/// censored ranges are excluded).
pub fn syntax_node_to_token_tree_with_modifications(
    node: &SyntaxNode,
    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
) -> (tt::Subtree, TokenMap) {
    let global_offset = node.text_range().start();
    let mut c = Convertor::new(node, global_offset, replace, append);
    let subtree = convert_tokens(&mut c);
    c.id_alloc.map.shrink_to_fit();
    always!(c.replace.is_empty(), "replace: {:?}", c.replace);
    always!(c.append.is_empty(), "append: {:?}", c.append);
    (subtree, c.id_alloc.map)
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct SyntheticTokenId(pub u32);

#[derive(Debug, Clone)]
pub struct SyntheticToken {
    pub kind: SyntaxKind,
    pub text: SmolStr,
    pub range: TextRange,
    pub id: SyntheticTokenId,
}

// The following items are what a `rustc` macro can be parsed into:
// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
// * Expr(P<ast::Expr>)                     -> token_tree_to_expr
// * Pat(P<ast::Pat>)                       -> token_tree_to_pat
// * Ty(P<ast::Ty>)                         -> token_tree_to_ty
// * Stmts(SmallVec<[ast::Stmt; 1]>)        -> token_tree_to_stmts
// * Items(SmallVec<[P<ast::Item>; 1]>)     -> token_tree_to_items
//
// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>)

pub fn token_tree_to_syntax_node(
    tt: &tt::Subtree,
    entry_point: parser::TopEntryPoint,
) -> (Parse<SyntaxNode>, TokenMap) {
    let buffer = match tt {
        tt::Subtree { delimiter: None, token_trees } => {
            TokenBuffer::from_tokens(token_trees.as_slice())
        }
        _ => TokenBuffer::from_subtree(tt),
    };
    let parser_input = to_parser_input(&buffer);
    let parser_output = entry_point.parse(&parser_input);
    let mut tree_sink = TtTreeSink::new(buffer.begin());
    for event in parser_output.iter() {
        match event {
            parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
                tree_sink.token(kind, n_raw_tokens)
            }
            parser::Step::Enter { kind } => tree_sink.start_node(kind),
            parser::Step::Exit => tree_sink.finish_node(),
            parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
        }
    }
    let (parse, range_map) = tree_sink.finish();
    (parse, range_map)
}
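
// Round-trip sketch (illustrative; the entry point depends on what the token
// tree is expected to parse as):
//     let (tt, _map) = syntax_node_to_token_tree(&node);
//     let (parse, _map) = token_tree_to_syntax_node(&tt, parser::TopEntryPoint::MacroItems);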

/// Convert a string to a `TokenTree`.
pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
    let lexed = parser::LexedStr::new(text);
    if lexed.errors().next().is_some() {
        return None;
    }

    let mut conv = RawConvertor {
        lexed,
        pos: 0,
        id_alloc: TokenIdAlloc {
            map: Default::default(),
            global_offset: TextSize::default(),
            next_id: 0,
        },
    };

    let subtree = convert_tokens(&mut conv);
    Some((subtree, conv.id_alloc.map))
}
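
// Example sketch: `parse_to_token_tree("struct S;")` lexes the text directly
// and, if it lexed without errors, returns the token tree plus its `TokenMap`.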

/// Split a token tree whose items are expressions separated by `sep`,
/// i.e. a tree matching `$($e:expr)SEP*`.
pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
    if tt.token_trees.is_empty() {
        return Vec::new();
    }

    let mut iter = TtIter::new(tt);
    let mut res = Vec::new();

    while iter.peek_n(0).is_some() {
        let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);

        res.push(match expanded.value {
            None => break,
            Some(tt @ tt::TokenTree::Leaf(_)) => {
                tt::Subtree { delimiter: None, token_trees: vec![tt] }
            }
            Some(tt::TokenTree::Subtree(tt)) => tt,
        });

        let mut fork = iter.clone();
        if fork.expect_char(sep).is_err() {
            break;
        }
        iter = fork;
    }

    if iter.peek_n(0).is_some() {
        res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
    }

    res
}
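
// E.g. with `sep = ','`, the token tree of `a + 1, b, c` splits into three
// single-expression subtrees; any unparsed remainder is pushed as a final
// subtree.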

fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
    struct StackEntry {
        subtree: tt::Subtree,
        idx: usize,
        open_range: TextRange,
    }

    let entry = StackEntry {
        subtree: tt::Subtree { delimiter: None, ..Default::default() },
        // never used (delimiter is `None`)
        idx: !0,
        open_range: TextRange::empty(TextSize::of('.')),
    };
    let mut stack = NonEmptyVec::new(entry);
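
    // Walk the tokens, pushing a new stack entry whenever an open delimiter
    // is seen and popping it on the matching close; everything else becomes a
    // leaf appended to the subtree on top of the stack.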
    loop {
        let StackEntry { subtree, .. } = stack.last_mut();
        let result = &mut subtree.token_trees;
        let (token, range) = match conv.bump() {
            Some(it) => it,
            None => break,
        };
        let synth_id = token.synthetic_id(&conv);

        let kind = token.kind(&conv);
        if kind == COMMENT {
            if let Some(tokens) = conv.convert_doc_comment(&token) {
                // FIXME: There has to be a better way to do this
                // Add the comment's token id to the converted doc string
                let id = conv.id_alloc().alloc(range, synth_id);
                result.extend(tokens.into_iter().map(|mut tt| {
                    if let tt::TokenTree::Subtree(sub) = &mut tt {
                        if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
                            sub.token_trees.get_mut(2)
                        {
                            lit.id = id
                        }
                    }
                    tt
                }));
            }
            continue;
        }
        let tt = if kind.is_punct() && kind != UNDERSCORE {
            // assert_eq!(range.len(), TextSize::of('.'));

            if let Some(delim) = subtree.delimiter {
                let expected = match delim.kind {
                    tt::DelimiterKind::Parenthesis => T![')'],
                    tt::DelimiterKind::Brace => T!['}'],
                    tt::DelimiterKind::Bracket => T![']'],
                };

                if kind == expected {
                    if let Some(entry) = stack.pop() {
                        conv.id_alloc().close_delim(entry.idx, Some(range));
                        stack.last_mut().subtree.token_trees.push(entry.subtree.into());
                    }
                    continue;
                }
            }

            let delim = match kind {
                T!['('] => Some(tt::DelimiterKind::Parenthesis),
                T!['{'] => Some(tt::DelimiterKind::Brace),
                T!['['] => Some(tt::DelimiterKind::Bracket),
                _ => None,
            };

            if let Some(kind) = delim {
                let mut subtree = tt::Subtree::default();
                let (id, idx) = conv.id_alloc().open_delim(range);
                subtree.delimiter = Some(tt::Delimiter { id, kind });
                stack.push(StackEntry { subtree, idx, open_range: range });
                continue;
            }
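
            // Spacing is `Joint` only when the next token is itself a
            // non-delimiter punct (e.g. the two halves of `=>`); otherwise
            // this punct stands `Alone`.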
            let spacing = match conv.peek().map(|next| next.kind(&conv)) {
                Some(kind)
                    if !kind.is_trivia()
                        && kind.is_punct()
                        && kind != T!['[']
                        && kind != T!['{']
                        && kind != T!['(']
                        && kind != UNDERSCORE =>
                {
                    tt::Spacing::Joint
                }
                _ => tt::Spacing::Alone,
            };
            let char = match token.to_char(&conv) {
                Some(c) => c,
                None => {
                    panic!("Token from lexer must be single char: token = {:#?}", token);
                }
            };
            tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
                .into()
        } else {
            macro_rules! make_leaf {
                ($i:ident) => {
                    tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
                        .into()
                };
            }
            let leaf: tt::Leaf = match kind {
                T![true] | T![false] => make_leaf!(Ident),
                IDENT => make_leaf!(Ident),
                UNDERSCORE => make_leaf!(Ident),
                k if k.is_keyword() => make_leaf!(Ident),
                k if k.is_literal() => make_leaf!(Literal),
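                // `tt` has no lifetime leaf, so a lifetime is split into a
                // `Joint` apostrophe punct followed by an ident.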
                LIFETIME_IDENT => {
                    let char_unit = TextSize::of('\'');
                    let r = TextRange::at(range.start(), char_unit);
                    let apostrophe = tt::Leaf::from(tt::Punct {
                        char: '\'',
                        spacing: tt::Spacing::Joint,
                        id: conv.id_alloc().alloc(r, synth_id),
                    });
                    result.push(apostrophe.into());

                    let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
                    let ident = tt::Leaf::from(tt::Ident {
                        text: SmolStr::new(&token.to_text(conv)[1..]),
                        id: conv.id_alloc().alloc(r, synth_id),
                    });
                    result.push(ident.into());
                    continue;
                }
                _ => continue,
            };

            leaf.into()
        };
        result.push(tt);
    }

    // If we get here, we've consumed all input tokens.
    // We might have more than one subtree in the stack if the delimiters are
    // improperly balanced. Merge them so we're left with one.
    while let Some(entry) = stack.pop() {
        let parent = stack.last_mut();

        conv.id_alloc().close_delim(entry.idx, None);
        let leaf: tt::Leaf = tt::Punct {
            id: conv.id_alloc().alloc(entry.open_range, None),
            char: match entry.subtree.delimiter.unwrap().kind {
                tt::DelimiterKind::Parenthesis => '(',
                tt::DelimiterKind::Brace => '{',
                tt::DelimiterKind::Bracket => '[',
            },
            spacing: tt::Spacing::Alone,
        }
        .into();
        parent.subtree.token_trees.push(leaf.into());
        parent.subtree.token_trees.extend(entry.subtree.token_trees);
    }

    let subtree = stack.into_last().subtree;
    if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
        first.clone()
    } else {
        subtree
    }
}

/// Returns the textual content of a doc comment block as a quoted string.
/// That is, it strips the leading `///` (or `/**`, etc.) and the trailing
/// `*/`, then quotes the result, which is needed to convert it into a
/// `tt::Literal`.
fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
    let prefix_len = comment.prefix().len();
    let mut text = &comment.text()[prefix_len..];

    // Remove the trailing "*/"
    if comment.kind().shape == ast::CommentShape::Block {
        text = &text[0..text.len() - 2];
    }

    // Quote the string; note that `tt::Literal` expects an escaped string.
    let text = format!("\"{}\"", text.escape_debug());
    text.into()
}

fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
    cov_mark::hit!(test_meta_doc_comments);
    let comment = ast::Comment::cast(token.clone())?;
    let doc = comment.kind().doc?;

    // Make `doc = " Comments"`
    let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];

    // Make `#![...]` (inner) or `#[...]` (outer)
    let mut token_trees = Vec::with_capacity(3);
    token_trees.push(mk_punct('#'));
    if let ast::CommentPlacement::Inner = doc {
        token_trees.push(mk_punct('!'));
    }
    token_trees.push(tt::TokenTree::from(tt::Subtree {
        delimiter: Some(tt::Delimiter {
            kind: tt::DelimiterKind::Bracket,
            id: tt::TokenId::unspecified(),
        }),
        token_trees: meta_tkns,
    }));

    return Some(token_trees);

    fn mk_ident(s: &str) -> tt::TokenTree {
        tt::TokenTree::from(tt::Leaf::from(tt::Ident {
            text: s.into(),
            id: tt::TokenId::unspecified(),
        }))
    }

    fn mk_punct(c: char) -> tt::TokenTree {
        tt::TokenTree::from(tt::Leaf::from(tt::Punct {
            char: c,
            spacing: tt::Spacing::Alone,
            id: tt::TokenId::unspecified(),
        }))
    }

    fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
        let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };

        tt::TokenTree::from(tt::Leaf::from(lit))
    }
}
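
// E.g. the outer doc comment `/// hello` converts to the token trees of
// `#[doc = " hello"]` (sketch; inner `//!` comments get `#![...]` instead).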

struct TokenIdAlloc {
    map: TokenMap,
    global_offset: TextSize,
    next_id: u32,
}

impl TokenIdAlloc {
    fn alloc(
        &mut self,
        absolute_range: TextRange,
        synthetic_id: Option<SyntheticTokenId>,
    ) -> tt::TokenId {
        let relative_range = absolute_range - self.global_offset;
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        self.map.insert(token_id, relative_range);
        if let Some(id) = synthetic_id {
            self.map.insert_synthetic(token_id, id);
        }
        token_id
    }

    fn open_delim(&mut self, open_abs_range: TextRange) -> (tt::TokenId, usize) {
        let token_id = tt::TokenId(self.next_id);
        self.next_id += 1;
        let idx = self.map.insert_delim(
            token_id,
            open_abs_range - self.global_offset,
            open_abs_range - self.global_offset,
        );
        (token_id, idx)
    }

    fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
        match close_abs_range {
            None => {
                self.map.remove_delim(idx);
            }
            Some(close) => {
                self.map.update_close_delim(idx, close - self.global_offset);
            }
        }
    }
}

/// A raw token (straight from the lexer) convertor.
struct RawConvertor<'a> {
    lexed: parser::LexedStr<'a>,
    pos: usize,
    id_alloc: TokenIdAlloc,
}

trait SrcToken<Ctx>: std::fmt::Debug {
    fn kind(&self, ctx: &Ctx) -> SyntaxKind;

    fn to_char(&self, ctx: &Ctx) -> Option<char>;

    fn to_text(&self, ctx: &Ctx) -> SmolStr;

    fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
}

trait TokenConvertor: Sized {
    type Token: SrcToken<Self>;

    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;

    fn bump(&mut self) -> Option<(Self::Token, TextRange)>;

    fn peek(&self) -> Option<Self::Token>;

    fn id_alloc(&mut self) -> &mut TokenIdAlloc;
}
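
// Two implementations follow: `RawConvertor` walks raw lexer tokens by index,
// while `Convertor` walks a syntax tree, optionally splicing in synthetic
// tokens via its `replace`/`append` maps.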
impl<'a> SrcToken<RawConvertor<'a>> for usize {
    fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
        ctx.lexed.kind(*self)
    }

    fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
        ctx.lexed.text(*self).chars().next()
    }

    fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
        ctx.lexed.text(*self).into()
    }

    fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
        None
    }
}

impl<'a> TokenConvertor for RawConvertor<'a> {
    type Token = usize;

    fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
        let text = self.lexed.text(token);
        convert_doc_comment(&doc_comment(text))
    }

    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
        if self.pos == self.lexed.len() {
            return None;
        }
        let token = self.pos;
        self.pos += 1;
        let range = self.lexed.text_range(token);
        let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());

        Some((token, range))
    }

    fn peek(&self) -> Option<Self::Token> {
        if self.pos == self.lexed.len() {
            return None;
        }
        Some(self.pos)
    }

    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
        &mut self.id_alloc
    }
}

struct Convertor {
    id_alloc: TokenIdAlloc,
    current: Option<SyntaxToken>,
    current_synthetic: Vec<SyntheticToken>,
    preorder: PreorderWithTokens,
    replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    range: TextRange,
    punct_offset: Option<(SyntaxToken, TextSize)>,
}

impl Convertor {
    fn new(
        node: &SyntaxNode,
        global_offset: TextSize,
        mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
        mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    ) -> Convertor {
        let range = node.text_range();
        let mut preorder = node.preorder_with_tokens();
        let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
        Convertor {
            id_alloc: TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 },
            current: first,
            current_synthetic: synthetic,
            preorder,
            range,
            replace,
            append,
            punct_offset: None,
        }
    }

    fn next_token(
        preorder: &mut PreorderWithTokens,
        replace: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
        append: &mut FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
    ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
        while let Some(ev) = preorder.next() {
            let ele = match ev {
                WalkEvent::Enter(ele) => ele,
                WalkEvent::Leave(SyntaxElement::Node(node)) => {
                    if let Some(mut v) = append.remove(&node) {
                        if !v.is_empty() {
                            v.reverse();
                            return (None, v);
                        }
                    }
                    continue;
                }
                _ => continue,
            };
            match ele {
                SyntaxElement::Token(t) => return (Some(t), Vec::new()),
                SyntaxElement::Node(node) => {
                    if let Some(mut v) = replace.remove(&node) {
                        preorder.skip_subtree();
                        if !v.is_empty() {
                            v.reverse();
                            return (None, v);
                        }
                    }
                }
            }
        }
        (None, Vec::new())
    }
}

#[derive(Debug)]
enum SynToken {
    Ordinary(SyntaxToken),
    // FIXME is this supposed to be `Punct`?
    Punch(SyntaxToken, TextSize),
    Synthetic(SyntheticToken),
}

impl SynToken {
    fn token(&self) -> Option<&SyntaxToken> {
        match self {
            SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
            SynToken::Synthetic(_) => None,
        }
    }
}

impl SrcToken<Convertor> for SynToken {
    fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
        match self {
            SynToken::Ordinary(token) => token.kind(),
            SynToken::Punch(token, _) => token.kind(),
            SynToken::Synthetic(token) => token.kind,
        }
    }
    fn to_char(&self, _ctx: &Convertor) -> Option<char> {
        match self {
            SynToken::Ordinary(_) => None,
            SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
            SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
            SynToken::Synthetic(_) => None,
        }
    }
    fn to_text(&self, _ctx: &Convertor) -> SmolStr {
        match self {
            SynToken::Ordinary(token) => token.text().into(),
            SynToken::Punch(token, _) => token.text().into(),
            SynToken::Synthetic(token) => token.text.clone(),
        }
    }

    fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
        match self {
            SynToken::Synthetic(token) => Some(token.id),
            _ => None,
        }
    }
}

impl TokenConvertor for Convertor {
    type Token = SynToken;
    fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
        convert_doc_comment(token.token()?)
    }
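
    // `bump` yields tokens in three layers of priority: remaining characters
    // of a multi-char punct currently being split, then pending synthetic
    // tokens, then the next real token from the tree walk.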
    fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
        if let Some((punct, offset)) = self.punct_offset.clone() {
            if usize::from(offset) + 1 < punct.text().len() {
                let offset = offset + TextSize::of('.');
                let range = punct.text_range();
                self.punct_offset = Some((punct.clone(), offset));
                let range = TextRange::at(range.start() + offset, TextSize::of('.'));
                return Some((SynToken::Punch(punct, offset), range));
            }
        }

        if let Some(synth_token) = self.current_synthetic.pop() {
            if self.current_synthetic.is_empty() {
                let (new_current, new_synth) =
                    Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
                self.current = new_current;
                self.current_synthetic = new_synth;
            }
            let range = synth_token.range;
            return Some((SynToken::Synthetic(synth_token), range));
        }

        let curr = self.current.clone()?;
        if !self.range.contains_range(curr.text_range()) {
            return None;
        }
        let (new_current, new_synth) =
            Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
        self.current = new_current;
        self.current_synthetic = new_synth;
        let token = if curr.kind().is_punct() {
            self.punct_offset = Some((curr.clone(), 0.into()));
            let range = curr.text_range();
            let range = TextRange::at(range.start(), TextSize::of('.'));
            (SynToken::Punch(curr, 0.into()), range)
        } else {
            self.punct_offset = None;
            let range = curr.text_range();
            (SynToken::Ordinary(curr), range)
        };

        Some(token)
    }

    fn peek(&self) -> Option<Self::Token> {
        if let Some((punct, mut offset)) = self.punct_offset.clone() {
            offset += TextSize::of('.');
            if usize::from(offset) < punct.text().len() {
                return Some(SynToken::Punch(punct, offset));
            }
        }

        if let Some(synth_token) = self.current_synthetic.last() {
            return Some(SynToken::Synthetic(synth_token.clone()));
        }

        let curr = self.current.clone()?;
        if !self.range.contains_range(curr.text_range()) {
            return None;
        }

        let token = if curr.kind().is_punct() {
            SynToken::Punch(curr, 0.into())
        } else {
            SynToken::Ordinary(curr)
        };
        Some(token)
    }

    fn id_alloc(&mut self) -> &mut TokenIdAlloc {
        &mut self.id_alloc
    }
}
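
// Renders token trees back to text while the parser drives tree construction,
// recording the text range of every rendered token in a fresh `TokenMap`.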
struct TtTreeSink<'a> {
    buf: String,
    cursor: Cursor<'a>,
    open_delims: FxHashMap<tt::TokenId, TextSize>,
    text_pos: TextSize,
    inner: SyntaxTreeBuilder,
    token_map: TokenMap,
}

impl<'a> TtTreeSink<'a> {
    fn new(cursor: Cursor<'a>) -> Self {
        TtTreeSink {
            buf: String::new(),
            cursor,
            open_delims: FxHashMap::default(),
            text_pos: 0.into(),
            inner: SyntaxTreeBuilder::default(),
            token_map: TokenMap::default(),
        }
    }

    fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
        self.token_map.shrink_to_fit();
        (self.inner.finish(), self.token_map)
    }
}

fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
    let texts = match d {
        tt::DelimiterKind::Parenthesis => "()",
        tt::DelimiterKind::Brace => "{}",
        tt::DelimiterKind::Bracket => "[]",
    };

    // Slice out the single opening or closing character: index 0 of the
    // two-char string for an opener, index 1 for a closer.
    let idx = closing as usize;
    &texts[idx..texts.len() - (1 - idx)]
}

impl<'a> TtTreeSink<'a> {
    fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
        if kind == LIFETIME_IDENT {
            // A lifetime is rendered from two leaves: the apostrophe punct
            // and the identifier.
            n_tokens = 2;
        }

        let mut last = self.cursor;
        for _ in 0..n_tokens {
            let tmp: u8;
            if self.cursor.eof() {
                break;
            }
            last = self.cursor;
            let text: &str = loop {
                break match self.cursor.token_tree() {
                    Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                        // Mark the range if needed
                        let (text, id) = match leaf {
                            tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id),
                            tt::Leaf::Punct(punct) => {
                                assert!(punct.char.is_ascii());
                                tmp = punct.char as u8;
                                (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id)
                            }
                            tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id),
                        };
                        let range = TextRange::at(self.text_pos, TextSize::of(text));
                        self.token_map.insert(id, range);
                        self.cursor = self.cursor.bump();
                        text
                    }
                    Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                        self.cursor = self.cursor.subtree().unwrap();
                        match subtree.delimiter {
                            Some(d) => {
                                self.open_delims.insert(d.id, self.text_pos);
                                delim_to_str(d.kind, false)
                            }
                            None => continue,
                        }
                    }
                    None => {
                        let parent = self.cursor.end().unwrap();
                        self.cursor = self.cursor.bump();
                        match parent.delimiter {
                            Some(d) => {
                                if let Some(open_delim) = self.open_delims.get(&d.id) {
                                    let open_range = TextRange::at(*open_delim, TextSize::of('('));
                                    let close_range =
                                        TextRange::at(self.text_pos, TextSize::of('('));
                                    self.token_map.insert_delim(d.id, open_range, close_range);
                                }
                                delim_to_str(d.kind, true)
                            }
                            None => continue,
                        }
                    }
                };
            };
            self.buf += text;
            self.text_pos += TextSize::of(text);
        }

        self.inner.token(kind, self.buf.as_str());
        self.buf.clear();
        // Add whitespace between adjacent puncts
        let next = last.bump();
        if let (
            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
        ) = (last.token_tree(), next.token_tree())
        {
            // Note: other parts of rust-analyzer assume the semicolon is the
            // last token on its line, so we don't add whitespace after it.
            if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
                self.inner.token(WHITESPACE, " ");
                self.text_pos += TextSize::of(' ');
            }
        }
    }

    fn start_node(&mut self, kind: SyntaxKind) {
        self.inner.start_node(kind);
    }

    fn finish_node(&mut self) {
        self.inner.finish_node();
    }

    fn error(&mut self, error: String) {
        self.inner.error(error, self.text_pos)
    }
}