1 use ra_parser::{TokenSource, TreeSink, ParseError};
3 AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc,
4 ast, SyntaxKind::*, TextUnit
7 /// Maps `tt::TokenId` to the relative range of the original token.
10 /// Maps `tt::TokenId` to the *relative* source range.
// NOTE(review): field name is misspelled — should be `tokens`. Renaming requires
// touching every use site in the impl below (separate blocks), so it is left as-is.
// Index `i` holds the range for `tt::TokenId(i)`; see `alloc` / `relative_range_of`.
11 toknes: Vec<TextRange>,
14 /// Convert the syntax tree (what the user has written) to a `TokenTree` (what macro
/// expansion operates on). On success also returns a `TokenMap`, which records, for
/// each allocated `tt::TokenId`, the originating token's range relative to the start
/// of this token tree. Returns `None` when `convert_tt` cannot handle the node.
16 pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
17 let mut token_map = TokenMap::default();
18 let node = ast.syntax();
// Pass the tree's absolute start so ranges stored in the map are tree-relative.
19 let tt = convert_tt(&mut token_map, node.range().start(), node)?;
23 /// Parses the token tree (result of macro expansion) as a sequence of items
24 pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> {
// Flatten the token tree into a linear token stream for the parser...
25 let token_source = TtTokenSource::new(tt);
26 let mut tree_sink = TtTreeSink::new(&token_source.tokens);
// ...then run the ordinary parser over it, building a syntax tree via the sink.
27 ra_parser::parse(&token_source, &mut tree_sink);
28 let syntax = tree_sink.inner.finish();
// `unwrap`: the parser is expected to always produce a `SOURCE_FILE` root here —
// NOTE(review): confirm this invariant against `ra_parser::parse`.
29 ast::SourceFile::cast(&syntax).unwrap().to_owned()
/// Returns the tree-relative source range recorded for `tt`, or `None`
/// if this map never allocated that id.
33 pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
// `TokenId`s are dense indices into the range list (see `alloc`).
34 let idx = tt.0 as usize;
35 self.toknes.get(idx).map(|&it| it)
/// Records `relative_range` and returns a fresh `tt::TokenId` referring to it.
38 fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {
39 let id = self.toknes.len();
40 self.toknes.push(relative_range);
41 tt::TokenId(id as u32)
// Recursively converts a token-tree syntax node into a `tt::Subtree`.
// `global_offset` is the absolute start of the outermost tree; it is subtracted from
// absolute child ranges so that `TokenMap` stores tree-relative ranges.
46 token_map: &mut TokenMap,
47 global_offset: TextUnit,
49 ) -> Option<tt::Subtree> {
// Delimiter is derived from the first/last child tokens. NOTE(review): the
// fallthrough arm is elided here — presumably `_ => return None`; confirm.
50 let first_child = tt.first_child()?;
51 let last_child = tt.last_child()?;
52 let delimiter = match (first_child.kind(), last_child.kind()) {
53 (L_PAREN, R_PAREN) => tt::Delimiter::Parenthesis,
54 (L_CURLY, R_CURLY) => tt::Delimiter::Brace,
55 (L_BRACK, R_BRACK) => tt::Delimiter::Bracket,
58 let mut token_trees = Vec::new();
// Skip the opening delimiter; the closing one and trivia are filtered below.
59 for child in tt.children().skip(1) {
60 if child == first_child || child == last_child || child.kind().is_trivia() {
// Punctuation: split multi-char tokens into single-char `Punct`s, marking every
// char but the last as `Joint` so the pieces can be glued back together.
63 if child.kind().is_punct() {
65 for char in child.leaf_text().unwrap().chars() {
66 if let Some(char) = prev {
68 tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Joint }).into(),
// The final pending char of the run is emitted as `Alone`.
73 if let Some(char) = prev {
75 .push(tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into());
// Non-punct children: nested trees recurse; idents/keywords get a `TokenId`
// allocated so their origin can be traced; literals carry just their text.
78 let child: tt::TokenTree = if child.kind() == TOKEN_TREE {
79 convert_tt(token_map, global_offset, child)?.into()
80 } else if child.kind().is_keyword() || child.kind() == IDENT {
81 let relative_range = child.range() - global_offset;
82 let id = token_map.alloc(relative_range);
83 let text = child.leaf_text().unwrap().clone();
84 tt::Leaf::from(tt::Ident { text, id }).into()
85 } else if child.kind().is_literal() {
86 tt::Leaf::from(tt::Literal { text: child.leaf_text().unwrap().clone() }).into()
90 token_trees.push(child)
94 let res = tt::Subtree { delimiter, token_trees };
/// Flattened token stream over a `tt::Subtree`, fed to `ra_parser` as a `TokenSource`.
98 struct TtTokenSource {
// NOTE(review): intervening lines are elided — `is_joint_to_next` appears to be a
// field of a separate `Tok` struct (see its use in `convert_leaf`), not of
// `TtTokenSource` itself; confirm against the full file.
104 is_joint_to_next: bool,
/// Flattens `tt` into the linear `tokens` buffer the parser consumes.
109 fn new(tt: &tt::Subtree) -> TtTokenSource {
110 let mut res = TtTokenSource { tokens: Vec::new() };
111 res.convert_subtree(tt);
// Emits the opening delimiter, then the subtree contents, then the closing delimiter.
114 fn convert_subtree(&mut self, sub: &tt::Subtree) {
115 self.push_delim(sub.delimiter, false);
116 sub.token_trees.iter().for_each(|tt| self.convert_tt(tt));
117 self.push_delim(sub.delimiter, true)
// Dispatch: leaves are converted directly, nested subtrees recurse.
119 fn convert_tt(&mut self, tt: &tt::TokenTree) {
121 tt::TokenTree::Leaf(leaf) => self.convert_leaf(leaf),
122 tt::TokenTree::Subtree(sub) => self.convert_subtree(sub),
// Converts a single leaf into a `Tok`, choosing a `SyntaxKind` for it.
125 fn convert_leaf(&mut self, leaf: &tt::Leaf) {
126 let tok = match leaf {
127 tt::Leaf::Literal(l) => Tok {
// Every literal is currently lexed as an integer — see the FIXME.
128 kind: SyntaxKind::INT_NUMBER, // FIXME
129 is_joint_to_next: false,
130 text: l.text.clone(),
132 tt::Leaf::Punct(p) => {
133 let kind = match p.char {
134 // lexer may produce compound tokens for these ones
// `from_char` panics on chars with no single-char kind; the special cases
// are presumably handled by the elided arms above — NOTE(review): confirm.
140 c => SyntaxKind::from_char(c).unwrap(),
// Re-encode the single char as a string to build the token text.
143 let mut buf = [0u8; 4];
144 let s: &str = p.char.encode_utf8(&mut buf);
// Joint spacing from the token tree is preserved for the parser's glue logic.
147 Tok { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text }
149 tt::Leaf::Ident(ident) => {
// Keywords are recognized here so the parser sees proper keyword kinds.
150 let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT);
151 Tok { kind, is_joint_to_next: false, text: ident.text.clone() }
154 self.tokens.push(tok)
// Pushes the opening (`closing == false`) or closing (`closing == true`) half of a
// delimiter pair; `Delimiter::None` emits nothing.
156 fn push_delim(&mut self, d: tt::Delimiter, closing: bool) {
157 let (kinds, texts) = match d {
158 tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"),
159 tt::Delimiter::Brace => ([L_CURLY, R_CURLY], "{}"),
160 tt::Delimiter::Bracket => ([L_BRACK, R_BRACK], "[]"),
161 tt::Delimiter::None => return,
// `idx` selects the half: 0 → opening, 1 → closing.
163 let idx = closing as usize;
164 let kind = kinds[idx];
// Slice out exactly one char of the two-char delimiter string.
165 let text = &texts[idx..texts.len() - (1 - idx)];
166 let tok = Tok { kind, is_joint_to_next: false, text: SmolStr::new(text) };
167 self.tokens.push(tok)
171 impl TokenSource for TtTokenSource {
// Kind of the token at `pos`. NOTE(review): the else-branch is elided here —
// presumably it yields `EOF` past the end; confirm against the full file.
172 fn token_kind(&self, pos: usize) -> SyntaxKind {
173 if let Some(tok) = self.tokens.get(pos) {
// NOTE(review): unchecked index — panics if the parser queries past the end.
179 fn is_token_joint_to_next(&self, pos: usize) -> bool {
180 self.tokens[pos].is_joint_to_next
// Textual keyword check requested by the parser for contextual keywords.
182 fn is_keyword(&self, pos: usize, kw: &str) -> bool {
183 self.tokens[pos].text == *kw
/// `TreeSink` that re-assembles a syntax tree from the flattened token stream.
188 struct TtTreeSink<'a> {
// (other fields — token slice, positions, text buffer — are elided from this view)
// `inner` accumulates the syntax tree being built.
193 inner: SyntaxTreeBuilder,
196 impl<'a> TtTreeSink<'a> {
/// Creates a sink over the `tokens` produced by `TtTokenSource`.
197 fn new(tokens: &'a [Tok]) -> TtTreeSink {
203 inner: SyntaxTreeBuilder::default(),
208 impl<'a> TreeSink for TtTreeSink<'a> {
// Consumes `n_tokens` tokens from the stream and emits them as a single leaf.
209 fn leaf(&mut self, kind: SyntaxKind, n_tokens: u8) {
210 for _ in 0..n_tokens {
// Concatenate the texts of all tokens that make up this leaf.
211 self.buf += self.tokens[self.token_pos].text.as_str();
// Advance the running text position by the leaf's total length.
214 self.text_pos += TextUnit::of_str(&self.buf);
215 let text = SmolStr::new(self.buf.as_str());
// NOTE(review): `buf` is presumably cleared between leaves on an elided line —
// otherwise text would accumulate across leaves; confirm against the full file.
217 self.inner.leaf(kind, text)
// Branch open/close calls are forwarded to the underlying tree builder.
220 fn start_branch(&mut self, kind: SyntaxKind) {
221 self.inner.start_branch(kind);
224 fn finish_branch(&mut self) {
225 self.inner.finish_branch();
// Parse errors are attached at the current text position.
228 fn error(&mut self, error: ParseError) {
229 self.inner.error(error, self.text_pos)