git.lizzy.rs Git - rust.git/commitdiff
Merge #2586
author: bors[bot] <26634292+bors[bot]@users.noreply.github.com>
Wed, 18 Dec 2019 14:47:04 +0000 (14:47 +0000)
committer: GitHub <noreply@github.com>
Wed, 18 Dec 2019 14:47:04 +0000 (14:47 +0000)
2586: Make reformat hook actually reformat files r=matklad a=matklad

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
crates/ra_hir_expand/src/builtin_derive.rs
crates/ra_hir_expand/src/lib.rs
crates/ra_hir_expand/src/quote.rs
crates/ra_mbe/src/lib.rs
crates/ra_mbe/src/mbe_expander/matcher.rs
crates/ra_mbe/src/mbe_expander/transcriber.rs
crates/ra_mbe/src/subtree_source.rs
crates/ra_mbe/src/syntax_bridge.rs
crates/ra_mbe/src/tests.rs
crates/ra_tt/src/lib.rs
docs/user/README.md

index b2644125383ce4c1a149ea012321c6eaf411a2b2..62c60e336cec21c58fcda496b1cb0cfe9a630a8e 100644 (file)
@@ -97,11 +97,24 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
 
 fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
     let mut result = Vec::<tt::TokenTree>::new();
-    result.push(tt::Leaf::Punct(tt::Punct { char: '<', spacing: tt::Spacing::Alone }).into());
+    result.push(
+        tt::Leaf::Punct(tt::Punct {
+            char: '<',
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        })
+        .into(),
+    );
     for i in 0..n {
         if i > 0 {
-            result
-                .push(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone }).into());
+            result.push(
+                tt::Leaf::Punct(tt::Punct {
+                    char: ',',
+                    spacing: tt::Spacing::Alone,
+                    id: tt::TokenId::unspecified(),
+                })
+                .into(),
+            );
         }
         result.push(
             tt::Leaf::Ident(tt::Ident {
@@ -112,7 +125,14 @@ fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
         );
         result.extend(bound.iter().cloned());
     }
-    result.push(tt::Leaf::Punct(tt::Punct { char: '>', spacing: tt::Spacing::Alone }).into());
+    result.push(
+        tt::Leaf::Punct(tt::Punct {
+            char: '>',
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        })
+        .into(),
+    );
     result
 }
 
index cb4e1950bf82f6c2b27269a5a813043e217317be..2fa5d51402bc6f8828e1f1147fb6cb76df4f32f6 100644 (file)
@@ -227,7 +227,7 @@ pub fn map_token_down(&self, token: InFile<&SyntaxToken>) -> Option<InFile<Synta
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);
 
-        let range = self.exp_map.range_by_token(token_id)?;
+        let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
 
         let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
 
@@ -248,7 +248,7 @@ pub fn map_token_up(
             }
         };
 
-        let range = token_map.range_by_token(token_id)?;
+        let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
         let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
             .into_token()?;
         Some((tt.with_value(token), origin))
index aa8a5f23fd6e7f12908b8f4bc8ea4de43ca35c14..49155fe6262c234d8bd222a4e13ac75d2dc41b80 100644 (file)
@@ -16,7 +16,10 @@ macro_rules! __quote {
         {
             let children = $crate::__quote!($($tt)*);
             let subtree = tt::Subtree {
-                delimiter: Some(tt::Delimiter::$delim),
+                delimiter: Some(tt::Delimiter {
+                    kind: tt::DelimiterKind::$delim,
+                    id: tt::TokenId::unspecified(),
+                }),
                 token_trees: $crate::quote::IntoTt::to_tokens(children),
             };
             subtree
@@ -29,6 +32,7 @@ macro_rules! __quote {
                 tt::Leaf::Punct(tt::Punct {
                     char: $first,
                     spacing: tt::Spacing::Alone,
+                    id: tt::TokenId::unspecified(),
                 }).into()
             ]
         }
@@ -40,10 +44,12 @@ macro_rules! __quote {
                 tt::Leaf::Punct(tt::Punct {
                     char: $first,
                     spacing: tt::Spacing::Joint,
+                    id: tt::TokenId::unspecified(),
                 }).into(),
                 tt::Leaf::Punct(tt::Punct {
                     char: $sec,
                     spacing: tt::Spacing::Alone,
+                    id: tt::TokenId::unspecified(),
                 }).into()
             ]
         }
@@ -179,15 +185,15 @@ fn to_token($this) -> tt::TokenTree {
 }
 
 impl_to_to_tokentrees! {
-    u32 => self { tt::Literal{text: self.to_string().into()} };
-    usize => self { tt::Literal{text: self.to_string().into()}};
-    i32 => self { tt::Literal{text: self.to_string().into()}};
+    u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+    usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
+    i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
     tt::Leaf => self { self };
     tt::Literal => self { self };
     tt::Ident => self { self };
     tt::Punct => self { self };
-    &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}};
-    String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}}
+    &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
+    String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
 }
 
 #[cfg(test)]
@@ -254,8 +260,13 @@ fn test_quote_derive_copy_hack() {
         let fields =
             fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten();
 
-        let list =
-            tt::Subtree { delimiter: Some(tt::Delimiter::Brace), token_trees: fields.collect() };
+        let list = tt::Subtree {
+            delimiter: Some(tt::Delimiter {
+                kind: tt::DelimiterKind::Brace,
+                id: tt::TokenId::unspecified(),
+            }),
+            token_trees: fields.collect(),
+        };
 
         let quoted = quote! {
             impl Clone for #struct_name {
index ce2deadf6e41a49227800d0aefebdd4e8ca05153..45dad2d108bc8ffda931f94515a9c51e6dcd278e 100644 (file)
@@ -67,7 +67,15 @@ fn max_id(subtree: &tt::Subtree) -> Option<u32> {
                 .token_trees
                 .iter()
                 .filter_map(|tt| match tt {
-                    tt::TokenTree::Subtree(subtree) => max_id(subtree),
+                    tt::TokenTree::Subtree(subtree) => {
+                        let tree_id = max_id(subtree);
+                        match subtree.delimiter {
+                            Some(it) if it.id != tt::TokenId::unspecified() => {
+                                Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0)))
+                            }
+                            _ => tree_id,
+                        }
+                    }
                     tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
                         if ident.id != tt::TokenId::unspecified() =>
                     {
@@ -85,9 +93,13 @@ fn shift_all(self, tt: &mut tt::Subtree) {
             match t {
                 tt::TokenTree::Leaf(leaf) => match leaf {
                     tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id),
-                    _ => (),
+                    tt::Leaf::Punct(punct) => punct.id = self.shift(punct.id),
+                    tt::Leaf::Literal(lit) => lit.id = self.shift(lit.id),
                 },
-                tt::TokenTree::Subtree(tt) => self.shift_all(tt),
+                tt::TokenTree::Subtree(tt) => {
+                    tt.delimiter.as_mut().map(|it: &mut Delimiter| it.id = self.shift(it.id));
+                    self.shift_all(tt)
+                }
             }
         }
     }
index 3f51364780a083713440f5f7252d5a70f4f4dd4e..e36b5a412b903dc65487b61e1a010d9f94cbed35 100644 (file)
@@ -106,7 +106,7 @@ fn match_subtree(
             }
             Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {
                 let rhs = src.expect_subtree().map_err(|()| err!("expected subtree"))?;
-                if lhs.delimiter != rhs.delimiter {
+                if lhs.delimiter_kind() != rhs.delimiter_kind() {
                     bail!("mismatched delimiter")
                 }
                 let mut src = TtIter::new(rhs);
index f7636db11b8abd031686b3bccf37348281729749..eda66cd506ebe2f6928011ee20877e10e339f900 100644 (file)
@@ -108,7 +108,12 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError>
         let tt = tt::Subtree {
             delimiter: None,
             token_trees: vec![
-                tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone }).into(),
+                tt::Leaf::from(tt::Punct {
+                    char: '$',
+                    spacing: tt::Spacing::Alone,
+                    id: tt::TokenId::unspecified(),
+                })
+                .into(),
                 tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() })
                     .into(),
             ],
index 061e9f20b2b6f2c47965ed3f5d2030b3b12557e9..b841c39d31440aa54c45c70873ad9d2c3cd621a2 100644 (file)
@@ -70,11 +70,11 @@ fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
                     }
                     Some(tt::TokenTree::Subtree(subtree)) => {
                         self.cached_cursor.set(cursor.subtree().unwrap());
-                        cached.push(Some(convert_delim(subtree.delimiter, false)));
+                        cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
                     }
                     None => {
                         if let Some(subtree) = cursor.end() {
-                            cached.push(Some(convert_delim(subtree.delimiter, true)));
+                            cached.push(Some(convert_delim(subtree.delimiter_kind(), true)));
                             self.cached_cursor.set(cursor.bump());
                         }
                     }
@@ -114,11 +114,11 @@ fn is_keyword(&self, kw: &str) -> bool {
     }
 }
 
-fn convert_delim(d: Option<tt::Delimiter>, closing: bool) -> TtToken {
+fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
     let (kinds, texts) = match d {
-        Some(tt::Delimiter::Parenthesis) => ([T!['('], T![')']], "()"),
-        Some(tt::Delimiter::Brace) => ([T!['{'], T!['}']], "{}"),
-        Some(tt::Delimiter::Bracket) => ([T!['['], T![']']], "[]"),
+        Some(tt::DelimiterKind::Parenthesis) => ([T!['('], T![')']], "()"),
+        Some(tt::DelimiterKind::Brace) => ([T!['{'], T!['}']], "{}"),
+        Some(tt::DelimiterKind::Bracket) => ([T!['['], T![']']], "[]"),
         None => ([L_DOLLAR, R_DOLLAR], ""),
     };
 
index b8e2cfc1d8c2ab8c1fd44f37ff9ed44dc201c8fb..2c60430d155cd51cfe072551124d276bfb563043 100644 (file)
@@ -5,17 +5,37 @@
     ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
+use rustc_hash::FxHashMap;
 use std::iter::successors;
 use tt::buffer::{Cursor, TokenBuffer};
 
 use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;
 
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum TokenTextRange {
+    Token(TextRange),
+    Delimiter(TextRange, TextRange),
+}
+
+impl TokenTextRange {
+    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+        match self {
+            TokenTextRange::Token(it) => Some(it),
+            TokenTextRange::Delimiter(open, close) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(open),
+                T!['}'] | T![')'] | T![']'] => Some(close),
+                _ => None,
+            },
+        }
+    }
+}
+
 /// Maps `tt::TokenId` to the relative range of the original token.
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TextRange)>,
+    entries: Vec<(tt::TokenId, TokenTextRange)>,
 }
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
@@ -71,17 +91,32 @@ pub fn token_tree_to_syntax_node(
 
 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| *range == relative_range)?;
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+            TokenTextRange::Token(it) => *it == relative_range,
+            TokenTextRange::Delimiter(open, close) => {
+                *open == relative_range || *close == relative_range
+            }
+        })?;
         Some(token_id)
     }
 
-    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
+    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
         let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
         Some(range)
     }
 
     fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, relative_range));
+        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+    }
+
+    fn insert_delim(
+        &mut self,
+        token_id: tt::TokenId,
+        open_relative_range: TextRange,
+        close_relative_range: TextRange,
+    ) {
+        self.entries
+            .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
     }
 }
 
@@ -121,7 +156,10 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
         token_trees.push(mk_punct('!'));
     }
     token_trees.push(tt::TokenTree::from(tt::Subtree {
-        delimiter: Some(tt::Delimiter::Bracket),
+        delimiter: Some(tt::Delimiter {
+            kind: tt::DelimiterKind::Bracket,
+            id: tt::TokenId::unspecified(),
+        }),
         token_trees: meta_tkns,
     }));
 
@@ -136,11 +174,15 @@ fn mk_ident(s: &str) -> tt::TokenTree {
     }
 
     fn mk_punct(c: char) -> tt::TokenTree {
-        tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone }))
+        tt::TokenTree::from(tt::Leaf::from(tt::Punct {
+            char: c,
+            spacing: tt::Spacing::Alone,
+            id: tt::TokenId::unspecified(),
+        }))
     }
 
     fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
-        let lit = tt::Literal { text: doc_comment_text(comment) };
+        let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };
 
         tt::TokenTree::from(tt::Leaf::from(lit))
     }
@@ -186,12 +228,16 @@ fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
         .last()
         .unwrap();
 
-        let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
-            (T!['('], T![')']) => (Some(tt::Delimiter::Parenthesis), true),
-            (T!['{'], T!['}']) => (Some(tt::Delimiter::Brace), true),
-            (T!['['], T![']']) => (Some(tt::Delimiter::Bracket), true),
+        let (delimiter_kind, skip_first) = match (first_child.kind(), last_child.kind()) {
+            (T!['('], T![')']) => (Some(tt::DelimiterKind::Parenthesis), true),
+            (T!['{'], T!['}']) => (Some(tt::DelimiterKind::Brace), true),
+            (T!['['], T![']']) => (Some(tt::DelimiterKind::Bracket), true),
             _ => (None, false),
         };
+        let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
+            kind,
+            id: self.alloc_delim(first_child.text_range(), last_child.text_range()),
+        });
 
         let mut token_trees = Vec::new();
         let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
@@ -223,25 +269,34 @@ fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
                             .take(token.text().len() - 1)
                             .chain(std::iter::once(last_spacing));
                         for (char, spacing) in token.text().chars().zip(spacing_iter) {
-                            token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                            token_trees.push(
+                                tt::Leaf::from(tt::Punct {
+                                    char,
+                                    spacing,
+                                    id: self.alloc(token.text_range()),
+                                })
+                                .into(),
+                            );
                         }
                     } else {
-                        let child: tt::TokenTree =
-                            if token.kind() == T![true] || token.kind() == T![false] {
-                                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                            } else if token.kind().is_keyword()
-                                || token.kind() == IDENT
-                                || token.kind() == LIFETIME
-                            {
-                                let id = self.alloc(token.text_range());
-                                let text = token.text().clone();
-                                tt::Leaf::from(tt::Ident { text, id }).into()
-                            } else if token.kind().is_literal() {
-                                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                            } else {
-                                return None;
+                        macro_rules! make_leaf {
+                            ($i:ident) => {
+                                tt::$i {
+                                    id: self.alloc(token.text_range()),
+                                    text: token.text().clone(),
+                                }
+                                .into()
                             };
-                        token_trees.push(child);
+                        }
+
+                        let child: tt::Leaf = match token.kind() {
+                            T![true] | T![false] => make_leaf!(Literal),
+                            IDENT | LIFETIME => make_leaf!(Ident),
+                            k if k.is_keyword() => make_leaf!(Ident),
+                            k if k.is_literal() => make_leaf!(Literal),
+                            _ => return None,
+                        };
+                        token_trees.push(child.into());
                     }
                 }
                 NodeOrToken::Node(node) => {
@@ -266,11 +321,26 @@ fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
         self.map.insert(token_id, relative_range);
         token_id
     }
+
+    fn alloc_delim(
+        &mut self,
+        open_abs_range: TextRange,
+        close_abs_range: TextRange,
+    ) -> tt::TokenId {
+        let open_relative_range = open_abs_range - self.global_offset;
+        let close_relative_range = close_abs_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+
+        self.map.insert_delim(token_id, open_relative_range, close_relative_range);
+        token_id
+    }
 }
 
 struct TtTreeSink<'a> {
     buf: String,
     cursor: Cursor<'a>,
+    open_delims: FxHashMap<tt::TokenId, TextUnit>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
     token_map: TokenMap,
@@ -285,6 +355,7 @@ fn new(cursor: Cursor<'a>) -> Self {
         TtTreeSink {
             buf: String::new(),
             cursor,
+            open_delims: FxHashMap::default(),
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
@@ -297,11 +368,11 @@ fn finish(self) -> (Parse<SyntaxNode>, TokenMap) {
     }
 }
 
-fn delim_to_str(d: Option<tt::Delimiter>, closing: bool) -> SmolStr {
+fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
     let texts = match d {
-        Some(tt::Delimiter::Parenthesis) => "()",
-        Some(tt::Delimiter::Brace) => "{}",
-        Some(tt::Delimiter::Bracket) => "[]",
+        Some(tt::DelimiterKind::Parenthesis) => "()",
+        Some(tt::DelimiterKind::Brace) => "{}",
+        Some(tt::DelimiterKind::Bracket) => "[]",
         None => return "".into(),
     };
 
@@ -322,34 +393,49 @@ fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
                 break;
             }
 
-            match self.cursor.token_tree() {
+            let text: SmolStr = match self.cursor.token_tree() {
                 Some(tt::TokenTree::Leaf(leaf)) => {
                     // Mark the range if needed
-                    if let tt::Leaf::Ident(ident) = leaf {
-                        if kind == IDENT {
-                            let range =
-                                TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
-                            self.token_map.insert(ident.id, range);
-                        }
-                    }
-
+                    let id = match leaf {
+                        tt::Leaf::Ident(ident) => ident.id,
+                        tt::Leaf::Punct(punct) => punct.id,
+                        tt::Leaf::Literal(lit) => lit.id,
+                    };
+                    let text = SmolStr::new(format!("{}", leaf));
+                    let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text));
+                    self.token_map.insert(id, range);
                     self.cursor = self.cursor.bump();
-                    self.buf += &format!("{}", leaf);
+                    text
                 }
                 Some(tt::TokenTree::Subtree(subtree)) => {
                     self.cursor = self.cursor.subtree().unwrap();
-                    self.buf += &delim_to_str(subtree.delimiter, false);
+                    if let Some(id) = subtree.delimiter.map(|it| it.id) {
+                        self.open_delims.insert(id, self.text_pos);
+                    }
+                    delim_to_str(subtree.delimiter_kind(), false)
                 }
                 None => {
                     if let Some(parent) = self.cursor.end() {
                         self.cursor = self.cursor.bump();
-                        self.buf += &delim_to_str(parent.delimiter, true);
+                        if let Some(id) = parent.delimiter.map(|it| it.id) {
+                            if let Some(open_delim) = self.open_delims.get(&id) {
+                                let open_range =
+                                    TextRange::offset_len(*open_delim, TextUnit::from_usize(1));
+                                let close_range =
+                                    TextRange::offset_len(self.text_pos, TextUnit::from_usize(1));
+                                self.token_map.insert_delim(id, open_range, close_range);
+                            }
+                        }
+                        delim_to_str(parent.delimiter_kind(), true)
+                    } else {
+                        continue;
                     }
                 }
             };
+            self.buf += &text;
+            self.text_pos += TextUnit::of_str(&text);
         }
 
-        self.text_pos += TextUnit::of_str(&self.buf);
         let text = SmolStr::new(self.buf.as_str());
         self.buf.clear();
         self.inner.token(kind, text);
@@ -495,7 +581,7 @@ fn test_token_tree_last_child_is_white_space() {
         let token_tree = ast::TokenTree::cast(token_tree).unwrap();
         let tt = ast_to_token_tree(&token_tree).unwrap().0;
 
-        assert_eq!(tt.delimiter, Some(tt::Delimiter::Brace));
+        assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
     }
 
     #[test]
index 148cc262547cca2510c3a433254d573d557f0b7e..ff225f0db21d8ba650bc25cb38b187927bcca1b8 100644 (file)
@@ -77,13 +77,41 @@ fn get_id(t: &tt::TokenTree) -> Option<u32> {
     }
 
     assert_eq!(expansion.token_trees.len(), 3);
-    // ($e:ident) => { foo bar $e }
-    //   0 1            2   3   4
-    assert_eq!(get_id(&expansion.token_trees[0]), Some(2));
-    assert_eq!(get_id(&expansion.token_trees[1]), Some(3));
+    // {($e:ident) => { foo bar $e }}
+    // 012345      67 8 9   T   12
+    assert_eq!(get_id(&expansion.token_trees[0]), Some(9));
+    assert_eq!(get_id(&expansion.token_trees[1]), Some(10));
 
-    // So baz should be 5
-    assert_eq!(get_id(&expansion.token_trees[2]), Some(5));
+    // The input args of macro call include parentheses:
+    // (baz)
+    // So baz should be 12+1+1
+    assert_eq!(get_id(&expansion.token_trees[2]), Some(14));
+}
+
+#[test]
+fn test_token_map() {
+    use ra_parser::SyntaxKind::*;
+    use ra_syntax::T;
+
+    let macro_definition = r#"
+macro_rules! foobar {
+    ($e:ident) => { fn $e() {} }
+}
+"#;
+    let rules = create_rules(macro_definition);
+    let (expansion, (token_map, content)) = expand_and_map(&rules, "foobar!(baz);");
+
+    let get_text = |id, kind| -> String {
+        content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
+    };
+
+    assert_eq!(expansion.token_trees.len(), 4);
+    // {($e:ident) => { fn $e() {} }}
+    // 012345      67 8 9  T12  3
+
+    assert_eq!(get_text(tt::TokenId(9), IDENT), "fn");
+    assert_eq!(get_text(tt::TokenId(12), T!['(']), "(");
+    assert_eq!(get_text(tt::TokenId(13), T!['{']), "{");
 }
 
 #[test]
@@ -1441,6 +1469,23 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
     rules.expand(&invocation_tt).unwrap()
 }
 
+pub(crate) fn expand_and_map(
+    rules: &MacroRules,
+    invocation: &str,
+) -> (tt::Subtree, (TokenMap, String)) {
+    let source_file = ast::SourceFile::parse(invocation).ok().unwrap();
+    let macro_invocation =
+        source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
+
+    let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
+    let expanded = rules.expand(&invocation_tt).unwrap();
+
+    let (node, expanded_token_tree) =
+        token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
+
+    (expanded, (expanded_token_tree, node.syntax_node().to_string()))
+}
+
 pub(crate) enum MacroKind {
     Items,
     Stmts,
index e7bfd5fd294e6274f09c21b23ec8093fafe541f2..10f424aae96c645cecae2b7f5e411c61d24a79c0 100644 (file)
@@ -55,7 +55,13 @@ pub struct Subtree {
 }
 
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum Delimiter {
+pub struct Delimiter {
+    pub id: TokenId,
+    pub kind: DelimiterKind,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum DelimiterKind {
     Parenthesis,
     Brace,
     Bracket,
@@ -64,12 +70,14 @@ pub enum Delimiter {
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Literal {
     pub text: SmolStr,
+    pub id: TokenId,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct Punct {
     pub char: char,
     pub spacing: Spacing,
+    pub id: TokenId,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -95,10 +103,10 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 
 impl fmt::Display for Subtree {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        let (l, r) = match self.delimiter {
-            Some(Delimiter::Parenthesis) => ("(", ")"),
-            Some(Delimiter::Brace) => ("{", "}"),
-            Some(Delimiter::Bracket) => ("[", "]"),
+        let (l, r) = match self.delimiter_kind() {
+            Some(DelimiterKind::Parenthesis) => ("(", ")"),
+            Some(DelimiterKind::Brace) => ("{", "}"),
+            Some(DelimiterKind::Bracket) => ("[", "]"),
             None => ("", ""),
         };
         f.write_str(l)?;
@@ -163,6 +171,10 @@ pub fn count(&self) -> usize {
 
         self.token_trees.len() + children_count
     }
+
+    pub fn delimiter_kind(&self) -> Option<DelimiterKind> {
+        self.delimiter.map(|it| it.kind)
+    }
 }
 
 pub mod buffer;
index 04c3493426b6abd6ff2a19d529f2c70626d2f67e..9cdabfd421b2839554adc287d3e213fbb994ade0 100644 (file)
@@ -135,37 +135,25 @@ to load path and require it in `init.el`
 * (Optionally) bind commands like `rust-analyzer-join-lines`, `rust-analyzer-extend-selection` and `rust-analyzer-expand-macro` to keys, and enable `rust-analyzer-inlay-hints-mode` to get inline type hints
 
 
-## Vim and NeoVim
+## Vim and NeoVim (coc-rust-analyzer)
 
-Neovim 0.5 has a built in language server. For a quick start configuration of
-rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer).
-Once `neovim/nvim-lsp` is installed, you can use `call nvim_lsp#setup("rust_analyzer", {})`
-or `lua require'nvim_lsp'.rust_analyzer.setup({})` to quickly get set up.
-
-* Install coc.nvim by following the instructions at [coc.nvim]
-  - You will need nodejs installed.
-  - You may want to include some of the sample vim configurations [from here][coc-vim-conf]
-  - Note that if you use a plugin manager other than `vim-plug`, you may need to manually
-    checkout the `release` branch wherever your plugin manager cloned it. Otherwise you will
-    get errors about a missing javascript file.
-* Run `:CocInstall coc-rust-analyzer` to install [coc-rust-analyzer], this extension implemented _most_ of the features supported in the VSCode extension:
+* Install coc.nvim by following the instructions at [coc.nvim][] (nodejs required)
+* Run `:CocInstall coc-rust-analyzer` to install [coc-rust-analyzer], this extension implements _most_ of the features supported in the VSCode extension:
   - same configurations as VSCode extension, `rust-analyzer.raLspServerPath`, `rust-analyzer.enableCargoWatchOnStartup` etc.
   - same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc.
   - highlighting and inlay_hints are not implemented yet
 
 [coc.nvim]: https://github.com/neoclide/coc.nvim
-[coc-vim-conf]: https://github.com/neoclide/coc.nvim/#example-vim-configuration
 [coc-rust-analyzer]: https://github.com/fannheyward/coc-rust-analyzer
 
-## Vim and NeoVim Alternative
+## Vim and NeoVim (LanguageClient-neovim)
 
 * Install LanguageClient-neovim by following the instructions [here][lang-client-neovim]
-  - No extra run-time is required as this server is written in Rust
   - The github project wiki has extra tips on configuration
 
 * Configure by adding this to your vim/neovim config file (replacing the existing rust specific line if it exists):
 
-```
+```vim
 let g:LanguageClient_serverCommands = {
 \ 'rust': ['ra_lsp_server'],
 \ }
@@ -173,6 +161,13 @@ let g:LanguageClient_serverCommands = {
 
 [lang-client-neovim]: https://github.com/autozimu/LanguageClient-neovim
 
+## NeoVim (nvim-lsp)
+
+NeoVim 0.5 (not yet released) has built in language server support. For a quick start configuration
+of rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer).
+Once `neovim/nvim-lsp` is installed, you can use `call nvim_lsp#setup("rust_analyzer", {})`
+or `lua require'nvim_lsp'.rust_analyzer.setup({})` to quickly get set up.
+
 
 ## Sublime Text 3