fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
let mut result = Vec::<tt::TokenTree>::new();
- result.push(tt::Leaf::Punct(tt::Punct { char: '<', spacing: tt::Spacing::Alone }).into());
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: '<',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
for i in 0..n {
if i > 0 {
- result
- .push(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone }).into());
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: ',',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
}
result.push(
tt::Leaf::Ident(tt::Ident {
);
result.extend(bound.iter().cloned());
}
- result.push(tt::Leaf::Punct(tt::Punct { char: '>', spacing: tt::Spacing::Alone }).into());
+ result.push(
+ tt::Leaf::Punct(tt::Punct {
+ char: '>',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
+ );
result
}
let token_id = self.macro_arg.1.token_by_range(range)?;
let token_id = self.macro_def.0.map_id_down(token_id);
- let range = self.exp_map.range_by_token(token_id)?;
+ let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
}
};
- let range = token_map.range_by_token(token_id)?;
+ let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
.into_token()?;
Some((tt.with_value(token), origin))
{
let children = $crate::__quote!($($tt)*);
let subtree = tt::Subtree {
- delimiter: Some(tt::Delimiter::$delim),
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::$delim,
+ id: tt::TokenId::unspecified(),
+ }),
token_trees: $crate::quote::IntoTt::to_tokens(children),
};
subtree
tt::Leaf::Punct(tt::Punct {
char: $first,
spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
}).into()
]
}
tt::Leaf::Punct(tt::Punct {
char: $first,
spacing: tt::Spacing::Joint,
+ id: tt::TokenId::unspecified(),
}).into(),
tt::Leaf::Punct(tt::Punct {
char: $sec,
spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
}).into()
]
}
}
impl_to_to_tokentrees! {
- u32 => self { tt::Literal{text: self.to_string().into()} };
- usize => self { tt::Literal{text: self.to_string().into()}};
- i32 => self { tt::Literal{text: self.to_string().into()}};
+ u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
+ usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
+ i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
tt::Leaf => self { self };
tt::Literal => self { self };
tt::Ident => self { self };
tt::Punct => self { self };
- &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}};
- String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}}
+ &str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
+ String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
}
#[cfg(test)]
let fields =
fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten();
- let list =
- tt::Subtree { delimiter: Some(tt::Delimiter::Brace), token_trees: fields.collect() };
+ let list = tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::Brace,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: fields.collect(),
+ };
let quoted = quote! {
impl Clone for #struct_name {
.token_trees
.iter()
.filter_map(|tt| match tt {
- tt::TokenTree::Subtree(subtree) => max_id(subtree),
+ tt::TokenTree::Subtree(subtree) => {
+ let tree_id = max_id(subtree);
+ match subtree.delimiter {
+ Some(it) if it.id != tt::TokenId::unspecified() => {
+ Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0)))
+ }
+ _ => tree_id,
+ }
+ }
tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
if ident.id != tt::TokenId::unspecified() =>
{
match t {
tt::TokenTree::Leaf(leaf) => match leaf {
tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id),
- _ => (),
+ tt::Leaf::Punct(punct) => punct.id = self.shift(punct.id),
+ tt::Leaf::Literal(lit) => lit.id = self.shift(lit.id),
},
- tt::TokenTree::Subtree(tt) => self.shift_all(tt),
+ tt::TokenTree::Subtree(tt) => {
+ tt.delimiter.as_mut().map(|it: &mut Delimiter| it.id = self.shift(it.id));
+ self.shift_all(tt)
+ }
}
}
}
}
Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {
let rhs = src.expect_subtree().map_err(|()| err!("expected subtree"))?;
- if lhs.delimiter != rhs.delimiter {
+ if lhs.delimiter_kind() != rhs.delimiter_kind() {
bail!("mismatched delimiter")
}
let mut src = TtIter::new(rhs);
let tt = tt::Subtree {
delimiter: None,
token_trees: vec![
- tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone }).into(),
+ tt::Leaf::from(tt::Punct {
+ char: '$',
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ })
+ .into(),
tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() })
.into(),
],
}
Some(tt::TokenTree::Subtree(subtree)) => {
self.cached_cursor.set(cursor.subtree().unwrap());
- cached.push(Some(convert_delim(subtree.delimiter, false)));
+ cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
}
None => {
if let Some(subtree) = cursor.end() {
- cached.push(Some(convert_delim(subtree.delimiter, true)));
+ cached.push(Some(convert_delim(subtree.delimiter_kind(), true)));
self.cached_cursor.set(cursor.bump());
}
}
}
}
-fn convert_delim(d: Option<tt::Delimiter>, closing: bool) -> TtToken {
+fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
let (kinds, texts) = match d {
- Some(tt::Delimiter::Parenthesis) => ([T!['('], T![')']], "()"),
- Some(tt::Delimiter::Brace) => ([T!['{'], T!['}']], "{}"),
- Some(tt::Delimiter::Bracket) => ([T!['['], T![']']], "[]"),
+ Some(tt::DelimiterKind::Parenthesis) => ([T!['('], T![')']], "()"),
+ Some(tt::DelimiterKind::Brace) => ([T!['{'], T!['}']], "{}"),
+ Some(tt::DelimiterKind::Bracket) => ([T!['['], T![']']], "[]"),
None => ([L_DOLLAR, R_DOLLAR], ""),
};
ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
SyntaxTreeBuilder, TextRange, TextUnit, T,
};
+use rustc_hash::FxHashMap;
use std::iter::successors;
use tt::buffer::{Cursor, TokenBuffer};
use crate::subtree_source::SubtreeTokenSource;
use crate::ExpandError;
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum TokenTextRange {
+ Token(TextRange),
+ Delimiter(TextRange, TextRange),
+}
+
+impl TokenTextRange {
+ pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+ match self {
+ TokenTextRange::Token(it) => Some(it),
+ TokenTextRange::Delimiter(open, close) => match kind {
+ T!['{'] | T!['('] | T!['['] => Some(open),
+ T!['}'] | T![')'] | T![']'] => Some(close),
+ _ => None,
+ },
+ }
+ }
+}
+
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct TokenMap {
/// Maps `tt::TokenId` to the *relative* source range.
- entries: Vec<(tt::TokenId, TextRange)>,
+ entries: Vec<(tt::TokenId, TokenTextRange)>,
}
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
impl TokenMap {
pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
- let &(token_id, _) = self.entries.iter().find(|(_, range)| *range == relative_range)?;
+ let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+ TokenTextRange::Token(it) => *it == relative_range,
+ TokenTextRange::Delimiter(open, close) => {
+ *open == relative_range || *close == relative_range
+ }
+ })?;
Some(token_id)
}
- pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
+ pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
Some(range)
}
fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
- self.entries.push((token_id, relative_range));
+ self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+ }
+
+ fn insert_delim(
+ &mut self,
+ token_id: tt::TokenId,
+ open_relative_range: TextRange,
+ close_relative_range: TextRange,
+ ) {
+ self.entries
+ .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
}
}
token_trees.push(mk_punct('!'));
}
token_trees.push(tt::TokenTree::from(tt::Subtree {
- delimiter: Some(tt::Delimiter::Bracket),
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::Bracket,
+ id: tt::TokenId::unspecified(),
+ }),
token_trees: meta_tkns,
}));
}
fn mk_punct(c: char) -> tt::TokenTree {
- tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone }))
+ tt::TokenTree::from(tt::Leaf::from(tt::Punct {
+ char: c,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))
}
fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
- let lit = tt::Literal { text: doc_comment_text(comment) };
+ let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };
tt::TokenTree::from(tt::Leaf::from(lit))
}
.last()
.unwrap();
- let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
- (T!['('], T![')']) => (Some(tt::Delimiter::Parenthesis), true),
- (T!['{'], T!['}']) => (Some(tt::Delimiter::Brace), true),
- (T!['['], T![']']) => (Some(tt::Delimiter::Bracket), true),
+ let (delimiter_kind, skip_first) = match (first_child.kind(), last_child.kind()) {
+ (T!['('], T![')']) => (Some(tt::DelimiterKind::Parenthesis), true),
+ (T!['{'], T!['}']) => (Some(tt::DelimiterKind::Brace), true),
+ (T!['['], T![']']) => (Some(tt::DelimiterKind::Bracket), true),
_ => (None, false),
};
+ let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
+ kind,
+ id: self.alloc_delim(first_child.text_range(), last_child.text_range()),
+ });
let mut token_trees = Vec::new();
let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
.take(token.text().len() - 1)
.chain(std::iter::once(last_spacing));
for (char, spacing) in token.text().chars().zip(spacing_iter) {
- token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+ token_trees.push(
+ tt::Leaf::from(tt::Punct {
+ char,
+ spacing,
+ id: self.alloc(token.text_range()),
+ })
+ .into(),
+ );
}
} else {
- let child: tt::TokenTree =
- if token.kind() == T![true] || token.kind() == T![false] {
- tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
- } else if token.kind().is_keyword()
- || token.kind() == IDENT
- || token.kind() == LIFETIME
- {
- let id = self.alloc(token.text_range());
- let text = token.text().clone();
- tt::Leaf::from(tt::Ident { text, id }).into()
- } else if token.kind().is_literal() {
- tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
- } else {
- return None;
+ macro_rules! make_leaf {
+ ($i:ident) => {
+ tt::$i {
+ id: self.alloc(token.text_range()),
+ text: token.text().clone(),
+ }
+ .into()
};
- token_trees.push(child);
+ }
+
+ let child: tt::Leaf = match token.kind() {
+ T![true] | T![false] => make_leaf!(Literal),
+ IDENT | LIFETIME => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ _ => return None,
+ };
+ token_trees.push(child.into());
}
}
NodeOrToken::Node(node) => {
self.map.insert(token_id, relative_range);
token_id
}
+
+ fn alloc_delim(
+ &mut self,
+ open_abs_range: TextRange,
+ close_abs_range: TextRange,
+ ) -> tt::TokenId {
+ let open_relative_range = open_abs_range - self.global_offset;
+ let close_relative_range = close_abs_range - self.global_offset;
+ let token_id = tt::TokenId(self.next_id);
+ self.next_id += 1;
+
+ self.map.insert_delim(token_id, open_relative_range, close_relative_range);
+ token_id
+ }
}
struct TtTreeSink<'a> {
buf: String,
cursor: Cursor<'a>,
+ open_delims: FxHashMap<tt::TokenId, TextUnit>,
text_pos: TextUnit,
inner: SyntaxTreeBuilder,
token_map: TokenMap,
TtTreeSink {
buf: String::new(),
cursor,
+ open_delims: FxHashMap::default(),
text_pos: 0.into(),
inner: SyntaxTreeBuilder::default(),
roots: smallvec::SmallVec::new(),
}
}
-fn delim_to_str(d: Option<tt::Delimiter>, closing: bool) -> SmolStr {
+fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
let texts = match d {
- Some(tt::Delimiter::Parenthesis) => "()",
- Some(tt::Delimiter::Brace) => "{}",
- Some(tt::Delimiter::Bracket) => "[]",
+ Some(tt::DelimiterKind::Parenthesis) => "()",
+ Some(tt::DelimiterKind::Brace) => "{}",
+ Some(tt::DelimiterKind::Bracket) => "[]",
None => return "".into(),
};
break;
}
- match self.cursor.token_tree() {
+ let text: SmolStr = match self.cursor.token_tree() {
Some(tt::TokenTree::Leaf(leaf)) => {
// Mark the range if needed
- if let tt::Leaf::Ident(ident) = leaf {
- if kind == IDENT {
- let range =
- TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
- self.token_map.insert(ident.id, range);
- }
- }
-
+ let id = match leaf {
+ tt::Leaf::Ident(ident) => ident.id,
+ tt::Leaf::Punct(punct) => punct.id,
+ tt::Leaf::Literal(lit) => lit.id,
+ };
+ let text = SmolStr::new(format!("{}", leaf));
+ let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text));
+ self.token_map.insert(id, range);
self.cursor = self.cursor.bump();
- self.buf += &format!("{}", leaf);
+ text
}
Some(tt::TokenTree::Subtree(subtree)) => {
self.cursor = self.cursor.subtree().unwrap();
- self.buf += &delim_to_str(subtree.delimiter, false);
+ if let Some(id) = subtree.delimiter.map(|it| it.id) {
+ self.open_delims.insert(id, self.text_pos);
+ }
+ delim_to_str(subtree.delimiter_kind(), false)
}
None => {
if let Some(parent) = self.cursor.end() {
self.cursor = self.cursor.bump();
- self.buf += &delim_to_str(parent.delimiter, true);
+ if let Some(id) = parent.delimiter.map(|it| it.id) {
+ if let Some(open_delim) = self.open_delims.get(&id) {
+ let open_range =
+ TextRange::offset_len(*open_delim, TextUnit::from_usize(1));
+ let close_range =
+ TextRange::offset_len(self.text_pos, TextUnit::from_usize(1));
+ self.token_map.insert_delim(id, open_range, close_range);
+ }
+ }
+ delim_to_str(parent.delimiter_kind(), true)
+ } else {
+ continue;
}
}
};
+ self.buf += &text;
+ self.text_pos += TextUnit::of_str(&text);
}
- self.text_pos += TextUnit::of_str(&self.buf);
let text = SmolStr::new(self.buf.as_str());
self.buf.clear();
self.inner.token(kind, text);
let token_tree = ast::TokenTree::cast(token_tree).unwrap();
let tt = ast_to_token_tree(&token_tree).unwrap().0;
- assert_eq!(tt.delimiter, Some(tt::Delimiter::Brace));
+ assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
}
#[test]
}
assert_eq!(expansion.token_trees.len(), 3);
- // ($e:ident) => { foo bar $e }
- // 0 1 2 3 4
- assert_eq!(get_id(&expansion.token_trees[0]), Some(2));
- assert_eq!(get_id(&expansion.token_trees[1]), Some(3));
+ // {($e:ident) => { foo bar $e }}
+ // 012345 67 8 9 T 12
+ assert_eq!(get_id(&expansion.token_trees[0]), Some(9));
+ assert_eq!(get_id(&expansion.token_trees[1]), Some(10));
- // So baz should be 5
- assert_eq!(get_id(&expansion.token_trees[2]), Some(5));
+ // The input args of macro call include parentheses:
+ // (baz)
+ // So baz should be 12+1+1
+ assert_eq!(get_id(&expansion.token_trees[2]), Some(14));
+}
+
+#[test]
+fn test_token_map() {
+ use ra_parser::SyntaxKind::*;
+ use ra_syntax::T;
+
+ let macro_definition = r#"
+macro_rules! foobar {
+ ($e:ident) => { fn $e() {} }
+}
+"#;
+ let rules = create_rules(macro_definition);
+ let (expansion, (token_map, content)) = expand_and_map(&rules, "foobar!(baz);");
+
+ let get_text = |id, kind| -> String {
+ content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
+ };
+
+ assert_eq!(expansion.token_trees.len(), 4);
+ // {($e:ident) => { fn $e() {} }}
+ // 012345 67 8 9 T12 3
+
+ assert_eq!(get_text(tt::TokenId(9), IDENT), "fn");
+ assert_eq!(get_text(tt::TokenId(12), T!['(']), "(");
+ assert_eq!(get_text(tt::TokenId(13), T!['{']), "{");
}
#[test]
rules.expand(&invocation_tt).unwrap()
}
+pub(crate) fn expand_and_map(
+ rules: &MacroRules,
+ invocation: &str,
+) -> (tt::Subtree, (TokenMap, String)) {
+ let source_file = ast::SourceFile::parse(invocation).ok().unwrap();
+ let macro_invocation =
+ source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
+
+    let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
+ let expanded = rules.expand(&invocation_tt).unwrap();
+
+ let (node, expanded_token_tree) =
+ token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
+
+ (expanded, (expanded_token_tree, node.syntax_node().to_string()))
+}
+
pub(crate) enum MacroKind {
Items,
Stmts,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
-pub enum Delimiter {
+pub struct Delimiter {
+ pub id: TokenId,
+ pub kind: DelimiterKind,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum DelimiterKind {
Parenthesis,
Brace,
Bracket,
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Literal {
pub text: SmolStr,
+ pub id: TokenId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Punct {
pub char: char,
pub spacing: Spacing,
+ pub id: TokenId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
impl fmt::Display for Subtree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let (l, r) = match self.delimiter {
- Some(Delimiter::Parenthesis) => ("(", ")"),
- Some(Delimiter::Brace) => ("{", "}"),
- Some(Delimiter::Bracket) => ("[", "]"),
+ let (l, r) = match self.delimiter_kind() {
+ Some(DelimiterKind::Parenthesis) => ("(", ")"),
+ Some(DelimiterKind::Brace) => ("{", "}"),
+ Some(DelimiterKind::Bracket) => ("[", "]"),
None => ("", ""),
};
f.write_str(l)?;
self.token_trees.len() + children_count
}
+
+ pub fn delimiter_kind(&self) -> Option<DelimiterKind> {
+ self.delimiter.map(|it| it.kind)
+ }
}
pub mod buffer;
* (Optionally) bind commands like `rust-analyzer-join-lines`, `rust-analyzer-extend-selection` and `rust-analyzer-expand-macro` to keys, and enable `rust-analyzer-inlay-hints-mode` to get inline type hints
-## Vim and NeoVim
+## Vim and NeoVim (coc-rust-analyzer)
-Neovim 0.5 has a built in language server. For a quick start configuration of
-rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer).
-Once `neovim/nvim-lsp` is installed, you can use `call nvim_lsp#setup("rust_analyzer", {})`
-or `lua require'nvim_lsp'.rust_analyzer.setup({})` to quickly get set up.
-
-* Install coc.nvim by following the instructions at [coc.nvim]
- - You will need nodejs installed.
- - You may want to include some of the sample vim configurations [from here][coc-vim-conf]
- - Note that if you use a plugin manager other than `vim-plug`, you may need to manually
- checkout the `release` branch wherever your plugin manager cloned it. Otherwise you will
- get errors about a missing javascript file.
-* Run `:CocInstall coc-rust-analyzer` to install [coc-rust-analyzer], this extension implemented _most_ of the features supported in the VSCode extension:
+* Install coc.nvim by following the instructions at [coc.nvim][] (nodejs required)
+* Run `:CocInstall coc-rust-analyzer` to install [coc-rust-analyzer], this extension implements _most_ of the features supported in the VSCode extension:
- same configurations as VSCode extension, `rust-analyzer.raLspServerPath`, `rust-analyzer.enableCargoWatchOnStartup` etc.
- same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc.
- highlighting and inlay_hints are not implemented yet
[coc.nvim]: https://github.com/neoclide/coc.nvim
-[coc-vim-conf]: https://github.com/neoclide/coc.nvim/#example-vim-configuration
[coc-rust-analyzer]: https://github.com/fannheyward/coc-rust-analyzer
-## Vim and NeoVim Alternative
+## Vim and NeoVim (LanguageClient-neovim)
* Install LanguageClient-neovim by following the instructions [here][lang-client-neovim]
- - No extra run-time is required as this server is written in Rust
- The github project wiki has extra tips on configuration
* Configure by adding this to your vim/neovim config file (replacing the existing rust specific line if it exists):
-```
+```vim
let g:LanguageClient_serverCommands = {
\ 'rust': ['ra_lsp_server'],
\ }
[lang-client-neovim]: https://github.com/autozimu/LanguageClient-neovim
+## NeoVim (nvim-lsp)
+
+NeoVim 0.5 (not yet released) has built in language server support. For a quick start configuration
+of rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer).
+Once `neovim/nvim-lsp` is installed, you can use `call nvim_lsp#setup("rust_analyzer", {})`
+or `lua require'nvim_lsp'.rust_analyzer.setup({})` to quickly get set up.
+
## Sublime Text 3