fix: Don't duplicate attribute completions
crates/ide_completion/src/completions/attribute.rs
index 2b130cecf5cbe116d1511bee76181d47a4637fd4..d642c8bc4df18bc0d4d18cd91713ebf7ed4246ee 100644
@@ -2,18 +2,21 @@
 //!
 //! This module uses a bit of static metadata to provide completions
 //! for built-in attributes.
+//! Non-builtin attribute (excluding derive attributes) completions are done in [`super::unqualified_path`].
 
-use hir::HasAttrs;
-use ide_db::helpers::generated_lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES};
+use ide_db::{
+    helpers::{
+        generated_lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES, RUSTDOC_LINTS},
+        parse_tt_as_comma_sep_paths,
+    },
+    SymbolKind,
+};
+use itertools::Itertools;
 use once_cell::sync::Lazy;
-use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{algo::non_trivia_sibling, ast, AstNode, Direction, NodeOrToken, SyntaxKind, T};
+use rustc_hash::FxHashMap;
+use syntax::{algo::non_trivia_sibling, ast, AstNode, Direction, SyntaxKind, T};
 
-use crate::{
-    context::CompletionContext,
-    item::{CompletionItem, CompletionItemKind, CompletionKind},
-    Completions,
-};
+use crate::{context::CompletionContext, item::CompletionItem, Completions};
 
 mod cfg;
 mod derive;
@@ -27,21 +30,23 @@ pub(crate) fn complete_attribute(acc: &mut Completions, ctx: &CompletionContext)
         None => None,
     };
     match (name_ref, attribute.token_tree()) {
-        (Some(path), Some(token_tree)) => match path.text().as_str() {
-            "derive" => derive::complete_derive(acc, ctx, token_tree),
-            "repr" => repr::complete_repr(acc, ctx, token_tree),
-            "feature" => lint::complete_lint(acc, ctx, token_tree, FEATURES),
+        (Some(path), Some(tt)) if tt.l_paren_token().is_some() => match path.text().as_str() {
+            "repr" => repr::complete_repr(acc, ctx, tt),
+            "derive" => derive::complete_derive(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?),
+            "feature" => lint::complete_lint(acc, ctx, &parse_tt_as_comma_sep_paths(tt)?, FEATURES),
             "allow" | "warn" | "deny" | "forbid" => {
-                lint::complete_lint(acc, ctx, token_tree.clone(), DEFAULT_LINTS);
-                lint::complete_lint(acc, ctx, token_tree, CLIPPY_LINTS);
+                let existing_lints = parse_tt_as_comma_sep_paths(tt)?;
+                lint::complete_lint(acc, ctx, &existing_lints, DEFAULT_LINTS);
+                lint::complete_lint(acc, ctx, &existing_lints, CLIPPY_LINTS);
+                lint::complete_lint(acc, ctx, &existing_lints, RUSTDOC_LINTS);
             }
             "cfg" => {
                 cfg::complete_cfg(acc, ctx);
             }
             _ => (),
         },
-        (None, Some(_)) => (),
-        _ => complete_new_attribute(acc, ctx, attribute),
+        (_, Some(_)) => (),
+        (_, None) => complete_new_attribute(acc, ctx, attribute),
     }
     Some(())
 }
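
The rewritten dispatch only fires when the attribute's token tree has a written `(`, and the helpers are chained with `?` so the function can bail out early, which is why it returns `Option<()>` and ends in `Some(())`. A minimal sketch of that idiom, using a hypothetical string-based helper as a stand-in for ide_db's `parse_tt_as_comma_sep_paths` (not the real API):

    // Sketch only: the `Option<()>` early-return idiom, with a hypothetical
    // text-based stand-in for `parse_tt_as_comma_sep_paths`.
    fn complete_sketch(token_tree: &str) -> Option<()> {
        // `?` bails out of the whole completion when the input is malformed,
        // instead of nesting `if let` around every match arm.
        let entries = parse_comma_sep(token_tree)?;
        for entry in entries {
            println!("would complete next to `{entry}`");
        }
        Some(())
    }

    // Hypothetical helper: split "(a, b::c)" into its comma-separated entries.
    fn parse_comma_sep(token_tree: &str) -> Option<Vec<String>> {
        let inner = token_tree.strip_prefix('(')?.strip_suffix(')')?;
        Some(inner.split(',').map(|s| s.trim().to_owned()).filter(|s| !s.is_empty()).collect())
    }

    fn main() {
        assert!(complete_sketch("(dead_code, clippy::needless_return)").is_some());
        assert!(complete_sketch("no parentheses").is_none());
    }
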
@@ -63,12 +68,8 @@ fn complete_new_attribute(acc: &mut Completions, ctx: &CompletionContext, attrib
     });
 
     let add_completion = |attr_completion: &AttrCompletion| {
-        let mut item = CompletionItem::new(
-            CompletionKind::Attribute,
-            ctx.source_range(),
-            attr_completion.label,
-        );
-        item.kind(CompletionItemKind::Attribute);
+        let mut item =
+            CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), attr_completion.label);
 
         if let Some(lookup) = attr_completion.lookup {
             item.lookup_by(lookup);
@@ -92,24 +93,6 @@ fn complete_new_attribute(acc: &mut Completions, ctx: &CompletionContext, attrib
         None if is_inner => ATTRIBUTES.iter().for_each(add_completion),
         None => ATTRIBUTES.iter().filter(|compl| !compl.prefer_inner).for_each(add_completion),
     }
-
-    // FIXME: write a test for this when we can
-    ctx.scope.process_all_names(&mut |name, scope_def| {
-        if let hir::ScopeDef::MacroDef(mac) = scope_def {
-            if mac.kind() == hir::MacroKind::Attr {
-                let mut item = CompletionItem::new(
-                    CompletionKind::Attribute,
-                    ctx.source_range(),
-                    name.to_string(),
-                );
-                item.kind(CompletionItemKind::Attribute);
-                if let Some(docs) = mac.docs(ctx.sema.db) {
-                    item.documentation(docs);
-                }
-                item.add_to(acc);
-            }
-        }
-    });
 }
 
 struct AttrCompletion {
@@ -172,7 +155,7 @@ macro_rules! attrs {
 #[rustfmt::skip]
 static KIND_TO_ATTRIBUTES: Lazy<FxHashMap<SyntaxKind, &[&str]>> = Lazy::new(|| {
     use SyntaxKind::*;
-    std::array::IntoIter::new([
+    [
         (
             SOURCE_FILE,
             attrs!(
@@ -224,7 +207,8 @@ macro_rules! attrs {
         (MATCH_ARM, attrs!()),
         (IDENT_PAT, attrs!()),
         (RECORD_PAT_FIELD, attrs!()),
-    ])
+    ]
+    .into_iter()
     .collect()
 });
 const EXPR_ATTRIBUTES: &[&str] = attrs!();
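
The two hunks above drop the `std::array::IntoIter::new([...])` shim in favour of `[...].into_iter().collect()`: arrays implement by-value `IntoIterator` since Rust 1.53, and under the 2021 edition the `.into_iter()` method call on an array resolves to that impl. A standalone illustration of the same collect-from-array-literal shape, with std `HashMap` standing in for `FxHashMap`:

    use std::collections::HashMap;

    fn main() {
        // Array literal of (key, value) pairs collected straight into a map,
        // mirroring the shape of KIND_TO_ATTRIBUTES above.
        let kind_to_attrs: HashMap<&str, Vec<&str>> = [
            ("SOURCE_FILE", vec!["allow", "deny", "feature"]),
            ("MATCH_ARM", vec![]),
        ]
        .into_iter()
        .collect();

        assert!(kind_to_attrs["MATCH_ARM"].is_empty());
    }
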
@@ -303,31 +287,21 @@ macro_rules! attrs {
     .prefer_inner(),
 ];
 
-fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Option<FxHashSet<String>> {
-    let (l_paren, r_paren) = derive_input.l_paren_token().zip(derive_input.r_paren_token())?;
-    let mut input_derives = FxHashSet::default();
-    let mut tokens = derive_input
+fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
+    let r_paren = input.r_paren_token()?;
+    let tokens = input
         .syntax()
         .children_with_tokens()
-        .filter_map(NodeOrToken::into_token)
-        .skip_while(|token| token != &l_paren)
         .skip(1)
-        .take_while(|token| token != &r_paren)
-        .peekable();
-    let mut input = String::new();
-    while tokens.peek().is_some() {
-        for token in tokens.by_ref().take_while(|t| t.kind() != T![,]) {
-            input.push_str(token.text());
-        }
-
-        if !input.is_empty() {
-            input_derives.insert(input.trim().to_owned());
-        }
-
-        input.clear();
-    }
-
-    Some(input_derives)
+        .take_while(|it| it.as_token() != Some(&r_paren));
+    let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
+    Some(
+        input_expressions
+            .into_iter()
+            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+            .filter_map(|mut tokens| ast::Expr::parse(&tokens.join("")).ok())
+            .collect::<Vec<ast::Expr>>(),
+    )
 }
 
 #[test]
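
The rewritten `parse_comma_sep_expr` splits the tokens between the parentheses on commas with `Itertools::group_by` and parses each chunk as an expression; the `parse_tt_as_comma_sep_paths` helper used in `complete_attribute` presumably follows the same pattern for paths. A dependency-light sketch of just the grouping step (itertools 0.10-era `group_by`, as in the patch), using plain strings as stand-ins for syntax tokens:

    use itertools::Itertools;

    fn main() {
        // Stand-ins for the tokens between `(` and `)` of a token tree.
        let tokens = vec!["dead_code", ",", "clippy", "::", "needless_return", ",", "unused"];

        // Group consecutive tokens by "is this a comma?", drop the comma groups,
        // and glue each remaining group back together, as parse_comma_sep_expr does.
        let groups = tokens.into_iter().group_by(|tok| *tok == ",");
        let chunks: Vec<String> = groups
            .into_iter()
            .filter_map(|(is_sep, group)| (!is_sep).then(|| group.collect::<String>()))
            .collect();

        assert_eq!(chunks, ["dead_code", "clippy::needless_return", "unused"]);
    }
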