fix: insert whitespaces into assoc items for assist when macro generated

diff --git a/crates/hir/src/semantics.rs b/crates/hir/src/semantics.rs
index 7c2097f00e4265b8c2f11601d97f602c6b5ae6df..75f6b025779587ec935b97b721005d16a043ba8f 100644
--- a/crates/hir/src/semantics.rs
+++ b/crates/hir/src/semantics.rs
@@ -25,9 +25,9 @@
     db::HirDatabase,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
     source_analyzer::{resolve_hir_path, resolve_hir_path_as_macro, SourceAnalyzer},
-    Access, AssocItem, Callable, ConstParam, Crate, Field, Function, HasSource, HirFileId, Impl,
-    InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path, ScopeDef, Trait,
-    Type, TypeAlias, TypeParam, VariantDef,
+    Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
+    HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path,
+    ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -43,6 +43,8 @@ pub enum PathResolution {
     SelfType(Impl),
     Macro(MacroDef),
     AssocItem(AssocItem),
+    BuiltinAttr(BuiltinAttr),
+    ToolModule(ToolModule),
 }
 
 impl PathResolution {
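
The two new variants let `resolve_path` answer for attribute paths like
`#[rustfmt::skip]`, where `rustfmt` resolves to a tool module and built-in
attributes resolve to neither a `ModuleDef` nor a macro. A minimal sketch of
how a consumer might branch on them; the `describe` helper is hypothetical,
not rust-analyzer API:

    use hir::PathResolution;

    // Hypothetical helper: classify the new resolution kinds for display.
    fn describe(res: &PathResolution) -> &'static str {
        match res {
            PathResolution::BuiltinAttr(_) => "a built-in attribute such as `inline`",
            PathResolution::ToolModule(_) => "a tool module such as `rustfmt` or `clippy`",
            _ => "a previously supported resolution kind",
        }
    }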
@@ -63,9 +65,11 @@ fn in_type_ns(&self) -> Option<TypeNs> {
             PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                 Some(TypeNs::TypeAliasId((*alias).into()))
             }
-            PathResolution::Local(_) | PathResolution::Macro(_) | PathResolution::ConstParam(_) => {
-                None
-            }
+            PathResolution::BuiltinAttr(_)
+            | PathResolution::ToolModule(_)
+            | PathResolution::Local(_)
+            | PathResolution::Macro(_)
+            | PathResolution::ConstParam(_) => None,
             PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
             PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
             PathResolution::AssocItem(AssocItem::Const(_) | AssocItem::Function(_)) => None,
@@ -143,6 +147,10 @@ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
         self.imp.parse(file_id)
     }
 
+    pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        self.imp.parse_or_expand(file_id)
+    }
+
     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         self.imp.expand(macro_call)
     }
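
`parse_or_expand` generalizes `parse` from `FileId` to `HirFileId`, so callers
get a syntax tree for a macro expansion and for an on-disk file through the
same caching entry point. A hedged usage sketch, assuming a `Semantics` value
`sema` and a `HirFileId` named `hir_file_id` already in scope:

    // One entry point for both on-disk files and macro expansions.
    if let Some(root) = sema.parse_or_expand(hir_file_id) {
        // `root` is the file's (or expansion's) `SyntaxNode`, cached so that
        // later lookups can map nodes back to their file.
        println!("{} descendant nodes", root.descendants().count());
    }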
@@ -178,10 +186,12 @@ pub fn speculative_expand_attr_macro(
         self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
     }
 
+    /// Descend the token into macro calls to its first mapped counterpart.
     pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros(token).pop().unwrap()
+        self.imp.descend_into_macros_single(token)
     }
 
+    /// Descend the token into macro calls to all its mapped counterparts.
     pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
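
The split gives the single-token case its own code path instead of
`pop().unwrap()` on the full result, which collected every counterpart only to
throw all but one away. The contrast at a call site, assuming `sema` and a
`SyntaxToken` from the unexpanded file:

    // All mapped counterparts: one token can map into several expansions.
    let all = sema.descend_into_macros(token.clone());
    // First counterpart only; yields the token itself if it maps nowhere.
    let one = sema.descend_into_macros_single(token);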
@@ -226,6 +236,7 @@ pub fn token_ancestors_with_macros(
         token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
     }
 
+    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
     pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
         self.imp.ancestors_with_macros(node)
     }
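
The new doc line is worth having because this iterator is easy to confuse with
plain `SyntaxNode::ancestors()`: when it reaches the root of an expansion, it
continues at the corresponding macro call in the calling file. A usage sketch
with hypothetical bindings `sema` and `node`:

    // Find the enclosing function item even when `node` lives inside an
    // attribute-macro expansion.
    let enclosing_fn = sema.ancestors_with_macros(node).find_map(ast::Fn::cast);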
@@ -409,6 +420,12 @@ fn parse(&self, file_id: FileId) -> ast::SourceFile {
         tree
     }
 
+    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        let node = self.db.parse_or_expand(file_id)?;
+        self.cache(node.clone(), file_id);
+        Some(node)
+    }
+
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         let sa = self.analyze(macro_call.syntax());
         let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
@@ -509,47 +526,70 @@ fn speculative_expand_attr(
         };
 
         if first == last {
-            self.descend_into_macros_impl(first, |InFile { value, .. }| {
-                if let Some(node) = value.ancestors().find_map(N::cast) {
-                    res.push(node)
-                }
-            });
+            self.descend_into_macros_impl(
+                first,
+                |InFile { value, .. }| {
+                    if let Some(node) = value.ancestors().find_map(N::cast) {
+                        res.push(node)
+                    }
+                },
+                false,
+            );
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(first, |token| {
-                scratch.push(token);
-            });
+            self.descend_into_macros_impl(
+                first,
+                |token| {
+                    scratch.push(token);
+                },
+                false,
+            );
 
             let mut scratch = scratch.into_iter();
-            self.descend_into_macros_impl(last, |InFile { value: last, file_id: last_fid }| {
-                if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
-                    if first_fid == last_fid {
-                        if let Some(p) = first.parent() {
-                            let range = first.text_range().cover(last.text_range());
-                            let node = find_root(&p)
-                                .covering_element(range)
-                                .ancestors()
-                                .take_while(|it| it.text_range() == range)
-                                .find_map(N::cast);
-                            if let Some(node) = node {
-                                res.push(node);
+            self.descend_into_macros_impl(
+                last,
+                |InFile { value: last, file_id: last_fid }| {
+                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+                        if first_fid == last_fid {
+                            if let Some(p) = first.parent() {
+                                let range = first.text_range().cover(last.text_range());
+                                let node = find_root(&p)
+                                    .covering_element(range)
+                                    .ancestors()
+                                    .take_while(|it| it.text_range() == range)
+                                    .find_map(N::cast);
+                                if let Some(node) = node {
+                                    res.push(node);
+                                }
                             }
                         }
                     }
-                }
-            });
+                },
+                false,
+            );
         }
         res
     }
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value));
+        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
         res
     }
 
-    fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<SyntaxToken>)) {
+    fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+        let mut res = token.clone();
+        self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
+        res
+    }
+
+    fn descend_into_macros_impl(
+        &self,
+        token: SyntaxToken,
+        mut f: impl FnMut(InFile<SyntaxToken>),
+        single: bool,
+    ) {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
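
A note on the first/last zip above: both tokens are descended in the same
order, so pairing `scratch` against the descendants of `last` lines the file
ids up, and locating the node reduces to an exact covering-range walk. That
walk in isolation, assuming the `syntax` crate's rowan-style types:

    use syntax::{AstNode, SyntaxNode, TextRange};

    // Walk from the smallest covering element outwards while the range still
    // matches exactly, returning the first node of the requested AST type.
    fn exact_cover<N: AstNode>(root: &SyntaxNode, range: TextRange) -> Option<N> {
        root.covering_element(range)
            .ancestors()
            .take_while(|node| node.text_range() == range)
            .find_map(N::cast)
    }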
@@ -572,11 +612,16 @@ fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<
                     self.cache(value, file_id);
                 }
 
-                let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+                let mut mapped_tokens =
+                    expansion_info.map_token_down(self.db.upcast(), item, token)?;
 
                 let len = stack.len();
                 // requeue the tokens we got from mapping our current token down
-                stack.extend(mapped_tokens);
+                if single {
+                    stack.extend(mapped_tokens.next());
+                } else {
+                    stack.extend(mapped_tokens);
+                }
                 // if the length changed we have found a mapping for the token
                 (stack.len() != len).then(|| ())
             };
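
The `single` flag reuses the existing stack machinery: `Option<T>` implements
`IntoIterator`, so `stack.extend(mapped_tokens.next())` pushes at most one
token, and the length comparison still reports whether any mapping was found.
The same shape in isolation, as a runnable sketch:

    // Extend a work stack with either the first mapped item or all of them;
    // the return value says whether anything was found.
    fn requeue<T>(stack: &mut Vec<T>, mut mapped: impl Iterator<Item = T>, single: bool) -> bool {
        let len = stack.len();
        if single {
            stack.extend(mapped.next()); // `Option<T>` iterates zero or one items
        } else {
            stack.extend(mapped);
        }
        stack.len() != len
    }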
@@ -808,7 +853,7 @@ fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
     fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<MacroDef> {
         let item_in_file = self.find_file(item.syntax().clone()).with_value(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(item_in_file))?;
-        Some(MacroDef { id: self.db.lookup_intern_macro(macro_call_id).def })
+        Some(MacroDef { id: self.db.lookup_intern_macro_call(macro_call_id).def })
     }
 
     fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
@@ -857,13 +902,13 @@ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let sa = self.analyze(node);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze(node);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let sa = self.analyze_with_offset(node, offset);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
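
The destructuring rewrite is cosmetic but tidy: binding `file_id` and
`resolver` once lets the `SemanticsScope` literal use field shorthand. The
idiom in miniature, with standalone stand-ins for the real types:

    struct SourceAnalyzer { file_id: u32, resolver: String, body: Option<String> }
    struct SemanticsScope { file_id: u32, resolver: String }

    fn scope(sa: SourceAnalyzer) -> SemanticsScope {
        // `..` drops the fields the scope does not need; field shorthand then
        // reuses the bound names directly.
        let SourceAnalyzer { file_id, resolver, .. } = sa;
        SemanticsScope { file_id, resolver }
    }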
@@ -884,9 +929,11 @@ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
         self.analyze_impl(node, None)
     }
+
     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
         self.analyze_impl(node, Some(offset))
     }
+
     fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
@@ -1117,8 +1164,7 @@ pub fn krate(&self) -> Option<Crate> {
     }
 
     /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
-    // FIXME: rename to visible_traits to not repeat scope?
-    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
+    pub fn visible_traits(&self) -> FxHashSet<TraitId> {
         let resolver = &self.resolver;
         resolver.traits_in_scope(self.db.upcast())
     }
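
The rename also resolves the removed FIXME: `scope.visible_traits()` no longer
repeats "scope" the way `scope.traits_in_scope()` did. Call sites update
mechanically; a sketch assuming a `SemanticsScope` named `scope`:

    // Before: let traits = scope.traits_in_scope();
    let traits: FxHashSet<TraitId> = scope.visible_traits();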