]> git.lizzy.rs Git - rust.git/blobdiff - crates/hir/src/semantics.rs
fix: insert whitespaces into assoc items for assist when macro generated
[rust.git] / crates / hir / src / semantics.rs
index 5cda6cc1e96855b3a93b644cfeb9699a3f6aafd6..75f6b025779587ec935b97b721005d16a043ba8f 100644 (file)
 use smallvec::{smallvec, SmallVec};
 use syntax::{
     algo::skip_trivia_token,
-    ast::{self, GenericParamsOwner, LoopBodyOwner},
-    match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize,
+    ast::{self, HasAttrs, HasGenericParams, HasLoopBody},
+    match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
 };
 
 use crate::{
     db::HirDatabase,
     semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
-    source_analyzer::{resolve_hir_path, SourceAnalyzer},
-    Access, AssocItem, Callable, ConstParam, Crate, Field, Function, HirFileId, Impl, InFile,
-    Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path, ScopeDef, Trait, Type,
-    TypeAlias, TypeParam, VariantDef,
+    source_analyzer::{resolve_hir_path, resolve_hir_path_as_macro, SourceAnalyzer},
+    Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
+    HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path,
+    ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -43,6 +43,8 @@ pub enum PathResolution {
     SelfType(Impl),
     Macro(MacroDef),
     AssocItem(AssocItem),
+    BuiltinAttr(BuiltinAttr),
+    ToolModule(ToolModule),
 }
 
 impl PathResolution {
@@ -63,9 +65,11 @@ fn in_type_ns(&self) -> Option<TypeNs> {
             PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                 Some(TypeNs::TypeAliasId((*alias).into()))
             }
-            PathResolution::Local(_) | PathResolution::Macro(_) | PathResolution::ConstParam(_) => {
-                None
-            }
+            PathResolution::BuiltinAttr(_)
+            | PathResolution::ToolModule(_)
+            | PathResolution::Local(_)
+            | PathResolution::Macro(_)
+            | PathResolution::ConstParam(_) => None,
             PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
             PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
             PathResolution::AssocItem(AssocItem::Const(_) | AssocItem::Function(_)) => None,
@@ -121,7 +125,10 @@ pub struct SemanticsImpl<'db> {
     pub db: &'db dyn HirDatabase,
     s2d_cache: RefCell<SourceToDefCache>,
     expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
+    // Root node to HirFileId cache
     cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+    // MacroCall to its expansion's HirFileId cache
+    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
 }
 
 impl<DB> fmt::Debug for Semantics<'_, DB> {
@@ -140,6 +147,10 @@ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
         self.imp.parse(file_id)
     }
 
+    pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        self.imp.parse_or_expand(file_id)
+    }
+
     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         self.imp.expand(macro_call)
     }
@@ -175,13 +186,13 @@ pub fn speculative_expand_attr_macro(
         self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
     }
 
-    // FIXME: Rename to descend_into_macros_single
-    pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros(token).pop().unwrap()
+    /// Descend the token into macrocalls to its first mapped counterpart.
+    pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros_single(token)
     }
 
-    // FIXME: Rename to descend_into_macros
-    pub fn descend_into_macros_many(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+    /// Descend the token into macrocalls to all its mapped counterparts.
+    pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         self.imp.descend_into_macros(token)
     }
 
@@ -190,6 +201,14 @@ pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
         self.imp.descend_node_into_attributes(node)
     }
 
+    /// Search for a definition's source and cache its syntax tree
+    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+    where
+        Def::Ast: AstNode,
+    {
+        self.imp.source(def)
+    }
+
     pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
         self.imp.find_file(syntax_node.clone()).file_id
     }
@@ -202,6 +221,10 @@ pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
         self.imp.original_range_opt(node)
     }
 
+    pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+        self.imp.original_ast_node(node)
+    }
+
     pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
         self.imp.diagnostics_display_range(diagnostics)
     }
@@ -213,6 +236,7 @@ pub fn token_ancestors_with_macros(
         token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
     }
 
+    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
     pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
         self.imp.ancestors_with_macros(node)
     }
@@ -386,6 +410,7 @@ fn new(db: &'db dyn HirDatabase) -> Self {
             s2d_cache: Default::default(),
             cache: Default::default(),
             expansion_info_cache: Default::default(),
+            macro_call_cache: Default::default(),
         }
     }
 
@@ -395,6 +420,12 @@ fn parse(&self, file_id: FileId) -> ast::SourceFile {
         tree
     }
 
+    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        let node = self.db.parse_or_expand(file_id)?;
+        self.cache(node.clone(), file_id);
+        Some(node)
+    }
+
     fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         let sa = self.analyze(macro_call.syntax());
         let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
@@ -495,115 +526,158 @@ fn speculative_expand_attr(
         };
 
         if first == last {
-            self.descend_into_macros_impl(first, |InFile { value, .. }| {
-                if let Some(node) = value.ancestors().find_map(N::cast) {
-                    res.push(node)
-                }
-            });
+            self.descend_into_macros_impl(
+                first,
+                |InFile { value, .. }| {
+                    if let Some(node) = value.ancestors().find_map(N::cast) {
+                        res.push(node)
+                    }
+                },
+                false,
+            );
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(first, |token| {
-                scratch.push(token);
-            });
+            self.descend_into_macros_impl(
+                first,
+                |token| {
+                    scratch.push(token);
+                },
+                false,
+            );
 
             let mut scratch = scratch.into_iter();
-            self.descend_into_macros_impl(last, |InFile { value: last, file_id: last_fid }| {
-                if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
-                    if first_fid == last_fid {
-                        if let Some(p) = first.parent() {
-                            let range = first.text_range().cover(last.text_range());
-                            let node = find_root(&p)
-                                .covering_element(range)
-                                .ancestors()
-                                .take_while(|it| it.text_range() == range)
-                                .find_map(N::cast);
-                            if let Some(node) = node {
-                                res.push(node);
+            self.descend_into_macros_impl(
+                last,
+                |InFile { value: last, file_id: last_fid }| {
+                    if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+                        if first_fid == last_fid {
+                            if let Some(p) = first.parent() {
+                                let range = first.text_range().cover(last.text_range());
+                                let node = find_root(&p)
+                                    .covering_element(range)
+                                    .ancestors()
+                                    .take_while(|it| it.text_range() == range)
+                                    .find_map(N::cast);
+                                if let Some(node) = node {
+                                    res.push(node);
+                                }
                             }
                         }
                     }
-                }
-            });
+                },
+                false,
+            );
         }
         res
     }
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value));
+        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
+        res
+    }
+
+    fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+        let mut res = token.clone();
+        self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
         res
     }
 
-    fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<SyntaxToken>)) {
+    fn descend_into_macros_impl(
+        &self,
+        token: SyntaxToken,
+        mut f: impl FnMut(InFile<SyntaxToken>),
+        single: bool,
+    ) {
         let _p = profile::span("descend_into_macros");
         let parent = match token.parent() {
             Some(it) => it,
             None => return,
         };
         let sa = self.analyze(&parent);
-        let mut queue = vec![InFile::new(sa.file_id, token)];
+        let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
+        let mut mcache = self.macro_call_cache.borrow_mut();
+
+        let mut process_expansion_for_token =
+            |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+                let expansion_info = cache
+                    .entry(macro_file)
+                    .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+                    .as_ref()?;
+
+                {
+                    let InFile { file_id, value } = expansion_info.expanded();
+                    self.cache(value, file_id);
+                }
+
+                let mut mapped_tokens =
+                    expansion_info.map_token_down(self.db.upcast(), item, token)?;
+
+                let len = stack.len();
+                // requeue the tokens we got from mapping our current token down
+                if single {
+                    stack.extend(mapped_tokens.next());
+                } else {
+                    stack.extend(mapped_tokens);
+                }
+                // if the length changed we have found a mapping for the token
+                (stack.len() != len).then(|| ())
+            };
+
         // Remap the next token in the queue into a macro call its in, if it is not being remapped
         // either due to not being in a macro-call or because its unused push it into the result vec,
         // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
-        while let Some(token) = queue.pop() {
+        while let Some(token) = stack.pop() {
             self.db.unwind_if_cancelled();
             let was_not_remapped = (|| {
-                if let Some((call_id, item)) = token
-                    .value
-                    .ancestors()
-                    .filter_map(ast::Item::cast)
-                    .filter_map(|item| {
-                        self.with_ctx(|ctx| ctx.item_to_macro_call(token.with_value(item.clone())))
-                            .zip(Some(item))
+                // are we inside an attribute macro call
+                let containing_attribute_macro_call = self.with_ctx(|ctx| {
+                    token.value.ancestors().filter_map(ast::Item::cast).find_map(|item| {
+                        if item.attrs().next().is_none() {
+                            // Don't force populate the dyn cache for items that don't have an attribute anyways
+                            return None;
+                        }
+                        Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
                     })
-                    .last()
-                {
+                });
+                if let Some((call_id, item)) = containing_attribute_macro_call {
                     let file_id = call_id.as_file();
-                    let tokens = cache
-                        .entry(file_id)
-                        .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                        .as_ref()?
-                        .map_token_down(self.db.upcast(), Some(item), token.as_ref())?;
-
-                    let len = queue.len();
-                    queue.extend(tokens.inspect(|token| {
-                        if let Some(parent) = token.value.parent() {
-                            self.cache(find_root(&parent), token.file_id);
-                        }
-                    }));
-                    return (queue.len() != len).then(|| ());
+                    return process_expansion_for_token(
+                        &mut stack,
+                        file_id,
+                        Some(item),
+                        token.as_ref(),
+                    );
                 }
 
-                if let Some(macro_call) = token.value.ancestors().find_map(ast::MacroCall::cast) {
-                    let tt = macro_call.token_tree()?;
-                    let l_delim = match tt.left_delimiter_token() {
-                        Some(it) => it.text_range().end(),
-                        None => tt.syntax().text_range().start(),
-                    };
-                    let r_delim = match tt.right_delimiter_token() {
-                        Some(it) => it.text_range().start(),
-                        None => tt.syntax().text_range().end(),
-                    };
-                    if !TextRange::new(l_delim, r_delim).contains_range(token.value.text_range()) {
+                // or are we inside a function-like macro call
+                if let Some(tt) =
+                    // FIXME replace map.while_some with take_while once stable
+                    token.value.ancestors().map(ast::TokenTree::cast).while_some().last()
+                {
+                    let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
+                    if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+                        return None;
+                    }
+                    if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
                         return None;
                     }
-                    let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
-                    let tokens = cache
-                        .entry(file_id)
-                        .or_insert_with(|| file_id.expansion_info(self.db.upcast()))
-                        .as_ref()?
-                        .map_token_down(self.db.upcast(), None, token.as_ref())?;
-
-                    let len = queue.len();
-                    queue.extend(tokens.inspect(|token| {
-                        if let Some(parent) = token.value.parent() {
-                            self.cache(find_root(&parent), token.file_id);
+
+                    let mcall = token.with_value(macro_call);
+                    let file_id = match mcache.get(&mcall) {
+                        Some(&it) => it,
+                        None => {
+                            let it = sa.expand(self.db, mcall.as_ref())?;
+                            mcache.insert(mcall, it);
+                            it
                         }
-                    }));
-                    return (queue.len() != len).then(|| ());
+                    };
+                    return process_expansion_for_token(&mut stack, file_id, None, token.as_ref());
                 }
+
+                // outside of a macro invocation so this is a "final" token
                 None
             })()
             .is_none();
@@ -647,6 +721,11 @@ fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
         node.as_ref().original_file_range_opt(self.db.upcast())
     }
 
+    fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+        let file = self.find_file(node.syntax().clone());
+        file.with_value(node).original_ast_node(self.db.upcast()).map(|it| it.value)
+    }
+
     fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
         let root = self.db.parse_or_expand(src.file_id).unwrap();
         let node = src.value.to_node(&root);
@@ -682,7 +761,7 @@ fn ancestors_at_offset_with_macros(
     fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
         let text = lifetime.text();
         let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
-            let gpl = ast::DynGenericParamsOwner::cast(syn)?.generic_param_list()?;
+            let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
             gpl.lifetime_params()
                 .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
         })?;
@@ -774,7 +853,7 @@ fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
     fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<MacroDef> {
         let item_in_file = self.find_file(item.syntax().clone()).with_value(item.clone());
         let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(item_in_file))?;
-        Some(MacroDef { id: self.db.lookup_intern_macro(macro_call_id).def })
+        Some(MacroDef { id: self.db.lookup_intern_macro_call(macro_call_id).def })
     }
 
     fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
@@ -823,13 +902,13 @@ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
     }
 
     fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
-        let sa = self.analyze(node);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze(node);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
-        let sa = self.analyze_with_offset(node, offset);
-        SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+        let SourceAnalyzer { file_id, resolver, .. } = self.analyze_with_offset(node, offset);
+        SemanticsScope { db: self.db, file_id, resolver }
     }
 
     fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
@@ -838,12 +917,23 @@ fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
         SemanticsScope { db: self.db, file_id, resolver }
     }
 
+    fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+    where
+        Def::Ast: AstNode,
+    {
+        let res = def.source(self.db)?;
+        self.cache(find_root(res.value.syntax()), res.file_id);
+        Some(res)
+    }
+
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
         self.analyze_impl(node, None)
     }
+
     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
         self.analyze_impl(node, Some(offset))
     }
+
     fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
@@ -1074,8 +1164,7 @@ pub fn krate(&self) -> Option<Crate> {
     }
 
     /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
-    // FIXME: rename to visible_traits to not repeat scope?
-    pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
+    pub fn visible_traits(&self) -> FxHashSet<TraitId> {
         let resolver = &self.resolver;
         resolver.traits_in_scope(self.db.upcast())
     }
@@ -1112,4 +1201,14 @@ pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
         let path = Path::from_src(path.clone(), &ctx)?;
         resolve_hir_path(self.db, &self.resolver, &path)
     }
+
+    /// Resolve a path as-if it was written at the given scope. This is
+    /// necessarily a heuristic, as it doesn't take hygiene into account.
+    // FIXME: This special casing solely exists for attributes for now
+    // ideally we should have a path resolution infra that properly knows about overlapping namespaces
+    pub fn speculative_resolve_as_mac(&self, path: &ast::Path) -> Option<MacroDef> {
+        let ctx = body::LowerCtx::new(self.db.upcast(), self.file_id);
+        let path = Path::from_src(path.clone(), &ctx)?;
+        resolve_hir_path_as_macro(self.db, &self.resolver, &path)
+    }
 }