git.lizzy.rs Git - rust.git/commitdiff
internal: Don't kick off inference in Semantics::descend_into_macros_impl
author: Lukas Wirth <lukastw97@gmail.com>
Mon, 20 Dec 2021 12:19:48 +0000 (13:19 +0100)
committer: Lukas Wirth <lukastw97@gmail.com>
Mon, 20 Dec 2021 12:19:48 +0000 (13:19 +0100)
crates/hir/src/semantics.rs
crates/hir/src/source_analyzer.rs

index d27869450e5fe915097495c62ebc4ceb28cf07c3..02da397b0cec1f29314397aa5238219267db2360 100644 (file)
@@ -528,7 +528,7 @@ fn speculative_expand_attr(
         if first == last {
             self.descend_into_macros_impl(
                 first,
-                |InFile { value, .. }| {
+                &mut |InFile { value, .. }| {
                     if let Some(node) = value.ancestors().find_map(N::cast) {
                         res.push(node)
                     }
@@ -540,7 +540,7 @@ fn speculative_expand_attr(
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
             self.descend_into_macros_impl(
                 first,
-                |token| {
+                &mut |token| {
                     scratch.push(token);
                 },
                 false,
@@ -549,7 +549,7 @@ fn speculative_expand_attr(
             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(
                 last,
-                |InFile { value: last, file_id: last_fid }| {
+                &mut |InFile { value: last, file_id: last_fid }| {
                     if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
                         if first_fid == last_fid {
                             if let Some(p) = first.parent() {
@@ -574,20 +574,20 @@ fn speculative_expand_attr(
 
     fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res.push(value), false);
         res
     }
 
     fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
         let mut res = token.clone();
-        self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
+        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res = value, true);
         res
     }
 
     fn descend_into_macros_impl(
         &self,
         token: SyntaxToken,
-        mut f: impl FnMut(InFile<SyntaxToken>),
+        f: &mut dyn FnMut(InFile<SyntaxToken>),
         single: bool,
     ) {
         let _p = profile::span("descend_into_macros");
@@ -595,7 +595,7 @@ fn descend_into_macros_impl(
             Some(it) => it,
             None => return,
         };
-        let sa = self.analyze(&parent);
+        let sa = self.analyze_no_infer(&parent);
         let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
         let mut mcache = self.macro_call_cache.borrow_mut();
@@ -927,14 +927,23 @@ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
     }
 
     fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
-        self.analyze_impl(node, None)
+        self.analyze_impl(node, None, true)
     }
 
     fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
-        self.analyze_impl(node, Some(offset))
+        self.analyze_impl(node, Some(offset), true)
     }
 
-    fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+    fn analyze_no_infer(&self, node: &SyntaxNode) -> SourceAnalyzer {
+        self.analyze_impl(node, None, false)
+    }
+
+    fn analyze_impl(
+        &self,
+        node: &SyntaxNode,
+        offset: Option<TextSize>,
+        infer_body: bool,
+    ) -> SourceAnalyzer {
         let _p = profile::span("Semantics::analyze_impl");
         let node = self.find_file(node.clone());
         let node = node.as_ref();
@@ -946,7 +955,11 @@ fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAna
 
         let resolver = match container {
             ChildContainer::DefWithBodyId(def) => {
-                return SourceAnalyzer::new_for_body(self.db, def, node, offset)
+                return if infer_body {
+                    SourceAnalyzer::new_for_body(self.db, def, node, offset)
+                } else {
+                    SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+                }
             }
             ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
             ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
index 4f987db651fda8c405da5453d92a948fb005b695..2d779393f0950601f7e420698cc63c3c222d45bb 100644 (file)
@@ -50,7 +50,7 @@ impl SourceAnalyzer {
     pub(crate) fn new_for_body(
         db: &dyn HirDatabase,
         def: DefWithBodyId,
-        node: InFile<&SyntaxNode>,
+        node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
         offset: Option<TextSize>,
     ) -> SourceAnalyzer {
         let (body, source_map) = db.body_with_source_map(def);
@@ -65,7 +65,29 @@ pub(crate) fn new_for_body(
             body: Some(body),
             body_source_map: Some(source_map),
             infer: Some(db.infer(def)),
-            file_id: node.file_id,
+            file_id,
+        }
+    }
+
+    pub(crate) fn new_for_body_no_infer(
+        db: &dyn HirDatabase,
+        def: DefWithBodyId,
+        node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+        offset: Option<TextSize>,
+    ) -> SourceAnalyzer {
+        let (body, source_map) = db.body_with_source_map(def);
+        let scopes = db.expr_scopes(def);
+        let scope = match offset {
+            None => scope_for(&scopes, &source_map, node),
+            Some(offset) => scope_for_offset(db, &scopes, &source_map, node.with_value(offset)),
+        };
+        let resolver = resolver_for_scope(db.upcast(), def, scope);
+        SourceAnalyzer {
+            resolver,
+            body: Some(body),
+            body_source_map: Some(source_map),
+            infer: None,
+            file_id,
         }
     }