if first == last {
self.descend_into_macros_impl(
first,
- |InFile { value, .. }| {
+ &mut |InFile { value, .. }| {
if let Some(node) = value.ancestors().find_map(N::cast) {
res.push(node)
}
let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(
first,
- |token| {
+ &mut |token| {
scratch.push(token);
},
false,
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
last,
- |InFile { value: last, file_id: last_fid }| {
+ &mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
if let Some(p) = first.parent() {
/// Descends `token` through all macro expansions it participates in,
/// collecting every token it maps to. Returns at least the mappings
/// found (possibly empty if the token maps to nothing).
///
/// NOTE(review): resolved from a garbled diff hunk — kept the post-change
/// (`+`) form, which passes the callback as `&mut dyn FnMut` so
/// `descend_into_macros_impl` is not monomorphized per closure type.
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
    let mut res = smallvec![];
    self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res.push(value), false);
    res
}
/// Descends `token` through macro expansions but stops at the first
/// mapped token (`single == true`); falls back to the original token
/// (cloned up front) when no descent happens.
///
/// NOTE(review): resolved from a garbled diff hunk — kept the post-change
/// (`+`) form taking the callback as `&mut dyn FnMut`.
fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
    let mut res = token.clone();
    self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res = value, true);
    res
}
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
- mut f: impl FnMut(InFile<SyntaxToken>),
+ f: &mut dyn FnMut(InFile<SyntaxToken>),
single: bool,
) {
let _p = profile::span("descend_into_macros");
Some(it) => it,
None => return,
};
- let sa = self.analyze(&parent);
+ let sa = self.analyze_no_infer(&parent);
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
}
/// Builds a `SourceAnalyzer` for `node` with body type inference enabled
/// (the `true` flag — see `analyze_impl`).
///
/// NOTE(review): resolved from a garbled diff hunk — kept the post-change
/// (`+`) call carrying the new `infer_body` argument.
fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
    self.analyze_impl(node, None, true)
}
/// Like `analyze`, but positions the analyzer at `offset` within `node`;
/// body type inference is enabled.
///
/// NOTE(review): resolved from a garbled diff hunk — kept the post-change
/// (`+`) call carrying the new `infer_body` argument.
fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
    self.analyze_impl(node, Some(offset), true)
}
- fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
+ fn analyze_no_infer(&self, node: &SyntaxNode) -> SourceAnalyzer {
+ self.analyze_impl(node, None, false)
+ }
+
+ fn analyze_impl(
+ &self,
+ node: &SyntaxNode,
+ offset: Option<TextSize>,
+ infer_body: bool,
+ ) -> SourceAnalyzer {
let _p = profile::span("Semantics::analyze_impl");
let node = self.find_file(node.clone());
let node = node.as_ref();
let resolver = match container {
ChildContainer::DefWithBodyId(def) => {
- return SourceAnalyzer::new_for_body(self.db, def, node, offset)
+ return if infer_body {
+ SourceAnalyzer::new_for_body(self.db, def, node, offset)
+ } else {
+ SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+ }
}
ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
pub(crate) fn new_for_body(
db: &dyn HirDatabase,
def: DefWithBodyId,
- node: InFile<&SyntaxNode>,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
offset: Option<TextSize>,
) -> SourceAnalyzer {
let (body, source_map) = db.body_with_source_map(def);
body: Some(body),
body_source_map: Some(source_map),
infer: Some(db.infer(def)),
- file_id: node.file_id,
+ file_id,
+ }
+ }
+
+ pub(crate) fn new_for_body_no_infer(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.with_value(offset)),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer {
+ resolver,
+ body: Some(body),
+ body_source_map: Some(source_map),
+ infer: None,
+ file_id,
}
}