db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, resolve_hir_path_as_macro, SourceAnalyzer},
- Access, AssocItem, Callable, ConstParam, Crate, Field, Function, HasSource, HirFileId, Impl,
- InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path, ScopeDef, Trait,
- Type, TypeAlias, TypeParam, VariantDef,
+ Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
+ HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path,
+ ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
};
#[derive(Debug, Clone, PartialEq, Eq)]
SelfType(Impl),
Macro(MacroDef),
AssocItem(AssocItem),
+ BuiltinAttr(BuiltinAttr),
+ ToolModule(ToolModule),
}
impl PathResolution {
PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
Some(TypeNs::TypeAliasId((*alias).into()))
}
- PathResolution::Local(_) | PathResolution::Macro(_) | PathResolution::ConstParam(_) => {
- None
- }
+ PathResolution::BuiltinAttr(_)
+ | PathResolution::ToolModule(_)
+ | PathResolution::Local(_)
+ | PathResolution::Macro(_)
+ | PathResolution::ConstParam(_) => None,
PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
PathResolution::AssocItem(AssocItem::Const(_) | AssocItem::Function(_)) => None,
self.imp.parse(file_id)
}
+ pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ self.imp.parse_or_expand(file_id)
+ }
+
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
self.imp.expand(macro_call)
}
self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
}
+ /// Descends the token into macro calls to its first mapped counterpart.
pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
- self.imp.descend_into_macros(token).pop().unwrap()
+ self.imp.descend_into_macros_single(token)
}
+ /// Descends the token into macro calls to all of its mapped counterparts.
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
self.imp.descend_into_macros(token)
}
token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
}
+ /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
self.imp.ancestors_with_macros(node)
}
tree
}
+ fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ }
+
fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze(macro_call.syntax());
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
};
if first == last {
- self.descend_into_macros_impl(first, |InFile { value, .. }| {
- if let Some(node) = value.ancestors().find_map(N::cast) {
- res.push(node)
- }
- });
+ self.descend_into_macros_impl(
+ first,
+ |InFile { value, .. }| {
+ if let Some(node) = value.ancestors().find_map(N::cast) {
+ res.push(node)
+ }
+ },
+ false,
+ );
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
- self.descend_into_macros_impl(first, |token| {
- scratch.push(token);
- });
+ self.descend_into_macros_impl(
+ first,
+ |token| {
+ scratch.push(token);
+ },
+ false,
+ );
let mut scratch = scratch.into_iter();
- self.descend_into_macros_impl(last, |InFile { value: last, file_id: last_fid }| {
- if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
- if first_fid == last_fid {
- if let Some(p) = first.parent() {
- let range = first.text_range().cover(last.text_range());
- let node = find_root(&p)
- .covering_element(range)
- .ancestors()
- .take_while(|it| it.text_range() == range)
- .find_map(N::cast);
- if let Some(node) = node {
- res.push(node);
+ self.descend_into_macros_impl(
+ last,
+ |InFile { value: last, file_id: last_fid }| {
+ if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+ if first_fid == last_fid {
+ if let Some(p) = first.parent() {
+ let range = first.text_range().cover(last.text_range());
+ let node = find_root(&p)
+ .covering_element(range)
+ .ancestors()
+ .take_while(|it| it.text_range() == range)
+ .find_map(N::cast);
+ if let Some(node) = node {
+ res.push(node);
+ }
}
}
}
- }
- });
+ },
+ false,
+ );
}
res
}
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![];
- self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value));
+ self.descend_into_macros_impl(token, |InFile { value, .. }| res.push(value), false);
res
}
- fn descend_into_macros_impl(&self, token: SyntaxToken, mut f: impl FnMut(InFile<SyntaxToken>)) {
+ fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ let mut res = token.clone();
+ self.descend_into_macros_impl(token, |InFile { value, .. }| res = value, true);
+ res
+ }
+
+ fn descend_into_macros_impl(
+ &self,
+ token: SyntaxToken,
+ mut f: impl FnMut(InFile<SyntaxToken>),
+ single: bool,
+ ) {
let _p = profile::span("descend_into_macros");
let parent = match token.parent() {
Some(it) => it,
self.cache(value, file_id);
}
- let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+ let mut mapped_tokens =
+ expansion_info.map_token_down(self.db.upcast(), item, token)?;
let len = stack.len();
// requeue the tokens we got from mapping our current token down
- stack.extend(mapped_tokens);
+ if single {
+ stack.extend(mapped_tokens.next());
+ } else {
+ stack.extend(mapped_tokens);
+ }
// if the length changed we have found a mapping for the token
(stack.len() != len).then(|| ())
};
fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<MacroDef> {
let item_in_file = self.find_file(item.syntax().clone()).with_value(item.clone());
let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(item_in_file))?;
- Some(MacroDef { id: self.db.lookup_intern_macro(macro_call_id).def })
+ Some(MacroDef { id: self.db.lookup_intern_macro_call(macro_call_id).def })
}
fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
}
fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
- let sa = self.analyze(node);
- SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+ let SourceAnalyzer { file_id, resolver, .. } = self.analyze(node);
+ SemanticsScope { db: self.db, file_id, resolver }
}
fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
- let sa = self.analyze_with_offset(node, offset);
- SemanticsScope { db: self.db, file_id: sa.file_id, resolver: sa.resolver }
+ let SourceAnalyzer { file_id, resolver, .. } = self.analyze_with_offset(node, offset);
+ SemanticsScope { db: self.db, file_id, resolver }
}
fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
self.analyze_impl(node, None)
}
+
fn analyze_with_offset(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
self.analyze_impl(node, Some(offset))
}
+
fn analyze_impl(&self, node: &SyntaxNode, offset: Option<TextSize>) -> SourceAnalyzer {
let _p = profile::span("Semantics::analyze_impl");
let node = self.find_file(node.clone());
}
/// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
- // FIXME: rename to visible_traits to not repeat scope?
- pub fn traits_in_scope(&self) -> FxHashSet<TraitId> {
+ pub fn visible_traits(&self) -> FxHashSet<TraitId> {
let resolver = &self.resolver;
resolver.traits_in_scope(self.db.upcast())
}