mod source_to_def;
-use std::{cell::RefCell, fmt, iter};
+use std::{cell::RefCell, fmt, iter, ops};
use base_db::{FileId, FileRange};
use hir_def::{
body, macro_id_to_def_id,
resolver::{self, HasResolver, Resolver, TypeNs},
+ type_ref::Mutability,
AsMacroCall, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{
name::{known, AsName},
ExpansionInfo, MacroCallId,
};
-use hir_ty::Interner;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use syntax::{
algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
- match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+ match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
};
use crate::{
db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, SourceAnalyzer},
- Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
- HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, Path,
- ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
+ Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function,
+ HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
+ Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
};
#[derive(Debug, Clone, PartialEq, Eq)]
/// A const parameter
ConstParam(ConstParam),
SelfType(Impl),
- AssocItem(AssocItem),
BuiltinAttr(BuiltinAttr),
ToolModule(ToolModule),
+ DeriveHelper(DeriveHelper),
}
impl PathResolution {
PathResolution::BuiltinAttr(_)
| PathResolution::ToolModule(_)
| PathResolution::Local(_)
+ | PathResolution::DeriveHelper(_)
| PathResolution::ConstParam(_) => None,
PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
- PathResolution::AssocItem(AssocItem::Const(_) | AssocItem::Function(_)) => None,
- PathResolution::AssocItem(AssocItem::TypeAlias(alias)) => {
- Some(TypeNs::TypeAliasId((*alias).into()))
- }
}
}
}
}
impl<'db, DB: HirDatabase> Semantics<'db, DB> {
- pub fn new(db: &DB) -> Semantics<DB> {
+ /// Creates a new `Semantics` wrapper around `db`, backed by a fresh `SemanticsImpl`.
+ pub fn new(db: &DB) -> Semantics<'_, DB> {
let impl_ = SemanticsImpl::new(db);
Semantics { db, imp: impl_ }
}
self.imp.descend_into_macros(token)
}
+ /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
+ ///
+ /// Returns the original non descended token if none of the mapped counterparts have the same text.
+ pub fn descend_into_macros_with_same_text(
+ &self,
+ token: SyntaxToken,
+ ) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros_with_same_text(token)
+ }
+
+ /// Descend the token into macrocalls to its mapped counterpart, preferring a counterpart
+ /// whose surrounding node kind matches the input token's (see the impl for the kind heuristic).
+ pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_with_kind_preference(token)
+ }
+
/// Maps a node down by mapping its first and last token down.
pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
self.imp.descend_node_into_attributes(node)
self.imp.resolve_type(ty)
}
+ pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
+ self.imp.resolve_trait(trait_)
+ }
+
+ // FIXME: Figure out a nice interface to inspect adjustments
+ pub fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.imp.is_implicit_reborrow(expr)
+ }
+
pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
self.imp.type_of_expr(expr)
}
self.imp.type_of_self(param)
}
+ pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.imp.pattern_adjustments(pat)
+ }
+
+ pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.imp.binding_mode_of_pat(pat)
+ }
+
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.imp.resolve_method_call(call).map(Function::from)
}
self.imp.resolve_macro_call(macro_call)
}
+ pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ self.imp.is_unsafe_macro_call(macro_call)
+ }
+
pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
self.imp.resolve_attr_macro_call(item)
}
self.imp.to_module_def(file)
}
- pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
+ pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
self.imp.scope(node)
}
- pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
+ pub fn scope_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SemanticsScope<'db>> {
self.imp.scope_at_offset(node, offset)
}
}
fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
- let sa = self.analyze_no_infer(macro_call.syntax());
+ let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
let node = self.parse_or_expand(file_id)?;
Some(node)
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let SourceAnalyzer { file_id, resolver, .. } =
- self.analyze_no_infer(actual_macro_call.syntax());
+ self.analyze_no_infer(actual_macro_call.syntax())?;
let macro_call = InFile::new(file_id, actual_macro_call);
- let krate = resolver.krate()?;
+ let krate = resolver.krate();
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
resolver
.resolve_path_as_macro(self.db.upcast(), &path)
};
if first == last {
- self.descend_into_macros_impl(
- first,
- &mut |InFile { value, .. }| {
- if let Some(node) = value.ancestors().find_map(N::cast) {
- res.push(node)
- }
- },
- false,
- );
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ res.push(node)
+ }
+ false
+ });
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
- self.descend_into_macros_impl(
- first,
- &mut |token| {
- scratch.push(token);
- },
- false,
- );
+ self.descend_into_macros_impl(first, &mut |token| {
+ scratch.push(token);
+ false
+ });
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
}
}
}
+ false
},
- false,
);
}
res
fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![];
+ // Collect every mapped counterpart; returning `false` from the callback tells
+ // `descend_into_macros_impl` to keep descending rather than stop at the first hit.
- self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res.push(value), false);
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res.push(value);
+ false
+ });
+ res
+ }
+
+ fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let text = token.text();
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ // Only keep counterparts that spell out the same text as the input token.
+ if value.text() == text {
+ res.push(value);
+ }
+ false
+ });
+ // Fall back to the original token when no mapped counterpart matched by text.
+ if res.is_empty() {
+ res.push(token);
+ }
res
}
+ fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ // Classify a token by its parent node's kind; for NAME/NAME_REF tokens look one level
+ // further up so that e.g. a name inside a struct is distinguished from one in a call.
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
+ node.parent().map_or(kind, |it| it.kind())
+ }
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let preferred_kind = fetch_kind(&token);
+ let mut res = None;
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if fetch_kind(&value) == preferred_kind {
+ // Exact kind match: take it and stop descending (`true` halts the walk).
+ res = Some(value);
+ true
+ } else {
+ // Remember the first counterpart as a fallback, but keep looking.
+ if res.is_none() {
+ res = Some(value)
+ }
+ false
+ }
+ });
+ // No mapped counterpart at all: the token maps to itself.
+ res.unwrap_or(token)
+ }
+
fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
let mut res = token.clone();
- self.descend_into_macros_impl(token, &mut |InFile { value, .. }| res = value, true);
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ // Take the first mapped counterpart and stop (`true` halts the descent).
+ res = value;
+ true
+ });
res
}
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
- f: &mut dyn FnMut(InFile<SyntaxToken>),
- single: bool,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
) {
let _p = profile::span("descend_into_macros");
let parent = match token.parent() {
Some(it) => it,
None => return,
};
- let sa = self.analyze_no_infer(&parent);
+ let sa = match self.analyze_no_infer(&parent) {
+ Some(it) => it,
+ None => return,
+ };
+ let def_map = sa.resolver.def_map();
+
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
self.cache(value, file_id);
}
- let mut mapped_tokens =
- expansion_info.map_token_down(self.db.upcast(), item, token)?;
-
+ let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
let len = stack.len();
+
// requeue the tokens we got from mapping our current token down
- if single {
- stack.extend(mapped_tokens.next());
- } else {
- stack.extend(mapped_tokens);
- }
+ stack.extend(mapped_tokens);
// if the length changed we have found a mapping for the token
(stack.len() != len).then(|| ())
};
while let Some(token) = stack.pop() {
self.db.unwind_if_cancelled();
let was_not_remapped = (|| {
- // are we inside an attribute macro call
+ // First expand into attribute invocations
let containing_attribute_macro_call = self.with_ctx(|ctx| {
- token.value.ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
if item.attrs().next().is_none() {
// Don't force populate the dyn cache for items that don't have an attribute anyways
return None;
);
}
- // or are we inside a function-like macro call
- if let Some(tt) =
- // FIXME replace map.while_some with take_while once stable
- token.value.ancestors().map(ast::TokenTree::cast).while_some().last()
- {
- let parent = tt.syntax().parent()?;
- // check for derive attribute here
- let macro_call = match_ast! {
- match parent {
- ast::MacroCall(mcall) => mcall,
- // attribute we failed expansion for earlier, this might be a derive invocation
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
+
+ if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall = token.with_value(macro_call);
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
+ }
+ };
+ process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
+ } else if let Some(meta) = ast::Meta::cast(parent.clone()) {
+ // attribute we failed expansion for earlier, this might be a derive invocation
+ // or derive helper attribute
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
// so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
- ast::Meta(meta) => {
- let attr = meta.parent_attr()?;
- let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
- let call_id = self.with_ctx(|ctx| {
- let (_, call_id, _) = ctx.attr_to_derive_macro_call(
- token.with_value(&adt),
- token.with_value(attr),
- )?;
- Some(call_id)
- })?;
+ ctx.attr_to_derive_macro_call(
+ token.with_value(&adt),
+ token.with_value(attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
let file_id = call_id.as_file();
return process_expansion_for_token(
&mut stack,
Some(adt.into()),
token.as_ref(),
);
- },
- _ => return None,
+ }
+ None => Some(adt),
}
- };
-
- if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+ } else {
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
return None;
}
- if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
- return None;
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
+ let helpers =
+ def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
+ let item = Some(adt.into());
+ let mut res = None;
+ for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_file(),
+ item.clone(),
+ token.as_ref(),
+ ));
}
-
- let mcall = token.with_value(macro_call);
- let file_id = match mcache.get(&mcall) {
- Some(&it) => it,
- None => {
- let it = sa.expand(self.db, mcall.as_ref())?;
- mcache.insert(mcall, it);
- it
- }
- };
- return process_expansion_for_token(&mut stack, file_id, None, token.as_ref());
+ res
+ } else {
+ None
}
-
- // outside of a macro invocation so this is a "final" token
- None
})()
.is_none();
- if was_not_remapped {
- f(token)
+ if was_not_remapped && f(token) {
+ break;
}
}
}
}
fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
- self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(|it| it.value)
+ self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
+ |InFile { file_id, value }| {
+ // Cache the root of the original file so later lookups on nodes from it can
+ // resolve their file id without re-deriving it.
+ self.cache(find_root(value.syntax()), file_id);
+ value
+ },
+ )
}
fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
}
fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
- let scope = self.scope(ty.syntax());
- let ctx = body::LowerCtx::new(self.db.upcast(), scope.file_id);
- let ty = hir_ty::TyLoweringContext::new(self.db, &scope.resolver)
+ // `analyze` is `None` when the node's file is not part of any crate; in that case
+ // there is no resolver to lower the type against.
+ let analyze = self.analyze(ty.syntax())?;
+ let ctx = body::LowerCtx::new(self.db.upcast(), analyze.file_id);
+ let ty = hir_ty::TyLoweringContext::new(self.db, &analyze.resolver)
.lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
- Type::new_with_resolver(self.db, &scope.resolver, ty)
+ Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
+ }
+
+ /// Resolves `path` to the trait it names, if any.
+ fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+ // Lower the AST path under the file's hygiene, then look it up in the type namespace.
+ let analyze = self.analyze(path.syntax())?;
+ let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
+ let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+ let resolved =
+ analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())?;
+ // Anything other than a trait (type alias, ADT, …) is not a match.
+ if let TypeNs::TraitId(id) = resolved {
+ Some(Trait { id })
+ } else {
+ None
+ }
+ }
+
+ /// Returns the mutability of the implicit reborrow adjustment applied to `expr`, if any;
+ /// delegates to `SourceAnalyzer::is_implicit_reborrow` (None when the file is not analyzable).
+ fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr)
+ }
fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
- self.analyze(expr.syntax())
+ self.analyze(expr.syntax())?
.type_of_expr(self.db, expr)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
}
fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
- self.analyze(pat.syntax())
+ self.analyze(pat.syntax())?
.type_of_pat(self.db, pat)
.map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
}
fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
- self.analyze(param.syntax()).type_of_self(self.db, param)
+ self.analyze(param.syntax())?.type_of_self(self.db, param)
+ }
+
+ /// Pattern adjustments recorded by inference for `pat`; empty when the pattern
+ /// cannot be analyzed (its file is not part of any crate) or has no adjustments.
+ fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.analyze(pat.syntax())
+ .and_then(|it| it.pattern_adjustments(self.db, pat))
+ .unwrap_or_default()
+ }
+
+ /// The binding mode inference assigned to `pat`, if the pattern can be analyzed.
+ fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
}
fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
- self.analyze(call.syntax()).resolve_method_call(self.db, call).map(|(id, _)| id)
+ self.analyze(call.syntax())?.resolve_method_call(self.db, call)
}
fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
- let (func, subst) = self.analyze(call.syntax()).resolve_method_call(self.db, call)?;
- let ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
- let resolver = self.analyze(call.syntax()).resolver;
- let ty = Type::new_with_resolver(self.db, &resolver, ty)?;
- let mut res = ty.as_callable(self.db)?;
- res.is_bound_method = true;
- Some(res)
+ self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
}
fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
- self.analyze(field.syntax()).resolve_field(self.db, field)
+ self.analyze(field.syntax())?.resolve_field(self.db, field)
}
fn resolve_record_field(
&self,
field: &ast::RecordExprField,
) -> Option<(Field, Option<Local>, Type)> {
- self.analyze(field.syntax()).resolve_record_field(self.db, field)
+ self.analyze(field.syntax())?.resolve_record_field(self.db, field)
}
fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
- self.analyze(field.syntax()).resolve_record_pat_field(self.db, field)
+ self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
}
fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
- let sa = self.analyze(macro_call.syntax());
+ let sa = self.analyze(macro_call.syntax())?;
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
sa.resolve_macro_call(self.db, macro_call)
}
+ /// Whether this macro call expands to unsafe code. A call that cannot be analyzed
+ /// (its file is not part of any crate) is treated as safe.
+ fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ self.analyze(macro_call.syntax()).map_or(false, |sa| {
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.is_unsafe_macro_call(self.db, macro_call)
+ })
+ }
+
fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
let item_in_file = self.wrap_node_infile(item.clone());
let id = self.with_ctx(|ctx| {
}
fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
- self.analyze(path.syntax()).resolve_path(self.db, path)
+ self.analyze(path.syntax())?.resolve_path(self.db, path)
}
fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
- let krate = self.scope(extern_crate.syntax()).krate()?;
+ let krate = self.scope(extern_crate.syntax())?.krate();
let name = extern_crate.name_ref()?.as_name();
if name == known::SELF_PARAM {
return Some(krate);
}
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
- self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit)
+ self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
}
fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
- self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
+ self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
}
fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
self.analyze(literal.syntax())
- .record_literal_missing_fields(self.db, literal)
+ .and_then(|it| it.record_literal_missing_fields(self.db, literal))
.unwrap_or_default()
}
fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
self.analyze(pattern.syntax())
- .record_pattern_missing_fields(self.db, pattern)
+ .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
.unwrap_or_default()
}
- fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T {
+ fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut();
let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
f(&mut ctx)
self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
}
- fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
- let SourceAnalyzer { file_id, resolver, .. } = self.analyze_no_infer(node);
- SemanticsScope { db: self.db, file_id, resolver }
+ // Returns `None` when the node's file is not attached to any crate, in which case
+ // no resolver (and hence no scope) can be produced.
+ fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ })
}
- fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
- let SourceAnalyzer { file_id, resolver, .. } =
- self.analyze_with_offset_no_infer(node, offset);
- SemanticsScope { db: self.db, file_id, resolver }
+ // Like `scope`, but resolves the scope at a specific offset within `node`;
+ // `None` when the node's file is not part of any crate.
+ fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+ self.analyze_with_offset_no_infer(node, offset).map(
+ |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ },
+ )
}
fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
Some(res)
}
- fn analyze(&self, node: &SyntaxNode) -> SourceAnalyzer {
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
self.analyze_impl(node, None, true)
}
- fn analyze_no_infer(&self, node: &SyntaxNode) -> SourceAnalyzer {
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
self.analyze_impl(node, None, false)
}
- fn analyze_with_offset_no_infer(&self, node: &SyntaxNode, offset: TextSize) -> SourceAnalyzer {
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze_with_offset_no_infer(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SourceAnalyzer> {
self.analyze_impl(node, Some(offset), false)
}
node: &SyntaxNode,
offset: Option<TextSize>,
infer_body: bool,
- ) -> SourceAnalyzer {
+ ) -> Option<SourceAnalyzer> {
let _p = profile::span("Semantics::analyze_impl");
let node = self.find_file(node);
let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
Some(it) => it,
- None => return SourceAnalyzer::new_for_resolver(Resolver::default(), node),
+ None => return None,
};
let resolver = match container {
ChildContainer::DefWithBodyId(def) => {
- return if infer_body {
+ return Some(if infer_body {
SourceAnalyzer::new_for_body(self.db, def, node, offset)
} else {
SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
- }
+ })
}
ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
};
- SourceAnalyzer::new_for_resolver(resolver, node)
+ Some(SourceAnalyzer::new_for_resolver(resolver, node))
}
fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
InFile::new(file_id, node)
}
+ /// Wraps the node in a [`InFile`] with the file id it belongs to.
fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
let root_node = find_root(node);
let file_id = self.lookup(&root_node).unwrap_or_else(|| {
}
fn macro_call_to_macro_id(
- ctx: &mut SourceToDefCtx,
+ ctx: &mut SourceToDefCtx<'_, '_>,
db: &dyn AstDatabase,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
pub trait ToDef: AstNode + Clone {
type Def;
- fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>;
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
}
macro_rules! to_def_impls {
($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
impl ToDef for $ast {
type Def = $def;
- fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
}
}
}
impl<'a> SemanticsScope<'a> {
- pub fn module(&self) -> Option<Module> {
- Some(Module { id: self.resolver.module()? })
+ /// The module this scope resides in.
+ pub fn module(&self) -> Module {
+ Module { id: self.resolver.module() }
}
- pub fn krate(&self) -> Option<Crate> {
- Some(Crate { id: self.resolver.krate()? })
+ /// The crate this scope resides in.
+ pub fn krate(&self) -> Crate {
+ Crate { id: self.resolver.krate() }
}
pub(crate) fn resolver(&self) -> &Resolver {
&self.resolver
}
- /// Note: `FxHashSet<TraitId>` should be treated as an opaque type, passed into `Type
- pub fn visible_traits(&self) -> FxHashSet<TraitId> {
+ /// Note: `VisibleTraits` should be treated as an opaque type, passed into `Type
+ pub fn visible_traits(&self) -> VisibleTraits {
let resolver = &self.resolver;
- resolver.traits_in_scope(self.db.upcast())
+ // Wrapped in the newtype so callers don't depend on the concrete set representation.
+ VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
}
pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
- resolver::ScopeDef::Local(pat_id) => {
- let parent = self.resolver.body_owner().unwrap();
- ScopeDef::Local(Local { parent, pat_id })
- }
- resolver::ScopeDef::Label(label_id) => {
- let parent = self.resolver.body_owner().unwrap();
- ScopeDef::Label(Label { parent, label_id })
- }
+ resolver::ScopeDef::Local(pat_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Local(Local { parent, pat_id }),
+ None => continue,
+ },
+ resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Label(Label { parent, label_id }),
+ None => continue,
+ },
};
f(name.clone(), def)
}
)
}
}
+
+/// The set of traits visible from some scope. Treat as an opaque type; the `Deref`
+/// impl below exposes the underlying set for read-only queries.
+pub struct VisibleTraits(pub FxHashSet<TraitId>);
+
+impl ops::Deref for VisibleTraits {
+ type Target = FxHashSet<TraitId>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}