resolver::HasResolver,
type_ref::{Mutability, TypeRef},
AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, HasModule, ImplId, LocalEnumVariantId,
- LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId, TraitId, TypeAliasId,
- TypeParamId, UnionId,
+ LocalImportId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId,
+ TraitId, TypeAliasId, TypeParamId, UnionId,
};
use hir_expand::{
diagnostics::DiagnosticSink,
}
/// Returns a `ModuleScope`: a set of items, visible in this module.
- pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef)> {
+ pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef, Option<Import>)> {
db.crate_def_map(self.id.krate)[self.id.local_id]
.scope
.entries()
- .map(|(name, res)| (name.clone(), res.def.into()))
+ .map(|(name, res)| {
+ (name.clone(), res.def.into(), res.import.map(|id| Import { parent: self, id }))
+ })
.collect()
}
}
}
-// pub struct Import {
-// pub(crate) parent: Module,
-// pub(crate) id: LocalImportId,
-// }
+pub struct Import {
+ pub(crate) parent: Module,
+ pub(crate) id: LocalImportId,
+}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct StructField {
BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQuery, CrateLangItemsQuery,
DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, ExprScopesQuery,
FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternDatabase, InternDatabaseStorage,
- LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, StaticDataQuery, StructDataQuery,
- TraitDataQuery, TypeAliasDataQuery,
+ LangItemQuery, ModuleLangItemsQuery, RawItemsQuery, RawItemsWithSourceMapQuery,
+ StaticDataQuery, StructDataQuery, TraitDataQuery, TypeAliasDataQuery,
};
pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, MacroArgQuery, MacroDefQuery, MacroExpandQuery,
use ra_syntax::ast;
use crate::{
- db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, MacroDef, Module,
- Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
+ db::DefDatabase, Const, Enum, EnumVariant, FieldSource, Function, ImplBlock, Import, MacroDef,
+ Module, Static, Struct, StructField, Trait, TypeAlias, TypeParam, Union,
};
pub use hir_expand::InFile;
self.id.lookup(db).source(db)
}
}
+impl HasSource for Import {
+ type Ast = Either<ast::UseTree, ast::ExternCrateItem>;
+
+ /// Returns the syntax of the last path segment corresponding to this import
+ fn source(self, db: &impl DefDatabase) -> InFile<Self::Ast> {
+ let src = self.parent.definition_source(db);
+ let (_, source_map) = db.raw_items_with_source_map(src.file_id);
+ let root = db.parse_or_expand(src.file_id).unwrap();
+ let ptr = source_map.get(self.id);
+ src.with_value(ptr.map_left(|it| it.to_node(&root)).map_right(|it| it.to_node(&root)))
+ }
+}
impl HasSource for TypeParam {
type Ast = Either<ast::TraitDef, ast::TypeParam>;
pub use crate::{
code_model::{
Adt, AssocItem, AttrDef, Const, Crate, CrateDependency, DefWithBody, Docs, Enum,
- EnumVariant, FieldSource, Function, GenericDef, HasAttrs, ImplBlock, Local, MacroDef,
- Module, ModuleDef, ScopeDef, Static, Struct, StructField, Trait, Type, TypeAlias,
+ EnumVariant, FieldSource, Function, GenericDef, HasAttrs, ImplBlock, Import, Local,
+ MacroDef, Module, ModuleDef, ScopeDef, Static, Struct, StructField, Trait, Type, TypeAlias,
TypeParam, Union, VariantDef,
},
from_source::FromSource,
docs::Documentation,
generics::GenericParams,
lang_item::{LangItemTarget, LangItems},
- nameres::{raw::RawItems, CrateDefMap},
+ nameres::{
+ raw::{ImportSourceMap, RawItems},
+ CrateDefMap,
+ },
AttrDefId, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, FunctionId, FunctionLoc,
GenericDefId, ImplId, ImplLoc, ModuleId, StaticId, StaticLoc, StructId, StructLoc, TraitId,
TraitLoc, TypeAliasId, TypeAliasLoc, UnionId, UnionLoc,
#[salsa::query_group(DefDatabaseStorage)]
pub trait DefDatabase: InternDatabase + AstDatabase {
+ #[salsa::invoke(RawItems::raw_items_with_source_map_query)]
+ fn raw_items_with_source_map(
+ &self,
+ file_id: HirFileId,
+ ) -> (Arc<RawItems>, Arc<ImportSourceMap>);
+
#[salsa::invoke(RawItems::raw_items_query)]
fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;
use once_cell::sync::Lazy;
use rustc_hash::FxHashMap;
-use crate::{per_ns::PerNs, BuiltinType, ImplId, MacroDefId, ModuleDefId, TraitId};
+use crate::{per_ns::PerNs, BuiltinType, ImplId, LocalImportId, MacroDefId, ModuleDefId, TraitId};
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ItemScope {
BuiltinType::ALL
.iter()
.map(|(name, ty)| {
- (name.clone(), Resolution { def: PerNs::types(ty.clone().into()), declaration: false })
+ (name.clone(), Resolution { def: PerNs::types(ty.clone().into()), import: None })
})
.collect()
});
}
pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ {
- self.entries().filter(|(_name, res)| res.declaration).flat_map(|(_name, res)| {
- res.def.take_types().into_iter().chain(res.def.take_values().into_iter())
- })
+ self.entries()
+ .filter_map(|(_name, res)| if res.import.is_none() { Some(res.def) } else { None })
+ .flat_map(|per_ns| {
+ per_ns.take_types().into_iter().chain(per_ns.take_values().into_iter())
+ })
}
pub fn impls(&self) -> impl Iterator<Item = ImplId> + ExactSizeIterator + '_ {
self.legacy_macros.insert(name, mac);
}
- pub(crate) fn push_res(&mut self, name: Name, res: &Resolution, declaration: bool) -> bool {
+ pub(crate) fn push_res(
+ &mut self,
+ name: Name,
+ res: &Resolution,
+ import: Option<LocalImportId>,
+ ) -> bool {
let mut changed = false;
let existing = self.items.entry(name.clone()).or_default();
if existing.def.types.is_none() && res.def.types.is_some() {
existing.def.types = res.def.types;
- existing.declaration |= declaration;
+ existing.import = import.or(res.import);
changed = true;
}
if existing.def.values.is_none() && res.def.values.is_some() {
existing.def.values = res.def.values;
- existing.declaration |= declaration;
+ existing.import = import.or(res.import);
changed = true;
}
if existing.def.macros.is_none() && res.def.macros.is_some() {
existing.def.macros = res.def.macros;
- existing.declaration |= declaration;
+ existing.import = import.or(res.import);
changed = true;
}
+ if existing.def.is_none()
+ && res.def.is_none()
+ && existing.import.is_none()
+ && res.import.is_some()
+ {
+ existing.import = res.import;
+ }
changed
}
pub struct Resolution {
/// None for unresolved
pub def: PerNs,
- pub declaration: bool,
+ /// ident by which this is imported into local scope.
+ pub import: Option<LocalImportId>,
}
use crate::body::Expander;
use crate::builtin_type::BuiltinType;
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct LocalImportId(RawId);
+impl_arena_id!(LocalImportId);
+
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct ModuleId {
pub krate: CrateId,
path::{ModPath, PathKind},
per_ns::PerNs,
AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern,
- LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc,
+ LocalImportId, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc,
+ TypeAliasLoc, UnionLoc,
};
pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
#[derive(Clone, Debug, Eq, PartialEq)]
struct ImportDirective {
module_id: LocalModuleId,
- import_id: raw::LocalImportId,
+ import_id: LocalImportId,
import: raw::ImportData,
status: PartialResolvedImport,
}
struct DefCollector<'a, DB> {
db: &'a DB,
def_map: CrateDefMap,
- glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, raw::LocalImportId)>>,
+ glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, LocalImportId)>>,
unresolved_imports: Vec<ImportDirective>,
resolved_imports: Vec<ImportDirective>,
unexpanded_macros: Vec<MacroDirective>,
if export {
self.update(
self.def_map.root,
- &[(name, Resolution { def: PerNs::macros(macro_), declaration: false })],
+ None,
+ &[(name, Resolution { def: PerNs::macros(macro_), import: None })],
);
}
}
// Module scoped macros is included
let items = scope.collect_resolutions();
- self.update(module_id, &items);
+ self.update(module_id, Some(import_id), &items);
} else {
// glob import from same crate => we do an initial
// import, and then need to propagate any further
// Module scoped macros is included
let items = scope.collect_resolutions();
- self.update(module_id, &items);
+ self.update(module_id, Some(import_id), &items);
// record the glob import in case we add further items
let glob = self.glob_imports.entry(m.local_id).or_default();
if !glob.iter().any(|it| *it == (module_id, import_id)) {
let variant = EnumVariantId { parent: e, local_id };
let res = Resolution {
def: PerNs::both(variant.into(), variant.into()),
- declaration: false,
+ import: Some(import_id),
};
(name, res)
})
.collect::<Vec<_>>();
- self.update(module_id, &resolutions);
+ self.update(module_id, Some(import_id), &resolutions);
}
Some(d) => {
log::debug!("glob import {:?} from non-module/enum {:?}", import, d);
}
}
- let resolution = Resolution { def, declaration: false };
- self.update(module_id, &[(name, resolution)]);
+ let resolution = Resolution { def, import: Some(import_id) };
+ self.update(module_id, Some(import_id), &[(name, resolution)]);
}
None => tested_by!(bogus_paths),
}
}
}
- fn update(&mut self, module_id: LocalModuleId, resolutions: &[(Name, Resolution)]) {
- self.update_recursive(module_id, resolutions, 0)
+ fn update(
+ &mut self,
+ module_id: LocalModuleId,
+ import: Option<LocalImportId>,
+ resolutions: &[(Name, Resolution)],
+ ) {
+ self.update_recursive(module_id, import, resolutions, 0)
}
// Recursive worker for `update`: pushes `resolutions` into `module_id`'s
// scope and, if anything changed, re-applies them to every module that glob
// imports from `module_id`.
fn update_recursive(
&mut self,
module_id: LocalModuleId,
+ import: Option<LocalImportId>,
resolutions: &[(Name, Resolution)],
depth: usize,
) {
let scope = &mut self.def_map.modules[module_id].scope;
let mut changed = false;
for (name, res) in resolutions {
// `import` overrides the per-resolution import source (see `push_res`).
- changed |= scope.push_res(name.clone(), res, depth == 0 && res.declaration);
+ changed |= scope.push_res(name.clone(), res, import);
}
if !changed {
// NOTE(review): lines are elided here in this excerpt — presumably an
// early return plus the `glob_imports` lookup for `module_id`; confirm
// against the full source before relying on this.
.flat_map(|v| v.iter())
.cloned()
.collect::<Vec<_>>();
- for (glob_importing_module, _glob_import) in glob_imports {
+ for (glob_importing_module, glob_import) in glob_imports {
// We pass the glob import so that the tracked import in those modules is that glob import
- self.update_recursive(glob_importing_module, resolutions, depth + 1);
+ self.update_recursive(glob_importing_module, Some(glob_import), resolutions, depth + 1);
}
}
def: PerNs::types(
ModuleId { krate: self.def_collector.def_map.krate, local_id: res }.into(),
),
- declaration: true,
+ import: None,
};
- self.def_collector.update(self.module_id, &[(name, resolution)]);
+ self.def_collector.update(self.module_id, None, &[(name, resolution)]);
res
}
PerNs::types(def.into())
}
};
- let resolution = Resolution { def, declaration: true };
- self.def_collector.update(self.module_id, &[(name, resolution)])
+ let resolution = Resolution { def, import: None };
+ self.def_collector.update(self.module_id, None, &[(name, resolution)])
}
fn collect_derives(&mut self, attrs: &Attrs, def: &raw::DefData) {
use std::{ops::Index, sync::Arc};
+use either::Either;
use hir_expand::{
ast_id_map::AstIdMap,
db::AstDatabase,
hygiene::Hygiene,
name::{AsName, Name},
};
-use ra_arena::{impl_arena_id, Arena, RawId};
+use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId};
use ra_syntax::{
ast::{self, AttrsOwner, NameOwner},
- AstNode,
+ AstNode, AstPtr,
};
use test_utils::tested_by;
-use crate::{attr::Attrs, db::DefDatabase, path::ModPath, FileAstId, HirFileId, InFile};
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub(super) struct LocalImportId(RawId);
-impl_arena_id!(LocalImportId);
+use crate::{
+ attr::Attrs, db::DefDatabase, path::ModPath, trace::Trace, FileAstId, HirFileId, InFile,
+ LocalImportId,
+};
/// `RawItems` is a set of top-level items in a file (except for impls).
///
items: Vec<RawItem>,
}
+#[derive(Debug, Default, PartialEq, Eq)]
+pub struct ImportSourceMap {
+ map: ArenaMap<LocalImportId, ImportSourcePtr>,
+}
+
+type ImportSourcePtr = Either<AstPtr<ast::UseTree>, AstPtr<ast::ExternCrateItem>>;
+
+impl ImportSourceMap {
+ pub fn get(&self, import: LocalImportId) -> ImportSourcePtr {
+ self.map[import].clone()
+ }
+}
+
impl RawItems {
// Thin wrapper query: callers that don't need the source map share the
// computation with `raw_items_with_source_map` and drop the map half.
pub(crate) fn raw_items_query(
db: &(impl DefDatabase + AstDatabase),
file_id: HirFileId,
) -> Arc<RawItems> {
+ db.raw_items_with_source_map(file_id).0
+ }
+
// Collects the top-level raw items of `file_id` together with a map from
// each allocated import id back to its originating syntax pointer.
+ pub(crate) fn raw_items_with_source_map_query(
+ db: &(impl DefDatabase + AstDatabase),
+ file_id: HirFileId,
+ ) -> (Arc<RawItems>, Arc<ImportSourceMap>) {
let mut collector = RawItemsCollector {
raw_items: RawItems::default(),
source_ast_id_map: db.ast_id_map(file_id),
// Trace records both the ImportData arena and the per-id source pointers.
+ imports: Trace::new(),
file_id,
hygiene: Hygiene::new(db, file_id),
};
// NOTE(review): lines are elided here in this excerpt — presumably the
// parse of `file_id` that produces `item_list`; confirm against the full
// source.
collector.process_module(None, item_list);
}
}
- let raw_items = collector.raw_items;
- Arc::new(raw_items)
// Split the traced imports into the arena (stored on RawItems) and the
// id -> syntax-pointer map (returned as the ImportSourceMap).
+ let mut raw_items = collector.raw_items;
+ let (arena, map) = collector.imports.into_arena_and_map();
+ raw_items.imports = arena;
+ let source_map = ImportSourceMap { map };
+ (Arc::new(raw_items), Arc::new(source_map))
}
pub(super) fn items(&self) -> &[RawItem] {
struct RawItemsCollector {
raw_items: RawItems,
+ imports: Trace<LocalImportId, ImportData, ImportSourcePtr>,
source_ast_id_map: Arc<AstIdMap>,
file_id: HirFileId,
hygiene: Hygiene,
ModPath::expand_use_item(
InFile { value: use_item, file_id: self.file_id },
&self.hygiene,
- |path, _use_tree, is_glob, alias| {
+ |path, use_tree, is_glob, alias| {
let import_data = ImportData {
path,
alias,
is_extern_crate: false,
is_macro_use: false,
};
- buf.push(import_data);
+ buf.push((import_data, Either::Left(AstPtr::new(use_tree))));
},
);
- for import_data in buf {
- self.push_import(current_module, attrs.clone(), import_data);
+ for (import_data, ptr) in buf {
+ self.push_import(current_module, attrs.clone(), import_data, ptr);
}
}
is_extern_crate: true,
is_macro_use,
};
- self.push_import(current_module, attrs, import_data);
+ self.push_import(
+ current_module,
+ attrs,
+ import_data,
+ Either::Right(AstPtr::new(&extern_crate)),
+ );
}
}
self.push_item(current_module, attrs, RawItemKind::Impl(imp))
}
- fn push_import(&mut self, current_module: Option<Module>, attrs: Attrs, data: ImportData) {
- let import = self.raw_items.imports.alloc(data);
+ fn push_import(
+ &mut self,
+ current_module: Option<Module>,
+ attrs: Attrs,
+ data: ImportData,
+ source: ImportSourcePtr,
+ ) {
+ let import = self.imports.alloc(|| source, || data);
self.push_item(current_module, attrs, RawItemKind::Import(import))
}
}
impl<ID: ra_arena::ArenaId + Copy, T, V> Trace<ID, T, V> {
// Records into both the arena (values `T`) and the map (per-id `V`).
+ pub(crate) fn new() -> Trace<ID, T, V> {
+ Trace { arena: Some(Arena::default()), map: Some(ArenaMap::default()), len: 0 }
+ }
+
// Records into the arena only; `into_map` must not be called on this.
pub(crate) fn new_for_arena() -> Trace<ID, T, V> {
Trace { arena: Some(Arena::default()), map: None, len: 0 }
}
// Consumes the trace, yielding the map half. Panics if constructed without
// a map (e.g. via `new_for_arena`).
pub(crate) fn into_map(mut self) -> ArenaMap<ID, V> {
self.map.take().unwrap()
}
+
// Consumes the trace, yielding both halves. Panics unless both were
// allocated (i.e. the trace was built with `new`).
+ pub(crate) fn into_arena_and_map(mut self) -> (Arena<ID, T>, ArenaMap<ID, V>) {
+ (self.arena.take().unwrap(), self.map.take().unwrap())
+ }
}
self.query(hir::db::AstIdMapQuery).sweep(sweep);
+ self.query(hir::db::RawItemsWithSourceMapQuery).sweep(sweep);
self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
self.query(hir::db::ExprScopesQuery).sweep(sweep);
hir::db::StructDataQuery
hir::db::EnumDataQuery
hir::db::TraitDataQuery
+ hir::db::RawItemsWithSourceMapQuery
hir::db::RawItemsQuery
hir::db::CrateDefMapQuery
hir::db::GenericParamsQuery
//! FIXME: write short doc here
-use hir::{Adt, PathResolution, ScopeDef};
+use either::Either;
+use hir::{Adt, HasSource, PathResolution};
use ra_syntax::AstNode;
use test_utils::tested_by;
match def {
hir::ModuleDef::Module(module) => {
let module_scope = module.scope(ctx.db);
- for (name, def) in module_scope {
- if ctx.use_item_syntax.is_some() {
- if let hir::ScopeDef::ModuleDef(hir::ModuleDef::BuiltinType(..)) = def {
+ for (name, def, import) in module_scope {
+ if let hir::ScopeDef::ModuleDef(hir::ModuleDef::BuiltinType(..)) = def {
+ if ctx.use_item_syntax.is_some() {
tested_by!(dont_complete_primitive_in_use);
continue;
}
- if let ScopeDef::Unknown = def {
- if let Some(name_ref) = ctx.name_ref.as_ref() {
- if &name_ref.syntax().text() == name.to_string().as_str() {
+ }
+ if Some(module) == ctx.module {
+ if let Some(import) = import {
+ if let Either::Left(use_tree) = import.source(ctx.db).value {
+ if use_tree.syntax().text_range().contains_inclusive(ctx.offset) {
// for `use self::foo<|>`, don't suggest `foo` as a completion
tested_by!(dont_complete_current_use);
continue;
pub(super) analyzer: hir::SourceAnalyzer,
pub(super) offset: TextUnit,
pub(super) token: SyntaxToken,
- pub(super) name_ref: Option<ast::NameRef>,
pub(super) module: Option<hir::Module>,
pub(super) function_syntax: Option<ast::FnDef>,
pub(super) use_item_syntax: Option<ast::UseItem>,
analyzer,
token,
offset: position.offset,
- name_ref: None,
module,
function_syntax: None,
use_item_syntax: None,
}
fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) {
- self.name_ref =
- find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start());
let name_range = name_ref.syntax().text_range();
if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() {
self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);