//! This module provides `StaticIndex`, which is used for powering
//! read-only code browsers and emitting LSIF.
use std::collections::HashMap;

use hir::{db::HirDatabase, Crate, Module, Semantics};
use ide_db::{
    base_db::{FileId, FileRange, SourceDatabaseExt},
    defs::{Definition, IdentClass},
    FxHashSet, RootDatabase,
};
use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T};

use crate::{
    hover::hover_for_definition,
    moniker::{def_to_moniker, MonikerResult},
    parent_module::crates_for,
    Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig,
    TryToNav,
};
22 /// A static representation of fully analyzed source code.
24 /// The intended use-case is powering read-only code browsers and emitting LSIF
26 pub struct StaticIndex<'a> {
27 pub files: Vec<StaticIndexedFile>,
28 pub tokens: TokenStore,
29 analysis: &'a Analysis,
31 def_map: HashMap<Definition, TokenId>,
35 pub struct ReferenceData {
37 pub is_definition: bool,
41 pub struct TokenStaticData {
42 pub hover: Option<HoverResult>,
43 pub definition: Option<FileRange>,
44 pub references: Vec<ReferenceData>,
45 pub moniker: Option<MonikerResult>,
/// Opaque handle to an entry in a `TokenStore` (a plain index newtype).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TokenId(usize);

impl TokenId {
    /// Returns the underlying index, e.g. for emitting numeric LSIF ids.
    pub fn raw(self) -> usize {
        self.0
    }
}
57 #[derive(Default, Debug)]
58 pub struct TokenStore(Vec<TokenStaticData>);
61 pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
62 let id = TokenId(self.0.len());
67 pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
71 pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
75 pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
76 self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
81 pub struct StaticIndexedFile {
84 pub inlay_hints: Vec<InlayHint>,
85 pub tokens: Vec<(TextRange, TokenId)>,
88 fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
89 let mut worklist: Vec<_> =
90 Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
91 let mut modules = Vec::new();
93 while let Some(module) = worklist.pop() {
95 worklist.extend(module.children(db));
101 impl StaticIndex<'_> {
102 fn add_file(&mut self, file_id: FileId) {
103 let current_crate = crates_for(self.db, file_id).pop().map(Into::into);
104 let folds = self.analysis.folding_ranges(file_id).unwrap();
105 let inlay_hints = self
111 parameter_hints: true,
112 chaining_hints: true,
113 closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock,
114 lifetime_elision_hints: crate::LifetimeElisionHints::Never,
115 reborrow_hints: crate::ReborrowHints::Never,
116 hide_named_constructor_hints: false,
117 hide_closure_initialization_hints: false,
118 param_names_for_lifetime_elision_hints: false,
119 binding_mode_hints: false,
120 max_length: Some(25),
121 closing_brace_hints_min_lines: Some(25),
128 let sema = hir::Semantics::new(self.db);
129 let tokens_or_nodes = sema.parse(file_id).syntax().clone();
130 let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
131 syntax::NodeOrToken::Node(_) => None,
132 syntax::NodeOrToken::Token(x) => Some(x),
134 let hover_config = HoverConfig {
135 links_in_hover: true,
136 documentation: Some(HoverDocFormat::Markdown),
139 let tokens = tokens.filter(|token| {
142 IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
145 let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
146 for token in tokens {
147 let range = token.text_range();
148 let node = token.parent().unwrap();
149 let def = match get_definition(&sema, token.clone()) {
153 let id = if let Some(x) = self.def_map.get(&def) {
156 let x = self.tokens.insert(TokenStaticData {
157 hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
160 .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
162 moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
164 self.def_map.insert(def, x);
167 let token = self.tokens.get_mut(id).unwrap();
168 token.references.push(ReferenceData {
169 range: FileRange { range, file_id },
170 is_definition: match def.try_to_nav(self.db) {
171 Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
175 result.tokens.push((range, id));
177 self.files.push(result);
180 pub fn compute(analysis: &Analysis) -> StaticIndex<'_> {
181 let db = &*analysis.db;
182 let work = all_modules(db).into_iter().filter(|module| {
183 let file_id = module.definition_source(db).file_id.original_file(db);
184 let source_root = db.file_source_root(file_id);
185 let source_root = db.source_root(source_root);
186 !source_root.is_library
188 let mut this = StaticIndex {
190 tokens: Default::default(),
193 def_map: Default::default(),
195 let mut visited_files = FxHashSet::default();
197 let file_id = module.definition_source(db).file_id.original_file(db);
198 if visited_files.contains(&file_id) {
201 this.add_file(file_id);
203 visited_files.insert(file_id);
209 fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
210 for token in sema.descend_into_macros(token) {
211 let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
212 if let Some(&[x]) = def.as_deref() {
221 use crate::{fixture, StaticIndex};
222 use ide_db::base_db::FileRange;
223 use std::collections::HashSet;
224 use syntax::TextSize;
226 fn check_all_ranges(ra_fixture: &str) {
227 let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
228 let s = StaticIndex::compute(&analysis);
229 let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
231 for (range, _) in f.tokens {
232 let x = FileRange { file_id: f.file_id, range };
233 if !range_set.contains(&x) {
234 panic!("additional range {:?}", x);
236 range_set.remove(&x);
239 if !range_set.is_empty() {
240 panic!("unfound ranges {:?}", range_set);
244 fn check_definitions(ra_fixture: &str) {
245 let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
246 let s = StaticIndex::compute(&analysis);
247 let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
248 for (_, t) in s.tokens.iter() {
249 if let Some(x) = t.definition {
250 if x.range.start() == TextSize::from(0) {
251 // ignore definitions that are whole of file
254 if !range_set.contains(&x) {
255 panic!("additional definition {:?}", x);
257 range_set.remove(&x);
260 if !range_set.is_empty() {
261 panic!("unfound definitions {:?}", range_set);
266 fn struct_and_enum() {
289 //- /main.rs crate:main deps:foo
298 //- /foo/lib.rs crate:foo
312 #[rustc_builtin_macro]
313 //^^^^^^^^^^^^^^^^^^^