use hir::{Documentation, ModPath, Mutability};
use ide_db::helpers::{
- insert_use::{self, ImportScope, ImportScopePtr, MergeBehaviour},
+ insert_use::{self, ImportScope, MergeBehaviour},
mod_path_to_ast,
};
-use syntax::{algo, SyntaxNode, TextRange};
+use syntax::{algo, TextRange};
use text_edit::TextEdit;
use crate::config::SnippetCap;
pub merge_behaviour: Option<MergeBehaviour>,
}
-#[derive(Debug, Clone)]
-pub struct ImportEditPtr {
- pub import_path: ModPath,
- pub import_scope: ImportScopePtr,
- pub merge_behaviour: Option<MergeBehaviour>,
-}
-
-impl ImportEditPtr {
- pub fn into_import_edit(self, root: &SyntaxNode) -> Option<ImportEdit> {
- Some(ImportEdit {
- import_path: self.import_path,
- import_scope: self.import_scope.into_scope(root)?,
- merge_behaviour: self.merge_behaviour,
- })
- }
-}
-
impl ImportEdit {
- pub fn get_edit_ptr(&self) -> ImportEditPtr {
- ImportEditPtr {
- import_path: self.import_path.clone(),
- import_scope: self.import_scope.get_ptr(),
- merge_behaviour: self.merge_behaviour,
- }
- }
-
+ // TODO kb: consider removing this method entirely now that it has only one caller.
/// Attempts to insert the import to the given scope, producing a text edit.
/// May return no edit in edge cases, such as scope already containing the import.
pub fn to_text_edit(&self) -> Option<TextEdit> {
pub use crate::{
config::{CompletionConfig, CompletionResolveCapability},
- item::{
- CompletionItem, CompletionItemKind, CompletionScore, ImportEdit, ImportEditPtr,
- InsertTextFormat,
- },
+ item::{CompletionItem, CompletionItemKind, CompletionScore, ImportEdit, InsertTextFormat},
};
//FIXME: split the following feature into fine-grained features.
};
pub use completion::{
CompletionConfig, CompletionItem, CompletionItemKind, CompletionResolveCapability,
- CompletionScore, ImportEdit, ImportEditPtr, InsertTextFormat,
+ CompletionScore, ImportEdit, InsertTextFormat,
};
pub use ide_db::{
call_info::CallInfo,
edit::{AstNodeEdit, IndentLevel},
make, AstNode, PathSegmentKind, VisibilityOwner,
},
- AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+ AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
};
use test_utils::mark;
Module(ast::ItemList),
}
-impl ImportScope {
- pub fn get_ptr(&self) -> ImportScopePtr {
- match self {
- ImportScope::File(file) => ImportScopePtr::File(SyntaxNodePtr::new(file.syntax())),
- ImportScope::Module(module) => {
- ImportScopePtr::Module(SyntaxNodePtr::new(module.syntax()))
- }
- }
- }
-}
-
-#[derive(Debug, Clone)]
-pub enum ImportScopePtr {
- File(SyntaxNodePtr),
- Module(SyntaxNodePtr),
-}
-
-impl ImportScopePtr {
- pub fn into_scope(self, root: &SyntaxNode) -> Option<ImportScope> {
- Some(match self {
- ImportScopePtr::File(file_ptr) => {
- ImportScope::File(ast::SourceFile::cast(file_ptr.to_node(root))?)
- }
- ImportScopePtr::Module(module_ptr) => {
- ImportScope::File(ast::SourceFile::cast(module_ptr.to_node(root))?)
- }
- })
- }
-}
-
impl ImportScope {
pub fn from(syntax: SyntaxNode) -> Option<Self> {
if let Some(module) = ast::Module::cast(syntax.clone()) {
use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
-use ide::{Analysis, AnalysisHost, Change, FileId, ImportEditPtr};
+use ide::{Analysis, AnalysisHost, Change, FileId};
use ide_db::base_db::{CrateId, VfsPath};
use lsp_types::{SemanticTokens, Url};
use parking_lot::{Mutex, RwLock};
pub(crate) config: Config,
pub(crate) analysis_host: AnalysisHost,
pub(crate) diagnostics: DiagnosticCollection,
- pub(crate) completion_resolve_data: Arc<FxHashMap<usize, ImportEditPtr>>,
pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
- pub(crate) completion_resolve_data: Arc<FxHashMap<usize, ImportEditPtr>>,
}
impl GlobalState {
config,
analysis_host,
diagnostics: Default::default(),
- completion_resolve_data: Arc::new(FxHashMap::default()),
mem_docs: FxHashMap::default(),
semantic_tokens_cache: Arc::new(Default::default()),
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
check_fixes: Arc::clone(&self.diagnostics.check_fixes),
mem_docs: self.mem_docs.clone(),
semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
- completion_resolve_data: Arc::clone(&self.completion_resolve_data),
}
}
use std::{
io::Write as _,
process::{self, Stdio},
- sync::Arc,
};
use ide::{
SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
};
use project_model::TargetKind;
-use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use serde_json::to_value;
use stdx::{format_to, split_once};
}
pub(crate) fn handle_completion(
- global_state: &mut GlobalState,
+ snap: GlobalStateSnapshot,
params: lsp_types::CompletionParams,
) -> Result<Option<lsp_types::CompletionResponse>> {
let _p = profile::span("handle_completion");
- let snap = global_state.snapshot();
let text_document_url = params.text_document_position.text_document.uri.clone();
let position = from_proto::file_position(&snap, params.text_document_position)?;
let completion_triggered_after_single_colon = {
};
let line_index = snap.analysis.file_line_index(position.file_id)?;
let line_endings = snap.file_line_endings(position.file_id);
- let mut completion_resolve_data = FxHashMap::default();
let items: Vec<CompletionItem> = items
.into_iter()
to_proto::completion_item(&line_index, line_endings, item.clone());
if snap.config.completion.resolve_additional_edits_lazily() {
+ // TODO kb: attach the resolve data (document URL + import id) to the completion item here.
if let Some(import_edit) = item.import_to_add() {
- completion_resolve_data.insert(item_index, import_edit.get_edit_ptr());
-
- let data = serde_json::to_value(&CompletionData {
- document_url: text_document_url.clone(),
- import_id: item_index,
- })
- .expect(&format!("Should be able to serialize usize value {}", item_index));
+ // let data = serde_json::to_value(&CompletionData {
+ // document_url: text_document_url.clone(),
+ // import_id: item_index,
+ // })
+ // .expect(&format!("Should be able to serialize usize value {}", item_index));
for new_item in &mut new_completion_items {
- new_item.data = Some(data.clone());
+ // new_item.data = Some(data.clone());
}
}
}
})
.collect();
- global_state.completion_resolve_data = Arc::new(completion_resolve_data);
-
let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
Ok(Some(completion_list.into()))
}
return Ok(original_completion);
}
- let (import_edit_ptr, document_url) = match original_completion
+ let resolve_data = match original_completion
.data
- .as_ref()
- .map(|data| serde_json::from_value::<CompletionData>(data.clone()))
+ .take()
+ .map(|data| serde_json::from_value::<CompletionResolveData>(data))
.transpose()?
- .and_then(|data| {
- let import_edit_ptr = snap.completion_resolve_data.get(&data.import_id).cloned();
- Some((import_edit_ptr, data.document_url))
- }) {
+ {
Some(data) => data,
None => return Ok(original_completion),
};
- let file_id = from_proto::file_id(&snap, &document_url)?;
- let root = snap.analysis.parse(file_id)?;
-
- if let Some(import_to_add) =
- import_edit_ptr.and_then(|import_edit| import_edit.into_import_edit(root.syntax()))
- {
- // FIXME actually add all additional edits here? see `to_proto::completion_item` for more
- append_import_edits(
- &mut original_completion,
- &import_to_add,
- snap.analysis.file_line_index(file_id)?.as_ref(),
- snap.file_line_endings(file_id),
- );
- }
+ // TODO kb: use the resolve data to re-parse the file and rebuild the import edit for this item?
+ // let file_id = from_proto::file_id(&snap, &document_url)?;
+ // let root = snap.analysis.parse(file_id)?;
+
+ // if let Some(import_to_add) =
+ // import_edit_ptr.and_then(|import_edit| import_edit.into_import_edit(root.syntax()))
+ // {
+ // // FIXME actually add all additional edits here? see `to_proto::completion_item` for more
+ // append_import_edits(
+ // &mut original_completion,
+ // &import_to_add,
+ // snap.analysis.file_line_index(file_id)?.as_ref(),
+ // snap.file_line_endings(file_id),
+ // );
+ // }
Ok(original_completion)
}
}
#[derive(Debug, Serialize, Deserialize)]
-struct CompletionData {
+struct CompletionResolveData {
document_url: Url,
import_id: usize,
}
handlers::handle_matching_brace(s.snapshot(), p)
})?
.on_sync::<lsp_ext::MemoryUsage>(|s, p| handlers::handle_memory_usage(s, p))?
- .on_sync::<lsp_types::request::Completion>(handlers::handle_completion)?
- .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
.on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
.on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
.on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
.on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
.on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
.on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
+ .on::<lsp_types::request::Completion>(handlers::handle_completion)
+ .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
.on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
.on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
.on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)