git.lizzy.rs Git - rust.git/commitdiff
Remove the state
author	Kirill Bulatov <mail4score@gmail.com>
Fri, 4 Dec 2020 08:02:22 +0000 (10:02 +0200)
committer	Kirill Bulatov <mail4score@gmail.com>
Mon, 7 Dec 2020 21:41:08 +0000 (23:41 +0200)
crates/completion/src/item.rs
crates/completion/src/lib.rs
crates/ide/src/lib.rs
crates/ide_db/src/helpers/insert_use.rs
crates/rust-analyzer/src/global_state.rs
crates/rust-analyzer/src/handlers.rs
crates/rust-analyzer/src/main_loop.rs

diff --git a/crates/completion/src/item.rs b/crates/completion/src/item.rs
index dd25ca75c2fbe07c07c8c1886e4f93f2993e8a1d..2dadf7e5b927779d19266de9f663aea44f1f92ec 100644 (file)
@@ -4,10 +4,10 @@
 
 use hir::{Documentation, ModPath, Mutability};
 use ide_db::helpers::{
-    insert_use::{self, ImportScope, ImportScopePtr, MergeBehaviour},
+    insert_use::{self, ImportScope, MergeBehaviour},
     mod_path_to_ast,
 };
-use syntax::{algo, SyntaxNode, TextRange};
+use syntax::{algo, TextRange};
 use text_edit::TextEdit;
 
 use crate::config::SnippetCap;
@@ -275,32 +275,8 @@ pub struct ImportEdit {
     pub merge_behaviour: Option<MergeBehaviour>,
 }
 
-#[derive(Debug, Clone)]
-pub struct ImportEditPtr {
-    pub import_path: ModPath,
-    pub import_scope: ImportScopePtr,
-    pub merge_behaviour: Option<MergeBehaviour>,
-}
-
-impl ImportEditPtr {
-    pub fn into_import_edit(self, root: &SyntaxNode) -> Option<ImportEdit> {
-        Some(ImportEdit {
-            import_path: self.import_path,
-            import_scope: self.import_scope.into_scope(root)?,
-            merge_behaviour: self.merge_behaviour,
-        })
-    }
-}
-
 impl ImportEdit {
-    pub fn get_edit_ptr(&self) -> ImportEditPtr {
-        ImportEditPtr {
-            import_path: self.import_path.clone(),
-            import_scope: self.import_scope.get_ptr(),
-            merge_behaviour: self.merge_behaviour,
-        }
-    }
-
+    // TODO kb: remove this entirely now, since it's only used once?
     /// Attempts to insert the import to the given scope, producing a text edit.
     /// May return no edit in edge cases, such as scope already containing the import.
     pub fn to_text_edit(&self) -> Option<TextEdit> {
diff --git a/crates/completion/src/lib.rs b/crates/completion/src/lib.rs
index c277cd466d90f0b5b016eba34d0d1ce5602c83fc..c57203c808bbb75fcd5708047967e93368461e1c 100644 (file)
 
 pub use crate::{
     config::{CompletionConfig, CompletionResolveCapability},
-    item::{
-        CompletionItem, CompletionItemKind, CompletionScore, ImportEdit, ImportEditPtr,
-        InsertTextFormat,
-    },
+    item::{CompletionItem, CompletionItemKind, CompletionScore, ImportEdit, InsertTextFormat},
 };
 
 //FIXME: split the following feature into fine-grained features.
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index c52e53d2750c8cd44b3b811154076e8062eae52d..9e38d650634e44f8e47f95aa5f0417c95dd59b68 100644 (file)
@@ -81,7 +81,7 @@ macro_rules! eprintln {
 };
 pub use completion::{
     CompletionConfig, CompletionItem, CompletionItemKind, CompletionResolveCapability,
-    CompletionScore, ImportEdit, ImportEditPtr, InsertTextFormat,
+    CompletionScore, ImportEdit, InsertTextFormat,
 };
 pub use ide_db::{
     call_info::CallInfo,
diff --git a/crates/ide_db/src/helpers/insert_use.rs b/crates/ide_db/src/helpers/insert_use.rs
index 0dae9a541ba200b23a6b698c9a7ba8878359b1e2..040843990d0d93a21c43d4283ccce5541166f6ca 100644 (file)
@@ -11,7 +11,7 @@
         edit::{AstNodeEdit, IndentLevel},
         make, AstNode, PathSegmentKind, VisibilityOwner,
     },
-    AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+    AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
 };
 use test_utils::mark;
 
@@ -21,36 +21,6 @@ pub enum ImportScope {
     Module(ast::ItemList),
 }
 
-impl ImportScope {
-    pub fn get_ptr(&self) -> ImportScopePtr {
-        match self {
-            ImportScope::File(file) => ImportScopePtr::File(SyntaxNodePtr::new(file.syntax())),
-            ImportScope::Module(module) => {
-                ImportScopePtr::Module(SyntaxNodePtr::new(module.syntax()))
-            }
-        }
-    }
-}
-
-#[derive(Debug, Clone)]
-pub enum ImportScopePtr {
-    File(SyntaxNodePtr),
-    Module(SyntaxNodePtr),
-}
-
-impl ImportScopePtr {
-    pub fn into_scope(self, root: &SyntaxNode) -> Option<ImportScope> {
-        Some(match self {
-            ImportScopePtr::File(file_ptr) => {
-                ImportScope::File(ast::SourceFile::cast(file_ptr.to_node(root))?)
-            }
-            ImportScopePtr::Module(module_ptr) => {
-                ImportScope::File(ast::SourceFile::cast(module_ptr.to_node(root))?)
-            }
-        })
-    }
-}
-
 impl ImportScope {
     pub fn from(syntax: SyntaxNode) -> Option<Self> {
         if let Some(module) = ast::Module::cast(syntax.clone()) {
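
For context on the state removed in this file: the deleted ImportScopePtr held SyntaxNodePtrs, position-based handles that only turn back into real nodes when resolved against the root of a (re)parsed tree. A minimal, hedged sketch of that round trip using the syntax crate APIs visible above (the function and the idea of parsing from raw text are illustrative, not part of the commit):

use syntax::{AstNode, SourceFile, SyntaxNodePtr};

fn pointer_roundtrip(text: &str) {
    // Parse the file and take its root node.
    let parse = SourceFile::parse(text);
    let root = parse.tree();

    // A SyntaxNodePtr can outlive the request that created it, which is
    // exactly the cross-request state this commit removes.
    let ptr = SyntaxNodePtr::new(root.syntax());

    // Turning the pointer back into a node requires a root node again,
    // hence the `into_scope(self, root: &SyntaxNode)` signature deleted above.
    let _node = ptr.to_node(root.syntax());
}
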
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs
index 0fe69b99667ae2adb690e7d449b8de955205ae84..a27495d0d80a3ba7fc5a6b185e4e615e5f379543 100644 (file)
@@ -7,7 +7,7 @@
 
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
-use ide::{Analysis, AnalysisHost, Change, FileId, ImportEditPtr};
+use ide::{Analysis, AnalysisHost, Change, FileId};
 use ide_db::base_db::{CrateId, VfsPath};
 use lsp_types::{SemanticTokens, Url};
 use parking_lot::{Mutex, RwLock};
@@ -69,7 +69,6 @@ pub(crate) struct GlobalState {
     pub(crate) config: Config,
     pub(crate) analysis_host: AnalysisHost,
     pub(crate) diagnostics: DiagnosticCollection,
-    pub(crate) completion_resolve_data: Arc<FxHashMap<usize, ImportEditPtr>>,
     pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
     pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
@@ -91,7 +90,6 @@ pub(crate) struct GlobalStateSnapshot {
     pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
-    pub(crate) completion_resolve_data: Arc<FxHashMap<usize, ImportEditPtr>>,
 }
 
 impl GlobalState {
@@ -123,7 +121,6 @@ pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> Global
             config,
             analysis_host,
             diagnostics: Default::default(),
-            completion_resolve_data: Arc::new(FxHashMap::default()),
             mem_docs: FxHashMap::default(),
             semantic_tokens_cache: Arc::new(Default::default()),
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
@@ -194,7 +191,6 @@ pub(crate) fn snapshot(&self) -> GlobalStateSnapshot {
             check_fixes: Arc::clone(&self.diagnostics.check_fixes),
             mem_docs: self.mem_docs.clone(),
             semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
-            completion_resolve_data: Arc::clone(&self.completion_resolve_data),
         }
     }
 
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 1ea1e1f438a38c775e7d03206eba9769cffcbb9d..dacd4ec508143673778b37902c767a710dcb3d0d 100644 (file)
@@ -5,7 +5,6 @@
 use std::{
     io::Write as _,
     process::{self, Stdio},
-    sync::Arc,
 };
 
 use ide::{
@@ -26,7 +25,6 @@
     SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
 };
 use project_model::TargetKind;
-use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use serde_json::to_value;
 use stdx::{format_to, split_once};
@@ -539,11 +537,10 @@ pub(crate) fn handle_runnables(
 }
 
 pub(crate) fn handle_completion(
-    global_state: &mut GlobalState,
+    snap: GlobalStateSnapshot,
     params: lsp_types::CompletionParams,
 ) -> Result<Option<lsp_types::CompletionResponse>> {
     let _p = profile::span("handle_completion");
-    let snap = global_state.snapshot();
     let text_document_url = params.text_document_position.text_document.uri.clone();
     let position = from_proto::file_position(&snap, params.text_document_position)?;
     let completion_triggered_after_single_colon = {
@@ -574,7 +571,6 @@ pub(crate) fn handle_completion(
     };
     let line_index = snap.analysis.file_line_index(position.file_id)?;
     let line_endings = snap.file_line_endings(position.file_id);
-    let mut completion_resolve_data = FxHashMap::default();
 
     let items: Vec<CompletionItem> = items
         .into_iter()
@@ -584,16 +580,15 @@ pub(crate) fn handle_completion(
                 to_proto::completion_item(&line_index, line_endings, item.clone());
 
             if snap.config.completion.resolve_additional_edits_lazily() {
+                // TODO kb: add the resolve data here somehow
                 if let Some(import_edit) = item.import_to_add() {
-                    completion_resolve_data.insert(item_index, import_edit.get_edit_ptr());
-
-                    let data = serde_json::to_value(&CompletionData {
-                        document_url: text_document_url.clone(),
-                        import_id: item_index,
-                    })
-                    .expect(&format!("Should be able to serialize usize value {}", item_index));
+                    //     let data = serde_json::to_value(&CompletionData {
+                    //         document_url: text_document_url.clone(),
+                    //         import_id: item_index,
+                    //     })
+                    //     .expect(&format!("Should be able to serialize usize value {}", item_index));
                     for new_item in &mut new_completion_items {
-                        new_item.data = Some(data.clone());
+                        // new_item.data = Some(data.clone());
                     }
                 }
             }
@@ -602,8 +597,6 @@ pub(crate) fn handle_completion(
         })
         .collect();
 
-    global_state.completion_resolve_data = Arc::new(completion_resolve_data);
-
     let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
     Ok(Some(completion_list.into()))
 }
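
The commented-out block above sketches the intended replacement for the deleted map: attach a small JSON payload to each completion item so that a later completionItem/resolve request carries everything it needs without server-side state. A hedged sketch of that attachment step, using only lsp_types and serde (the helper name attach_resolve_data is hypothetical; the document_url and import_id fields come from the commented-out code and from CompletionResolveData further down):

use lsp_types::{CompletionItem, Url};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct CompletionResolveData {
    document_url: Url,
    import_id: usize,
}

// Serialize the payload once and attach it to every LSP item produced for this
// completion, mirroring the loop in the commented-out block above.
fn attach_resolve_data(
    items: &mut [CompletionItem],
    document_url: &Url,
    import_id: usize,
) -> serde_json::Result<()> {
    let data = serde_json::to_value(CompletionResolveData {
        document_url: document_url.clone(),
        import_id,
    })?;
    for item in items {
        item.data = Some(data.clone());
    }
    Ok(())
}
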
@@ -624,33 +617,31 @@ pub(crate) fn handle_completion_resolve(
         return Ok(original_completion);
     }
 
-    let (import_edit_ptr, document_url) = match original_completion
+    let resolve_data = match original_completion
         .data
-        .as_ref()
-        .map(|data| serde_json::from_value::<CompletionData>(data.clone()))
+        .take()
+        .map(|data| serde_json::from_value::<CompletionResolveData>(data))
         .transpose()?
-        .and_then(|data| {
-            let import_edit_ptr = snap.completion_resolve_data.get(&data.import_id).cloned();
-            Some((import_edit_ptr, data.document_url))
-        }) {
+    {
         Some(data) => data,
         None => return Ok(original_completion),
     };
 
-    let file_id = from_proto::file_id(&snap, &document_url)?;
-    let root = snap.analysis.parse(file_id)?;
-
-    if let Some(import_to_add) =
-        import_edit_ptr.and_then(|import_edit| import_edit.into_import_edit(root.syntax()))
-    {
-        // FIXME actually add all additional edits here? see `to_proto::completion_item` for more
-        append_import_edits(
-            &mut original_completion,
-            &import_to_add,
-            snap.analysis.file_line_index(file_id)?.as_ref(),
-            snap.file_line_endings(file_id),
-        );
-    }
+    // TODO kb: get the resolve data and somehow reparse the whole AST again?
+    // let file_id = from_proto::file_id(&snap, &document_url)?;
+    // let root = snap.analysis.parse(file_id)?;
+
+    // if let Some(import_to_add) =
+    //     import_edit_ptr.and_then(|import_edit| import_edit.into_import_edit(root.syntax()))
+    // {
+    //     // FIXME actually add all additional edits here? see `to_proto::completion_item` for more
+    //     append_import_edits(
+    //         &mut original_completion,
+    //         &import_to_add,
+    //         snap.analysis.file_line_index(file_id)?.as_ref(),
+    //         snap.file_line_endings(file_id),
+    //     );
+    // }
 
     Ok(original_completion)
 }
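
For the TODO above, one plausible shape of the resolve side (a sketch only; the commit deliberately leaves this path commented out): reparse the document at resolve time and rebuild the ImportEdit from plain values rather than resolving a stored pointer. The helper below uses only APIs visible elsewhere in this diff (ImportScope::from, ImportEdit::to_text_edit); how the ModPath and MergeBehaviour would travel through the resolve data is the open question:

use completion::ImportEdit;
use hir::ModPath;
use ide_db::helpers::insert_use::{ImportScope, MergeBehaviour};
use syntax::{AstNode, SourceFile};
use text_edit::TextEdit;

fn recompute_import_edit(
    file_text: &str,
    import_path: ModPath,
    merge_behaviour: Option<MergeBehaviour>,
) -> Option<TextEdit> {
    // Reparse the whole file instead of resolving a previously stored pointer.
    let parse = SourceFile::parse(file_text);
    let import_scope = ImportScope::from(parse.tree().syntax().clone())?;

    // With a fresh scope, the existing pointer-free ImportEdit does the rest.
    ImportEdit { import_path, import_scope, merge_behaviour }.to_text_edit()
}
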
@@ -1614,7 +1605,7 @@ fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>)
 }
 
 #[derive(Debug, Serialize, Deserialize)]
-struct CompletionData {
+struct CompletionResolveData {
     document_url: Url,
     import_id: usize,
 }
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index aad37fde15d3db19b07ba18dddbcc3f21ef0f2f4..95be2ebd397e81aace24ca7da9ef316e527f74f8 100644 (file)
@@ -436,8 +436,6 @@ fn on_request(&mut self, request_received: Instant, req: Request) -> Result<()>
                 handlers::handle_matching_brace(s.snapshot(), p)
             })?
             .on_sync::<lsp_ext::MemoryUsage>(|s, p| handlers::handle_memory_usage(s, p))?
-            .on_sync::<lsp_types::request::Completion>(handlers::handle_completion)?
-            .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
             .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
             .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
             .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
@@ -455,6 +453,8 @@ fn on_request(&mut self, request_received: Instant, req: Request) -> Result<()>
             .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
             .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
             .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
+            .on::<lsp_types::request::Completion>(handlers::handle_completion)
+            .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
             .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
             .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
             .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)
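
The registration move in this last hunk is what the earlier changes enable: on_sync handlers run on the main loop and receive &mut GlobalState, while on handlers receive an immutable GlobalStateSnapshot and can run off the main thread. A simplified sketch of that distinction (this is not the real rust-analyzer RequestDispatcher, only the shape implied by the two handler signatures in this diff):

// Stand-ins for the real types in global_state.rs.
struct GlobalState;
struct GlobalStateSnapshot;

struct RequestDispatcher;

impl RequestDispatcher {
    // Synchronous handlers may mutate global state, so they must run on the
    // main loop; handle_completion was registered here before this commit.
    fn on_sync<P, R>(&mut self, _handler: fn(&mut GlobalState, P) -> R) -> &mut Self {
        // ...dispatch on the main thread...
        self
    }

    // Pooled handlers only see a snapshot, which matches handle_completion's
    // new GlobalStateSnapshot parameter.
    fn on<P, R>(&mut self, _handler: fn(GlobalStateSnapshot, P) -> R) -> &mut Self {
        // ...hand the request off to the thread pool...
        self
    }
}
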