Replaced fold with for loop
[rust.git] / crates / rust-analyzer / src / reload.rs
index 8144554bcab477797d3e4d6ec47c05fa35e92e10..eecc83e02ac499d4b0e40156c4d1db06e3f73002 100644 (file)
@@ -4,8 +4,13 @@
 use flycheck::{FlycheckConfig, FlycheckHandle};
 use hir::db::DefDatabase;
 use ide::Change;
-use ide_db::base_db::{CrateGraph, SourceRoot, VfsPath};
-use project_model::{ProcMacroClient, ProjectWorkspace, WorkspaceBuildScripts};
+use ide_db::base_db::{
+    CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+    SourceRoot, VfsPath,
+};
+use proc_macro_api::{MacroDylib, ProcMacroServer};
+use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
+use syntax::SmolStr;
 use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
 
 use crate::{
@@ -44,7 +49,7 @@ pub(crate) fn update_configuration(&mut self, config: Config) {
             self.analysis_host.update_lru_capacity(self.config.lru_capacity());
         }
         if self.config.linked_projects() != old_config.linked_projects() {
-            self.fetch_workspaces_request()
+            self.fetch_workspaces_queue.request_op()
         } else if self.config.flycheck() != old_config.flycheck() {
             self.reload_flycheck();
         }
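
The dedicated fetch_workspaces_request wrapper is removed; callers now drive the OpQueue directly. Below is a minimal sketch of the request/start/complete state machine implied by the request_op, should_start_op, op_completed and last_op_result calls used in this file; it is an assumption for illustration, not the actual op_queue.rs:

    // Sketch only: field and method names mirror the calls seen in this diff.
    #[derive(Default)]
    pub(crate) struct OpQueue<Output> {
        op_requested: bool,
        op_in_progress: bool,
        last_op_result: Output,
    }

    impl<Output: Default> OpQueue<Output> {
        pub(crate) fn request_op(&mut self) {
            self.op_requested = true;
        }
        pub(crate) fn should_start_op(&mut self) -> bool {
            if self.op_in_progress || !self.op_requested {
                return false;
            }
            self.op_requested = false;
            self.op_in_progress = true;
            true
        }
        pub(crate) fn op_completed(&mut self, result: Output) {
            self.op_in_progress = false;
            self.last_op_result = result;
        }
        pub(crate) fn last_op_result(&self) -> &Output {
            &self.last_op_result
        }
    }
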
@@ -54,65 +59,19 @@ pub(crate) fn update_configuration(&mut self, config: Config) {
             .raw_database_mut()
             .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
     }
-    pub(crate) fn maybe_refresh(&mut self, changes: &[(AbsPathBuf, ChangeKind)]) {
-        if !changes.iter().any(|(path, kind)| is_interesting(path, *kind)) {
-            return;
-        }
-        log::info!(
-            "Requesting workspace reload because of the following changes: {}",
-            itertools::join(
-                changes
-                    .iter()
-                    .filter(|(path, kind)| is_interesting(path, *kind))
-                    .map(|(path, kind)| format!("{}: {:?}", path.display(), kind)),
-                ", "
-            )
-        );
-        self.fetch_workspaces_request();
-
-        fn is_interesting(path: &AbsPath, change_kind: ChangeKind) -> bool {
-            const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
-            const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
-            let file_name = path.file_name().unwrap_or_default();
-
-            if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
-                return true;
-            }
-            if change_kind == ChangeKind::Modify {
-                return false;
-            }
-            if path.extension().unwrap_or_default() != "rs" {
-                return false;
-            }
-            if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
-                return true;
-            }
-            let parent = match path.parent() {
-                Some(it) => it,
-                None => return false,
-            };
-            if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
-                return true;
-            }
-            if file_name == "main.rs" {
-                let grand_parent = match parent.parent() {
-                    Some(it) => it,
-                    None => return false,
-                };
-                if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
-                    return true;
-                }
-            }
-            false
-        }
-    }
-    pub(crate) fn report_new_status_if_needed(&mut self) {
+
+    pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
         let mut status = lsp_ext::ServerStatusParams {
             health: lsp_ext::Health::Ok,
             quiescent: self.is_quiescent(),
             message: None,
         };
 
+        if self.proc_macro_changed {
+            status.health = lsp_ext::Health::Warning;
+            status.message =
+                Some("Reload required due to source changes of a procedural macro.".into())
+        }
         if let Some(error) = self.fetch_build_data_error() {
             status.health = lsp_ext::Health::Warning;
             status.message = Some(error)
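
report_new_status_if_needed is split up: this method now only computes the status, and the deduplicate-and-notify logic deleted below presumably moves to the caller. A hedged sketch of what that caller could look like, reusing the fields and helpers visible in the removed lines (last_reported_status, show_message, server_status_notification, send_notification); the function name and placement are assumptions:

    // Hypothetical caller; mirrors the logic deleted from this function.
    fn report_server_status(state: &mut GlobalState) {
        let status = state.current_status();
        if state.last_reported_status.as_ref() != Some(&status) {
            state.last_reported_status = Some(status.clone());
            if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
                state.show_message(lsp_types::MessageType::Error, message.clone());
            }
            if state.config.server_status_notification() {
                state.send_notification::<lsp_ext::ServerStatusNotification>(status);
            }
        }
    }
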
@@ -129,28 +88,11 @@ pub(crate) fn report_new_status_if_needed(&mut self) {
             status.health = lsp_ext::Health::Error;
             status.message = Some(error)
         }
-
-        if self.last_reported_status.as_ref() != Some(&status) {
-            self.last_reported_status = Some(status.clone());
-
-            if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
-                self.show_message(lsp_types::MessageType::Error, message.clone());
-            }
-
-            if self.config.server_status_notification() {
-                self.send_notification::<lsp_ext::ServerStatusNotification>(status);
-            }
-        }
+        status
     }
 
-    pub(crate) fn fetch_workspaces_request(&mut self) {
-        self.fetch_workspaces_queue.request_op()
-    }
-    pub(crate) fn fetch_workspaces_if_needed(&mut self) {
-        if !self.fetch_workspaces_queue.should_start_op() {
-            return;
-        }
-        log::info!("will fetch workspaces");
+    pub(crate) fn fetch_workspaces(&mut self) {
+        tracing::info!("will fetch workspaces");
 
         self.task_pool.handle.spawn_with_sender({
             let linked_projects = self.config.linked_projects();
@@ -193,28 +135,15 @@ pub(crate) fn fetch_workspaces_if_needed(&mut self) {
                         .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
                 }
 
-                log::info!("did fetch workspaces {:?}", workspaces);
+                tracing::info!("did fetch workspaces {:?}", workspaces);
                 sender
                     .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
                     .unwrap();
             }
         });
     }
-    pub(crate) fn fetch_workspaces_completed(
-        &mut self,
-        workspaces: Vec<anyhow::Result<ProjectWorkspace>>,
-    ) {
-        self.fetch_workspaces_queue.op_completed(workspaces)
-    }
-
-    pub(crate) fn fetch_build_data_request(&mut self) {
-        self.fetch_build_data_queue.request_op();
-    }
-    pub(crate) fn fetch_build_data_if_needed(&mut self) {
-        if !self.fetch_build_data_queue.should_start_op() {
-            return;
-        }
 
+    pub(crate) fn fetch_build_data(&mut self) {
         let workspaces = Arc::clone(&self.workspaces);
         let config = self.config.cargo();
         self.task_pool.handle.spawn_with_sender(move |sender| {
@@ -229,31 +158,17 @@ pub(crate) fn fetch_build_data_if_needed(&mut self) {
             let mut res = Vec::new();
             for ws in workspaces.iter() {
                 res.push(ws.run_build_scripts(&config, &progress));
-                let ws = match ws {
-                    ProjectWorkspace::Cargo { cargo, .. } => cargo,
-                    ProjectWorkspace::DetachedFiles { .. } | ProjectWorkspace::Json { .. } => {
-                        res.push(Ok(WorkspaceBuildScripts::default()));
-                        continue;
-                    }
-                };
-                res.push(WorkspaceBuildScripts::run(&config, ws, &progress))
             }
             sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
         });
     }
-    pub(crate) fn fetch_build_data_completed(
-        &mut self,
-        build_data: (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>),
-    ) {
-        self.fetch_build_data_queue.op_completed(build_data)
-    }
 
     pub(crate) fn switch_workspaces(&mut self) {
         let _p = profile::span("GlobalState::switch_workspaces");
-        log::info!("will switch workspaces");
+        tracing::info!("will switch workspaces");
 
         if let Some(error_message) = self.fetch_workspace_error() {
-            log::error!("failed to switch workspaces: {}", error_message);
+            tracing::error!("failed to switch workspaces: {}", error_message);
             if !self.workspaces.is_empty() {
                 // It only makes sense to switch to a partially broken workspace
                 // if we don't have any workspace at all yet.
@@ -262,7 +177,7 @@ pub(crate) fn switch_workspaces(&mut self) {
         }
 
         if let Some(error_message) = self.fetch_build_data_error() {
-            log::error!("failed to switch build data: {}", error_message);
+            tracing::error!("failed to switch build data: {}", error_message);
         }
 
         let workspaces = self
@@ -302,7 +217,7 @@ fn eq_ignore_build_data<'a>(
 
         if same_workspaces {
             let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
-            if Arc::ptr_eq(&workspaces, &self.workspaces) {
+            if Arc::ptr_eq(workspaces, &self.workspaces) {
                 let workspaces = workspaces
                     .iter()
                     .cloned()
@@ -327,38 +242,33 @@ fn eq_ignore_build_data<'a>(
         }
 
         if let FilesWatcher::Client = self.config.files().watcher {
-            if self.config.did_change_watched_files_dynamic_registration() {
-                let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
-                    watchers: self
-                        .workspaces
-                        .iter()
-                        .flat_map(|ws| ws.to_roots())
-                        .filter(|it| it.is_member)
-                        .flat_map(|root| {
-                            root.include.into_iter().flat_map(|it| {
-                                [
-                                    format!("{}/**/*.rs", it.display()),
-                                    format!("{}/**/Cargo.toml", it.display()),
-                                    format!("{}/**/Cargo.lock", it.display()),
-                                ]
-                            })
-                        })
-                        .map(|glob_pattern| lsp_types::FileSystemWatcher {
-                            glob_pattern,
-                            kind: None,
+            let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
+                watchers: self
+                    .workspaces
+                    .iter()
+                    .flat_map(|ws| ws.to_roots())
+                    .filter(|it| it.is_local)
+                    .flat_map(|root| {
+                        root.include.into_iter().flat_map(|it| {
+                            [
+                                format!("{}/**/*.rs", it.display()),
+                                format!("{}/**/Cargo.toml", it.display()),
+                                format!("{}/**/Cargo.lock", it.display()),
+                            ]
                         })
-                        .collect(),
-                };
-                let registration = lsp_types::Registration {
-                    id: "workspace/didChangeWatchedFiles".to_string(),
-                    method: "workspace/didChangeWatchedFiles".to_string(),
-                    register_options: Some(serde_json::to_value(registration_options).unwrap()),
-                };
-                self.send_request::<lsp_types::request::RegisterCapability>(
-                    lsp_types::RegistrationParams { registrations: vec![registration] },
-                    |_, _| (),
-                );
-            }
+                    })
+                    .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
+                    .collect(),
+            };
+            let registration = lsp_types::Registration {
+                id: "workspace/didChangeWatchedFiles".to_string(),
+                method: "workspace/didChangeWatchedFiles".to_string(),
+                register_options: Some(serde_json::to_value(registration_options).unwrap()),
+            };
+            self.send_request::<lsp_types::request::RegisterCapability>(
+                lsp_types::RegistrationParams { registrations: vec![registration] },
+                |_, _| (),
+            );
         }
 
         let mut change = Change::new();
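
For reference, the watcher registration above expands every local source root into three glob patterns. A purely illustrative example of the resulting payload for one root whose include path is /path/to/project (the path is hypothetical):

    // Illustrative only: what the registration options above would contain
    // for a single local root at /path/to/project.
    fn example_registration_options() -> lsp_types::DidChangeWatchedFilesRegistrationOptions {
        lsp_types::DidChangeWatchedFilesRegistrationOptions {
            watchers: vec![
                lsp_types::FileSystemWatcher {
                    glob_pattern: "/path/to/project/**/*.rs".to_string(),
                    kind: None,
                },
                lsp_types::FileSystemWatcher {
                    glob_pattern: "/path/to/project/**/Cargo.toml".to_string(),
                    kind: None,
                },
                lsp_types::FileSystemWatcher {
                    glob_pattern: "/path/to/project/**/Cargo.lock".to_string(),
                    kind: None,
                },
            ],
        }
    }
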
@@ -369,10 +279,10 @@ fn eq_ignore_build_data<'a>(
         if self.proc_macro_client.is_none() {
             self.proc_macro_client = match self.config.proc_macro_srv() {
                 None => None,
-                Some((path, args)) => match ProcMacroClient::extern_process(path.clone(), args) {
+                Some((path, args)) => match ProcMacroServer::spawn(path.clone(), args) {
                     Ok(it) => Some(it),
                     Err(err) => {
-                        log::error!(
+                        tracing::error!(
                             "Failed to run proc_macro_srv from path {}, error: {:?}",
                             path.display(),
                             err
@@ -396,27 +306,36 @@ fn eq_ignore_build_data<'a>(
 
         // Create crate graph from all the workspaces
         let crate_graph = {
-            let mut crate_graph = CrateGraph::default();
+            let proc_macro_client = self.proc_macro_client.as_ref();
+            let mut load_proc_macro = move |path: &AbsPath, dummy_replace: &_| {
+                load_proc_macro(proc_macro_client, path, dummy_replace)
+            };
+
             let vfs = &mut self.vfs.write().0;
             let loader = &mut self.loader;
             let mem_docs = &self.mem_docs;
-            let mut load = |path: &AbsPath| {
+            let mut load = move |path: &AbsPath| {
                 let _p = profile::span("GlobalState::load");
                 let vfs_path = vfs::VfsPath::from(path.to_path_buf());
-                if !mem_docs.contains_key(&vfs_path) {
+                if !mem_docs.contains(&vfs_path) {
                     let contents = loader.handle.load_sync(path);
                     vfs.set_file_contents(vfs_path.clone(), contents);
                 }
                 let res = vfs.file_id(&vfs_path);
                 if res.is_none() {
-                    log::warn!("failed to load {}", path.display())
+                    tracing::warn!("failed to load {}", path.display())
                 }
                 res
             };
+
+            let mut crate_graph = CrateGraph::default();
             for ws in self.workspaces.iter() {
-                crate_graph.extend(ws.to_crate_graph(self.proc_macro_client.as_ref(), &mut load));
+                crate_graph.extend(ws.to_crate_graph(
+                    self.config.dummy_replacements(),
+                    &mut load_proc_macro,
+                    &mut load,
+                ));
             }
-
             crate_graph
         };
         change.set_crate_graph(crate_graph);
@@ -426,7 +345,7 @@ fn eq_ignore_build_data<'a>(
         self.analysis_host.apply_change(change);
         self.process_changes();
         self.reload_flycheck();
-        log::info!("did switch workspaces");
+        tracing::info!("did switch workspaces");
     }
 
     fn fetch_workspace_error(&self) -> Option<String> {
@@ -446,19 +365,29 @@ fn fetch_workspace_error(&self) -> Option<String> {
     }
 
     fn fetch_build_data_error(&self) -> Option<String> {
-        let mut buf = String::new();
+        let mut buf = "rust-analyzer failed to run build scripts:\n".to_string();
+        let mut has_errors = false;
 
         for ws in &self.fetch_build_data_queue.last_op_result().1 {
-            if let Err(err) = ws {
-                stdx::format_to!(buf, "rust-analyzer failed to run custom build: {:#}\n", err);
+            match ws {
+                Ok(data) => {
+                    if let Some(err) = data.error() {
+                        has_errors = true;
+                        stdx::format_to!(buf, "{:#}\n", err);
+                    }
+                }
+                Err(err) => {
+                    has_errors = true;
+                    stdx::format_to!(buf, "{:#}\n", err);
+                }
             }
         }
 
-        if buf.is_empty() {
-            return None;
+        if has_errors {
+            Some(buf)
+        } else {
+            None
         }
-
-        Some(buf)
     }
 
     fn reload_flycheck(&mut self) {
@@ -494,7 +423,7 @@ fn reload_flycheck(&mut self) {
                     id,
                     Box::new(move |msg| sender.send(msg).unwrap()),
                     config.clone(),
-                    root.to_path_buf().into(),
+                    root.to_path_buf(),
                 )
             })
             .collect();
@@ -539,12 +468,12 @@ pub(crate) fn new(
                 vfs::loader::Entry::Directories(dirs)
             };
 
-            if root.is_member {
+            if root.is_local {
                 res.watch.push(res.load.len());
             }
             res.load.push(entry);
 
-            if root.is_member {
+            if root.is_local {
                 local_filesets.push(fsc.len());
             }
             fsc.add_file_set(file_set_roots)
@@ -581,3 +510,129 @@ pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
             .collect()
     }
 }
+
+/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
+/// with an identity dummy expander.
+pub(crate) fn load_proc_macro(
+    client: Option<&ProcMacroServer>,
+    path: &AbsPath,
+    dummy_replace: &[Box<str>],
+) -> Vec<ProcMacro> {
+    let dylib = match MacroDylib::new(path.to_path_buf()) {
+        Ok(it) => it,
+        Err(err) => {
+            // FIXME: that's not really right -- we store this error in a
+            // persistent status.
+            tracing::warn!("failed to load proc macro: {}", err);
+            return Vec::new();
+        }
+    };
+
+    return client
+        .map(|it| it.load_dylib(dylib))
+        .into_iter()
+        .flat_map(|it| match it {
+            Ok(Ok(macros)) => macros,
+            Err(err) => {
+                tracing::error!("proc macro server crashed: {}", err);
+                Vec::new()
+            }
+            Ok(Err(err)) => {
+                // FIXME: that's not really right -- we store this error in a
+                // persistent status.
+                tracing::warn!("failed to load proc macro: {}", err);
+                Vec::new()
+            }
+        })
+        .map(|expander| expander_to_proc_macro(expander, dummy_replace))
+        .collect();
+
+    fn expander_to_proc_macro(
+        expander: proc_macro_api::ProcMacro,
+        dummy_replace: &[Box<str>],
+    ) -> ProcMacro {
+        let name = SmolStr::from(expander.name());
+        let kind = match expander.kind() {
+            proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
+            proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
+            proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
+        };
+        let expander: Arc<dyn ProcMacroExpander> =
+            if dummy_replace.iter().any(|replace| &**replace == name) {
+                Arc::new(DummyExpander)
+            } else {
+                Arc::new(Expander(expander))
+            };
+        ProcMacro { name, kind, expander }
+    }
+
+    #[derive(Debug)]
+    struct Expander(proc_macro_api::ProcMacro);
+
+    impl ProcMacroExpander for Expander {
+        fn expand(
+            &self,
+            subtree: &tt::Subtree,
+            attrs: Option<&tt::Subtree>,
+            env: &Env,
+        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+            let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+            match self.0.expand(subtree, attrs, env) {
+                Ok(Ok(subtree)) => Ok(subtree),
+                Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+                Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+            }
+        }
+    }
+
+    /// Dummy identity expander, used for proc-macros that are deliberately ignored by the user.
+    #[derive(Debug)]
+    struct DummyExpander;
+
+    impl ProcMacroExpander for DummyExpander {
+        fn expand(
+            &self,
+            subtree: &tt::Subtree,
+            _: Option<&tt::Subtree>,
+            _: &Env,
+        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+            Ok(subtree.clone())
+        }
+    }
+}
+
+pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
+    const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
+    const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
+    let file_name = path.file_name().unwrap_or_default();
+
+    if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
+        return true;
+    }
+    if change_kind == ChangeKind::Modify {
+        return false;
+    }
+    if path.extension().unwrap_or_default() != "rs" {
+        return false;
+    }
+    if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
+        return true;
+    }
+    let parent = match path.parent() {
+        Some(it) => it,
+        None => return false,
+    };
+    if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
+        return true;
+    }
+    if file_name == "main.rs" {
+        let grand_parent = match parent.parent() {
+            Some(it) => it,
+            None => return false,
+        };
+        if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
+            return true;
+        }
+    }
+    false
+}
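
should_refresh_for_change replaces the private is_interesting helper from the removed maybe_refresh method, making the decision reusable outside this type. A hedged sketch of a call site, mirroring the removed maybe_refresh logic together with the new OpQueue usage (the handler name is illustrative):

    // Sketch of a caller; mirrors the removed maybe_refresh.
    fn on_watched_file_changes(state: &mut GlobalState, changes: &[(AbsPathBuf, ChangeKind)]) {
        if changes.iter().any(|(path, kind)| should_refresh_for_change(path, *kind)) {
            tracing::info!("requesting workspace reload");
            state.fetch_workspaces_queue.request_op();
        }
    }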