crates/rust-analyzer/src/reload.rs
feat: Add config to replace specific proc-macros with dummy expanders
//! Project loading & configuration updates
use std::{mem, sync::Arc};

use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::base_db::{
    CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
    SourceRoot, VfsPath,
};
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use syntax::SmolStr;
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};

use crate::{
    config::{Config, FilesWatcher, LinkedProject},
    global_state::GlobalState,
    lsp_ext,
    main_loop::Task,
};

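/// Progress messages sent back to the main loop by the background task that
/// loads project workspaces.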
#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
    Begin,
    Report(String),
    End(Vec<anyhow::Result<ProjectWorkspace>>),
}

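/// Progress messages sent back to the main loop by the background task that
/// runs the workspaces' build scripts.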
#[derive(Debug)]
pub(crate) enum BuildDataProgress {
    Begin,
    Report(String),
    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}

impl GlobalState {
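    /// The server is "quiescent" when no workspace fetch, build-script run, or
    /// VFS load is still in flight.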
    pub(crate) fn is_quiescent(&self) -> bool {
        !(self.fetch_workspaces_queue.op_in_progress()
            || self.fetch_build_data_queue.op_in_progress()
            || self.vfs_progress_config_version < self.vfs_config_version
            || self.vfs_progress_n_done < self.vfs_progress_n_total)
    }

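    /// Applies a new `Config`, refetching workspaces or restarting flycheck
    /// only when the corresponding settings actually changed.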
    pub(crate) fn update_configuration(&mut self, config: Config) {
        let _p = profile::span("GlobalState::update_configuration");
        let old_config = mem::replace(&mut self.config, Arc::new(config));
        if self.config.lru_capacity() != old_config.lru_capacity() {
            self.analysis_host.update_lru_capacity(self.config.lru_capacity());
        }
        if self.config.linked_projects() != old_config.linked_projects() {
            self.fetch_workspaces_queue.request_op()
        } else if self.config.flycheck() != old_config.flycheck() {
            self.reload_flycheck();
        }

        // Apply experimental feature flags.
        self.analysis_host
            .raw_database_mut()
            .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
    }

    pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
        let mut status = lsp_ext::ServerStatusParams {
            health: lsp_ext::Health::Ok,
            quiescent: self.is_quiescent(),
            message: None,
        };

        if self.proc_macro_changed {
            status.health = lsp_ext::Health::Warning;
            status.message =
                Some("Reload required due to source changes of a procedural macro.".into())
        }
        if let Some(error) = self.fetch_build_data_error() {
            status.health = lsp_ext::Health::Warning;
            status.message = Some(error)
        }
        if !self.config.cargo_autoreload()
            && self.is_quiescent()
            && self.fetch_workspaces_queue.op_requested()
        {
            status.health = lsp_ext::Health::Warning;
            status.message = Some("Workspace reload required".to_string())
        }

        if let Some(error) = self.fetch_workspace_error() {
            status.health = lsp_ext::Health::Error;
            status.message = Some(error)
        }
        status
    }

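    /// Loads all linked projects (plus any detached files) on the task pool,
    /// streaming progress back to the main loop as `Task::FetchWorkspace`.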
    pub(crate) fn fetch_workspaces(&mut self) {
        tracing::info!("will fetch workspaces");

        self.task_pool.handle.spawn_with_sender({
            let linked_projects = self.config.linked_projects();
            let detached_files = self.config.detached_files().to_vec();
            let cargo_config = self.config.cargo();

            move |sender| {
                let progress = {
                    let sender = sender.clone();
                    move |msg| {
                        sender
                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
                            .unwrap()
                    }
                };

                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();

                let mut workspaces = linked_projects
                    .iter()
                    .map(|project| match project {
                        LinkedProject::ProjectManifest(manifest) => {
                            project_model::ProjectWorkspace::load(
                                manifest.clone(),
                                &cargo_config,
                                &progress,
                            )
                        }
                        LinkedProject::InlineJsonProject(it) => {
                            project_model::ProjectWorkspace::load_inline(
                                it.clone(),
                                cargo_config.target.as_deref(),
                            )
                        }
                    })
                    .collect::<Vec<_>>();

                if !detached_files.is_empty() {
                    workspaces
                        .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
                }

                tracing::info!("did fetch workspaces {:?}", workspaces);
                sender
                    .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
                    .unwrap();
            }
        });
    }

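    /// Runs the build scripts of every loaded workspace on the task pool and
    /// reports the results as `Task::FetchBuildData`.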
    pub(crate) fn fetch_build_data(&mut self) {
        let workspaces = Arc::clone(&self.workspaces);
        let config = self.config.cargo();
        self.task_pool.handle.spawn_with_sender(move |sender| {
            sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();

            let progress = {
                let sender = sender.clone();
                move |msg| {
                    sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                }
            };
            let mut res = Vec::new();
            for ws in workspaces.iter() {
                res.push(ws.run_build_scripts(&config, &progress));
            }
            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
        });
    }

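    /// Replaces the active workspaces with the result of the latest fetch:
    /// merges in fresh build-script output when the workspace set is otherwise
    /// unchanged, re-registers file watchers, spawns the proc-macro server if
    /// it is not running yet, and rebuilds the crate graph.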
    pub(crate) fn switch_workspaces(&mut self) {
        let _p = profile::span("GlobalState::switch_workspaces");
        tracing::info!("will switch workspaces");

        if let Some(error_message) = self.fetch_workspace_error() {
            tracing::error!("failed to switch workspaces: {}", error_message);
            if !self.workspaces.is_empty() {
                // It only makes sense to switch to a partially broken workspace
                // if we don't have any workspace at all yet.
                return;
            }
        }

        if let Some(error_message) = self.fetch_build_data_error() {
            tracing::error!("failed to switch build data: {}", error_message);
        }

        let workspaces = self
            .fetch_workspaces_queue
            .last_op_result()
            .iter()
            .filter_map(|res| res.as_ref().ok().cloned())
            .collect::<Vec<_>>();

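        // Compare workspaces while ignoring build-script results, so that
        // freshly arrived build data alone does not count as a workspace change.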
        fn eq_ignore_build_data<'a>(
            left: &'a ProjectWorkspace,
            right: &'a ProjectWorkspace,
        ) -> bool {
            let key = |p: &'a ProjectWorkspace| match p {
                ProjectWorkspace::Cargo {
                    cargo,
                    sysroot,
                    rustc,
                    rustc_cfg,
                    cfg_overrides,

                    build_scripts: _,
                } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
                _ => None,
            };
            match (key(left), key(right)) {
                (Some(lk), Some(rk)) => lk == rk,
                _ => left == right,
            }
        }

        let same_workspaces = workspaces.len() == self.workspaces.len()
            && workspaces
                .iter()
                .zip(self.workspaces.iter())
                .all(|(l, r)| eq_ignore_build_data(l, r));

        if same_workspaces {
            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
            if Arc::ptr_eq(workspaces, &self.workspaces) {
                let workspaces = workspaces
                    .iter()
                    .cloned()
                    .zip(build_scripts)
                    .map(|(mut ws, bs)| {
                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
                        ws
                    })
                    .collect::<Vec<_>>();

                // Workspaces are the same, but we've updated build data.
                self.workspaces = Arc::new(workspaces);
            } else {
                // Current build scripts do not match the version of the active
                // workspace, so there's nothing for us to update.
                return;
            }
        } else {
            // Here, we completely changed the workspace (Cargo.toml edit), so
            // we don't care about build-script results, they are stale.
            self.workspaces = Arc::new(workspaces)
        }

        if let FilesWatcher::Client = self.config.files().watcher {
            let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
                watchers: self
                    .workspaces
                    .iter()
                    .flat_map(|ws| ws.to_roots())
                    .filter(|it| it.is_local)
                    .flat_map(|root| {
                        root.include.into_iter().flat_map(|it| {
                            [
                                format!("{}/**/*.rs", it.display()),
                                format!("{}/**/Cargo.toml", it.display()),
                                format!("{}/**/Cargo.lock", it.display()),
                            ]
                        })
                    })
                    .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
                    .collect(),
            };
            let registration = lsp_types::Registration {
                id: "workspace/didChangeWatchedFiles".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(serde_json::to_value(registration_options).unwrap()),
            };
            self.send_request::<lsp_types::request::RegisterCapability>(
                lsp_types::RegistrationParams { registrations: vec![registration] },
                |_, _| (),
            );
        }

        let mut change = Change::new();

        let files_config = self.config.files();
        let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

        if self.proc_macro_client.is_none() {
            self.proc_macro_client = match self.config.proc_macro_srv() {
                None => None,
                Some((path, args)) => match ProcMacroServer::spawn(path.clone(), args) {
                    Ok(it) => Some(it),
                    Err(err) => {
                        tracing::error!(
                            "Failed to run proc_macro_srv from path {}, error: {:?}",
                            path.display(),
                            err
                        );
                        None
                    }
                },
            };
            self.analysis_host
                .raw_database_mut()
                .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
        }

        let watch = match files_config.watcher {
            FilesWatcher::Client => vec![],
            FilesWatcher::Notify => project_folders.watch,
        };
        self.vfs_config_version += 1;
        self.loader.handle.set_config(vfs::loader::Config {
            load: project_folders.load,
            watch,
            version: self.vfs_config_version,
        });

        // Create crate graph from all the workspaces
        let crate_graph = {
            let proc_macro_client = self.proc_macro_client.as_ref();
            let mut load_proc_macro = move |path: &AbsPath, dummy_replace: &_| {
                load_proc_macro(proc_macro_client, path, dummy_replace)
            };

            let vfs = &mut self.vfs.write().0;
            let loader = &mut self.loader;
            let mem_docs = &self.mem_docs;
            let mut load = move |path: &AbsPath| {
                let _p = profile::span("GlobalState::load");
                let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                if !mem_docs.contains(&vfs_path) {
                    let contents = loader.handle.load_sync(path);
                    vfs.set_file_contents(vfs_path.clone(), contents);
                }
                let res = vfs.file_id(&vfs_path);
                if res.is_none() {
                    tracing::warn!("failed to load {}", path.display())
                }
                res
            };

            let mut crate_graph = CrateGraph::default();
            for ws in self.workspaces.iter() {
                crate_graph.extend(ws.to_crate_graph(
                    self.config.dummy_replacements(),
                    &mut load_proc_macro,
                    &mut load,
                ));
            }
            crate_graph
        };
        change.set_crate_graph(crate_graph);

        self.source_root_config = project_folders.source_root_config;

        self.analysis_host.apply_change(change);
        self.process_changes();
        self.reload_flycheck();
        tracing::info!("did switch workspaces");
    }

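    /// Collects the errors from the last workspace fetch, if any.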
    fn fetch_workspace_error(&self) -> Option<String> {
        let mut buf = String::new();

        for ws in self.fetch_workspaces_queue.last_op_result() {
            if let Err(err) = ws {
                stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
            }
        }

        if buf.is_empty() {
            return None;
        }

        Some(buf)
    }

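    /// Collects the errors reported by the last build-script run, if any.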
    fn fetch_build_data_error(&self) -> Option<String> {
        let mut buf = "rust-analyzer failed to run build scripts:\n".to_string();
        let mut has_errors = false;

        for ws in &self.fetch_build_data_queue.last_op_result().1 {
            match ws {
                Ok(data) => {
                    if let Some(err) = data.error() {
                        has_errors = true;
                        stdx::format_to!(buf, "{:#}\n", err);
                    }
                }
                Err(err) => {
                    has_errors = true;
                    stdx::format_to!(buf, "{:#}\n", err);
                }
            }
        }

        if has_errors {
            Some(buf)
        } else {
            None
        }
    }

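    /// Drops the current flycheck handles and spawns a new one per workspace
    /// that supports checking under the current configuration.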
    fn reload_flycheck(&mut self) {
        let _p = profile::span("GlobalState::reload_flycheck");
        let config = match self.config.flycheck() {
            Some(it) => it,
            None => {
                self.flycheck = Vec::new();
                return;
            }
        };

        let sender = self.flycheck_sender.clone();
        self.flycheck = self
            .workspaces
            .iter()
            .enumerate()
            .filter_map(|(id, w)| match w {
                ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
                ProjectWorkspace::Json { project, .. } => {
                    // Enable flychecks for json projects if a custom flycheck command was supplied
                    // in the workspace configuration.
                    match config {
                        FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
                        _ => None,
                    }
                }
                ProjectWorkspace::DetachedFiles { .. } => None,
            })
            .map(|(id, root)| {
                let sender = sender.clone();
                FlycheckHandle::spawn(
                    id,
                    Box::new(move |msg| sender.send(msg).unwrap()),
                    config.clone(),
                    root.to_path_buf(),
                )
            })
            .collect();
    }
}

#[derive(Default)]
pub(crate) struct ProjectFolders {
    pub(crate) load: Vec<vfs::loader::Entry>,
    pub(crate) watch: Vec<usize>,
    pub(crate) source_root_config: SourceRootConfig,
}

impl ProjectFolders {
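    /// Derives the VFS load/watch entries and the file-set layout from the
    /// workspace roots, applying the globally configured excludes.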
    pub(crate) fn new(
        workspaces: &[ProjectWorkspace],
        global_excludes: &[AbsPathBuf],
    ) -> ProjectFolders {
        let mut res = ProjectFolders::default();
        let mut fsc = FileSetConfig::builder();
        let mut local_filesets = vec![];

        for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
            let file_set_roots: Vec<VfsPath> =
                root.include.iter().cloned().map(VfsPath::from).collect();

            let entry = {
                let mut dirs = vfs::loader::Directories::default();
                dirs.extensions.push("rs".into());
                dirs.include.extend(root.include);
                dirs.exclude.extend(root.exclude);
                for excl in global_excludes {
                    if dirs
                        .include
                        .iter()
                        .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
                    {
                        dirs.exclude.push(excl.clone());
                    }
                }

                vfs::loader::Entry::Directories(dirs)
            };

            if root.is_local {
                res.watch.push(res.load.len());
            }
            res.load.push(entry);

            if root.is_local {
                local_filesets.push(fsc.len());
            }
            fsc.add_file_set(file_set_roots)
        }

        let fsc = fsc.build();
        res.source_root_config = SourceRootConfig { fsc, local_filesets };

        res
    }
}

#[derive(Default, Debug)]
pub(crate) struct SourceRootConfig {
    pub(crate) fsc: FileSetConfig,
    pub(crate) local_filesets: Vec<usize>,
}

impl SourceRootConfig {
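    /// Splits the VFS into source roots, marking each one as local or library
    /// depending on whether its file set came from a local workspace root.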
    pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
        let _p = profile::span("SourceRootConfig::partition");
        self.fsc
            .partition(vfs)
            .into_iter()
            .enumerate()
            .map(|(idx, file_set)| {
                let is_local = self.local_filesets.contains(&idx);
                if is_local {
                    SourceRoot::new_local(file_set)
                } else {
                    SourceRoot::new_library(file_set)
                }
            })
            .collect()
    }
}

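/// Loads the proc macros from the dylib at `path`. Macros whose names appear
/// in `dummy_replace` get a `DummyExpander` instead of a real, server-backed
/// expander; this is what the dummy-expander replacement config hooks into.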
pub(crate) fn load_proc_macro(
    client: Option<&ProcMacroServer>,
    path: &AbsPath,
    dummy_replace: &[Box<str>],
) -> Vec<ProcMacro> {
    let dylib = match MacroDylib::new(path.to_path_buf()) {
        Ok(it) => it,
        Err(err) => {
            // FIXME: that's not really right -- we store this error in a
            // persistent status.
            tracing::warn!("failed to load proc macro: {}", err);
            return Vec::new();
        }
    };

    return client
        .map(|it| it.load_dylib(dylib))
        .into_iter()
        .flat_map(|it| match it {
            Ok(Ok(macros)) => macros,
            Err(err) => {
                tracing::error!("proc macro server crashed: {}", err);
                Vec::new()
            }
            Ok(Err(err)) => {
                // FIXME: that's not really right -- we store this error in a
                // persistent status.
                tracing::warn!("failed to load proc macro: {}", err);
                Vec::new()
            }
        })
        .map(|expander| expander_to_proc_macro(expander, dummy_replace))
        .collect();

    fn expander_to_proc_macro(
        expander: proc_macro_api::ProcMacro,
        dummy_replace: &[Box<str>],
    ) -> ProcMacro {
        let name = SmolStr::from(expander.name());
        let kind = match expander.kind() {
            proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
            proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
            proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
        };
        let expander: Arc<dyn ProcMacroExpander> =
            if dummy_replace.iter().any(|replace| &**replace == name) {
                Arc::new(DummyExpander)
            } else {
                Arc::new(Expander(expander))
            };
        ProcMacro { name, kind, expander }
    }

    #[derive(Debug)]
    struct Expander(proc_macro_api::ProcMacro);

    impl ProcMacroExpander for Expander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            attrs: Option<&tt::Subtree>,
            env: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
            match self.0.expand(subtree, attrs, env) {
                Ok(Ok(subtree)) => Ok(subtree),
                Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
                Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
            }
        }
    }

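    /// Expander used for macros the user asked to replace: it performs an
    /// identity expansion, returning the input token tree unchanged.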
    #[derive(Debug)]
    struct DummyExpander;

    impl ProcMacroExpander for DummyExpander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            _: Option<&tt::Subtree>,
            _: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            Ok(subtree.clone())
        }
    }
}

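/// Heuristic deciding whether a file change may alter the project structure
/// and therefore warrants a workspace reload: manifest changes always do,
/// plain edits to existing `.rs` files never do, and created or removed `.rs`
/// files matter only in locations that imply new Cargo targets.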
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
    const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
    const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
    let file_name = path.file_name().unwrap_or_default();

    if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
        return true;
    }
    if change_kind == ChangeKind::Modify {
        return false;
    }
    if path.extension().unwrap_or_default() != "rs" {
        return false;
    }
    if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
        return true;
    }
    let parent = match path.parent() {
        Some(it) => it,
        None => return false,
    };
    if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
        return true;
    }
    if file_name == "main.rs" {
        let grand_parent = match parent.parent() {
            Some(it) => it,
            None => return false,
        };
        if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
            return true;
        }
    }
    false
}
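
// A minimal sketch illustrating how the refresh heuristic above is expected to
// behave. The paths are invented for the example and assume a Unix-style
// filesystem; `AbsPathBuf::assert` and `ChangeKind` come from the `vfs`/`paths`
// crates already imported by this module.
#[cfg(test)]
mod refresh_heuristic_sketch {
    use super::*;

    #[test]
    fn examples() {
        let abs = |s: &str| AbsPathBuf::assert(std::path::PathBuf::from(s));
        // Manifest changes always require a reload, even plain modifications.
        assert!(should_refresh_for_change(&abs("/w/Cargo.toml"), ChangeKind::Modify));
        // Editing an existing source file does not change the project structure.
        assert!(!should_refresh_for_change(&abs("/w/src/foo.rs"), ChangeKind::Modify));
        // Creating a file that implies a new Cargo target does.
        assert!(should_refresh_for_change(&abs("/w/src/bin/tool.rs"), ChangeKind::Create));
    }
}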