//! Project loading & configuration updates
use std::{mem, sync::Arc};

use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::base_db::{
    CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
    SourceRoot, VfsPath,
};
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use syntax::SmolStr;
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};

use crate::{
    config::{Config, FilesWatcher, LinkedProject},
    global_state::GlobalState,
    lsp_ext,
    main_loop::Task,
};

#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
    Begin,
    Report(String),
    End(Vec<anyhow::Result<ProjectWorkspace>>),
}

#[derive(Debug)]
pub(crate) enum BuildDataProgress {
    Begin,
    Report(String),
    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}

impl GlobalState {
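    /// Returns `true` when no workspace fetch or build-script run is in flight
    /// and the VFS has finished loading the latest configuration.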
    pub(crate) fn is_quiescent(&self) -> bool {
        !(self.fetch_workspaces_queue.op_in_progress()
            || self.fetch_build_data_queue.op_in_progress()
            || self.vfs_progress_config_version < self.vfs_config_version
            || self.vfs_progress_n_done < self.vfs_progress_n_total)
    }

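    /// Swaps in the new `Config` and reacts to the parts that changed: LRU
    /// capacity, linked projects (triggers a workspace refetch) and flycheck
    /// settings (restarts the checkers).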
    pub(crate) fn update_configuration(&mut self, config: Config) {
        let _p = profile::span("GlobalState::update_configuration");
        let old_config = mem::replace(&mut self.config, Arc::new(config));
        if self.config.lru_capacity() != old_config.lru_capacity() {
            self.analysis_host.update_lru_capacity(self.config.lru_capacity());
        }
        if self.config.linked_projects() != old_config.linked_projects() {
            self.fetch_workspaces_queue.request_op()
        } else if self.config.flycheck() != old_config.flycheck() {
            self.reload_flycheck();
        }

        // Apply experimental feature flags.
        self.analysis_host
            .raw_database_mut()
            .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
    }

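    /// Summarizes the current state for the server-status notification: health
    /// degrades on proc-macro source changes, build-script failures, a pending
    /// manual reload, or a workspace loading error.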
    pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
        let mut status = lsp_ext::ServerStatusParams {
            health: lsp_ext::Health::Ok,
            quiescent: self.is_quiescent(),
            message: None,
        };

        if self.proc_macro_changed {
            status.health = lsp_ext::Health::Warning;
            status.message =
                Some("Reload required due to source changes of a procedural macro.".into())
        }
        if let Some(error) = self.fetch_build_data_error() {
            status.health = lsp_ext::Health::Warning;
            status.message = Some(error)
        }
        if !self.config.cargo_autoreload()
            && self.is_quiescent()
            && self.fetch_workspaces_queue.op_requested()
        {
            status.health = lsp_ext::Health::Warning;
            status.message = Some("Workspace reload required".to_string())
        }

        if let Some(error) = self.fetch_workspace_error() {
            status.health = lsp_ext::Health::Error;
            status.message = Some(error)
        }
        status
    }

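    /// Loads every linked project (and any detached files) on the task pool,
    /// reporting progress back to the main loop as `Task::FetchWorkspace`.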
    pub(crate) fn fetch_workspaces(&mut self) {
        tracing::info!("will fetch workspaces");

        self.task_pool.handle.spawn_with_sender({
            let linked_projects = self.config.linked_projects();
            let detached_files = self.config.detached_files().to_vec();
            let cargo_config = self.config.cargo();

            move |sender| {
                let progress = {
                    let sender = sender.clone();
                    move |msg| {
                        sender
                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
                            .unwrap()
                    }
                };

                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();

                let mut workspaces = linked_projects
                    .iter()
                    .map(|project| match project {
                        LinkedProject::ProjectManifest(manifest) => {
                            project_model::ProjectWorkspace::load(
                                manifest.clone(),
                                &cargo_config,
                                &progress,
                            )
                        }
                        LinkedProject::InlineJsonProject(it) => {
                            project_model::ProjectWorkspace::load_inline(
                                it.clone(),
                                cargo_config.target.as_deref(),
                            )
                        }
                    })
                    .collect::<Vec<_>>();

                if !detached_files.is_empty() {
                    workspaces
                        .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
                }

                tracing::info!("did fetch workspaces {:?}", workspaces);
                sender
                    .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
                    .unwrap();
            }
        });
    }

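    /// Runs build scripts for all current workspaces on the task pool,
    /// streaming progress and the final results as `Task::FetchBuildData`.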
    pub(crate) fn fetch_build_data(&mut self) {
        let workspaces = Arc::clone(&self.workspaces);
        let config = self.config.cargo();
        self.task_pool.handle.spawn_with_sender(move |sender| {
            sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();

            let progress = {
                let sender = sender.clone();
                move |msg| {
                    sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                }
            };
            let mut res = Vec::new();
            for ws in workspaces.iter() {
                res.push(ws.run_build_scripts(&config, &progress));
            }
            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
        });
    }

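    /// Applies the results of the latest workspace and build-script fetches:
    /// registers client file watchers, reconfigures the VFS loader, spawns the
    /// proc-macro server if needed, and rebuilds the crate graph.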
    pub(crate) fn switch_workspaces(&mut self) {
        let _p = profile::span("GlobalState::switch_workspaces");
        tracing::info!("will switch workspaces");

        if let Some(error_message) = self.fetch_workspace_error() {
            tracing::error!("failed to switch workspaces: {}", error_message);
            if !self.workspaces.is_empty() {
                // It only makes sense to switch to a partially broken workspace
                // if we don't have any workspace at all yet.
                return;
            }
        }

        if let Some(error_message) = self.fetch_build_data_error() {
            tracing::error!("failed to switch build data: {}", error_message);
        }

        let workspaces = self
            .fetch_workspaces_queue
            .last_op_result()
            .iter()
            .filter_map(|res| res.as_ref().ok().cloned())
            .collect::<Vec<_>>();

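        // Compare workspaces while ignoring build-script output, so that fresh
        // build data can be merged into the existing workspaces below instead
        // of forcing a full workspace switch.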
        fn eq_ignore_build_data<'a>(
            left: &'a ProjectWorkspace,
            right: &'a ProjectWorkspace,
        ) -> bool {
            let key = |p: &'a ProjectWorkspace| match p {
                ProjectWorkspace::Cargo {
                    cargo,
                    sysroot,
                    rustc,
                    rustc_cfg,
                    cfg_overrides,

                    build_scripts: _,
                } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
                _ => None,
            };
            match (key(left), key(right)) {
                (Some(lk), Some(rk)) => lk == rk,
                _ => left == right,
            }
        }

        let same_workspaces = workspaces.len() == self.workspaces.len()
            && workspaces
                .iter()
                .zip(self.workspaces.iter())
                .all(|(l, r)| eq_ignore_build_data(l, r));

        if same_workspaces {
            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
            if Arc::ptr_eq(workspaces, &self.workspaces) {
                let workspaces = workspaces
                    .iter()
                    .cloned()
                    .zip(build_scripts)
                    .map(|(mut ws, bs)| {
                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
                        ws
                    })
                    .collect::<Vec<_>>();

                // Workspaces are the same, but we've updated build data.
                self.workspaces = Arc::new(workspaces);
            } else {
                // Current build scripts do not match the version of the active
                // workspace, so there's nothing for us to update.
                return;
            }
        } else {
            // Here, we completely changed the workspace (Cargo.toml edit), so
            // we don't care about build-script results, they are stale.
            self.workspaces = Arc::new(workspaces)
        }

        if let FilesWatcher::Client = self.config.files().watcher {
            let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
                watchers: self
                    .workspaces
                    .iter()
                    .flat_map(|ws| ws.to_roots())
                    .filter(|it| it.is_local)
                    .flat_map(|root| {
                        root.include.into_iter().flat_map(|it| {
                            [
                                format!("{}/**/*.rs", it.display()),
                                format!("{}/**/Cargo.toml", it.display()),
                                format!("{}/**/Cargo.lock", it.display()),
                            ]
                        })
                    })
                    .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
                    .collect(),
            };
            let registration = lsp_types::Registration {
                id: "workspace/didChangeWatchedFiles".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(serde_json::to_value(registration_options).unwrap()),
            };
            self.send_request::<lsp_types::request::RegisterCapability>(
                lsp_types::RegistrationParams { registrations: vec![registration] },
                |_, _| (),
            );
        }

        let mut change = Change::new();

        let files_config = self.config.files();
        let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

        if self.proc_macro_client.is_none() {
            self.proc_macro_client = match self.config.proc_macro_srv() {
                None => None,
                Some((path, args)) => match ProcMacroServer::spawn(path.clone(), args) {
                    Ok(it) => Some(it),
                    Err(err) => {
                        tracing::error!(
                            "Failed to run proc_macro_srv from path {}, error: {:?}",
                            path.display(),
                            err
                        );
                        None
                    }
                },
            };
        }

        let watch = match files_config.watcher {
            FilesWatcher::Client => vec![],
            FilesWatcher::Notify => project_folders.watch,
        };
        self.vfs_config_version += 1;
        self.loader.handle.set_config(vfs::loader::Config {
            load: project_folders.load,
            watch,
            version: self.vfs_config_version,
        });

        // Create crate graph from all the workspaces
        let crate_graph = {
            let proc_macro_client = self.proc_macro_client.as_ref();
            let mut load_proc_macro = move |path: &AbsPath, dummy_replace: &_| {
                load_proc_macro(proc_macro_client, path, dummy_replace)
            };

            let vfs = &mut self.vfs.write().0;
            let loader = &mut self.loader;
            let mem_docs = &self.mem_docs;
            let mut load = move |path: &AbsPath| {
                let _p = profile::span("GlobalState::load");
                let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                if !mem_docs.contains(&vfs_path) {
                    let contents = loader.handle.load_sync(path);
                    vfs.set_file_contents(vfs_path.clone(), contents);
                }
                let res = vfs.file_id(&vfs_path);
                if res.is_none() {
                    tracing::warn!("failed to load {}", path.display())
                }
                res
            };

            let mut crate_graph = CrateGraph::default();
            for ws in self.workspaces.iter() {
                crate_graph.extend(ws.to_crate_graph(
                    self.config.dummy_replacements(),
                    &mut load_proc_macro,
                    &mut load,
                ));
            }
            crate_graph
        };
        change.set_crate_graph(crate_graph);

        self.source_root_config = project_folders.source_root_config;

        self.analysis_host.apply_change(change);
        self.process_changes();
        self.reload_flycheck();
        tracing::info!("did switch workspaces");
    }

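    /// Collects the errors from the last workspace fetch, if any.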
    fn fetch_workspace_error(&self) -> Option<String> {
        let mut buf = String::new();

        for ws in self.fetch_workspaces_queue.last_op_result() {
            if let Err(err) = ws {
                stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
            }
        }

        if buf.is_empty() {
            return None;
        }

        Some(buf)
    }

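    /// Collects the errors from the last build-script run, if any, whether
    /// they come from a failed run or from errors reported by the scripts.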
    fn fetch_build_data_error(&self) -> Option<String> {
        let mut buf = "rust-analyzer failed to run build scripts:\n".to_string();
        let mut has_errors = false;

        for ws in &self.fetch_build_data_queue.last_op_result().1 {
            match ws {
                Ok(data) => {
                    if let Some(err) = data.error() {
                        has_errors = true;
                        stdx::format_to!(buf, "{:#}\n", err);
                    }
                }
                Err(err) => {
                    has_errors = true;
                    stdx::format_to!(buf, "{:#}\n", err);
                }
            }
        }

        if has_errors {
            Some(buf)
        } else {
            None
        }
    }

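    /// Replaces the existing flycheck handles with one per workspace that
    /// supports checking: Cargo workspaces always, JSON projects only when a
    /// custom flycheck command is configured.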
    fn reload_flycheck(&mut self) {
        let _p = profile::span("GlobalState::reload_flycheck");
        let config = match self.config.flycheck() {
            Some(it) => it,
            None => {
                self.flycheck = Vec::new();
                return;
            }
        };

        let sender = self.flycheck_sender.clone();
        self.flycheck = self
            .workspaces
            .iter()
            .enumerate()
            .filter_map(|(id, w)| match w {
                ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
                ProjectWorkspace::Json { project, .. } => {
                    // Enable flychecks for json projects if a custom flycheck command was supplied
                    // in the workspace configuration.
                    match config {
                        FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
                        _ => None,
                    }
                }
                ProjectWorkspace::DetachedFiles { .. } => None,
            })
            .map(|(id, root)| {
                let sender = sender.clone();
                FlycheckHandle::spawn(
                    id,
                    Box::new(move |msg| sender.send(msg).unwrap()),
                    config.clone(),
                    root.to_path_buf(),
                )
            })
            .collect();
    }
}

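/// Directories the VFS should load and watch, plus the source-root layout,
/// derived from the roots of the loaded workspaces.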
#[derive(Default)]
pub(crate) struct ProjectFolders {
    pub(crate) load: Vec<vfs::loader::Entry>,
    pub(crate) watch: Vec<usize>,
    pub(crate) source_root_config: SourceRootConfig,
}

impl ProjectFolders {
    pub(crate) fn new(
        workspaces: &[ProjectWorkspace],
        global_excludes: &[AbsPathBuf],
    ) -> ProjectFolders {
        let mut res = ProjectFolders::default();
        let mut fsc = FileSetConfig::builder();
        let mut local_filesets = vec![];

        for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
            let file_set_roots: Vec<VfsPath> =
                root.include.iter().cloned().map(VfsPath::from).collect();

            let entry = {
                let mut dirs = vfs::loader::Directories::default();
                dirs.extensions.push("rs".into());
                dirs.include.extend(root.include);
                dirs.exclude.extend(root.exclude);
                for excl in global_excludes {
                    if dirs
                        .include
                        .iter()
                        .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
                    {
                        dirs.exclude.push(excl.clone());
                    }
                }

                vfs::loader::Entry::Directories(dirs)
            };

            if root.is_local {
                res.watch.push(res.load.len());
            }
            res.load.push(entry);

            if root.is_local {
                local_filesets.push(fsc.len());
            }
            fsc.add_file_set(file_set_roots)
        }

        let fsc = fsc.build();
        res.source_root_config = SourceRootConfig { fsc, local_filesets };

        res
    }
}

#[derive(Default, Debug)]
pub(crate) struct SourceRootConfig {
    pub(crate) fsc: FileSetConfig,
    pub(crate) local_filesets: Vec<usize>,
}

impl SourceRootConfig {
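    /// Partitions the VFS into `SourceRoot`s, marking the file sets listed in
    /// `local_filesets` as local and everything else as library roots.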
    pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
        let _p = profile::span("SourceRootConfig::partition");
        self.fsc
            .partition(vfs)
            .into_iter()
            .enumerate()
            .map(|(idx, file_set)| {
                let is_local = self.local_filesets.contains(&idx);
                if is_local {
                    SourceRoot::new_local(file_set)
                } else {
                    SourceRoot::new_library(file_set)
                }
            })
            .collect()
    }
}

/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
/// with an identity dummy expander.
pub(crate) fn load_proc_macro(
    client: Option<&ProcMacroServer>,
    path: &AbsPath,
    dummy_replace: &[Box<str>],
) -> Vec<ProcMacro> {
    let dylib = match MacroDylib::new(path.to_path_buf()) {
        Ok(it) => it,
        Err(err) => {
            // FIXME: that's not really right -- we store this error in a
            // persistent status.
            tracing::warn!("failed to load proc macro: {}", err);
            return Vec::new();
        }
    };

    return client
        .map(|it| it.load_dylib(dylib))
        .into_iter()
        .flat_map(|it| match it {
            Ok(Ok(macros)) => macros,
            Err(err) => {
                tracing::error!("proc macro server crashed: {}", err);
                Vec::new()
            }
            Ok(Err(err)) => {
                // FIXME: that's not really right -- we store this error in a
                // persistent status.
                tracing::warn!("failed to load proc macro: {}", err);
                Vec::new()
            }
        })
        .map(|expander| expander_to_proc_macro(expander, dummy_replace))
        .collect();

    fn expander_to_proc_macro(
        expander: proc_macro_api::ProcMacro,
        dummy_replace: &[Box<str>],
    ) -> ProcMacro {
        let name = SmolStr::from(expander.name());
        let kind = match expander.kind() {
            proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
            proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
            proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
        };
        let expander: Arc<dyn ProcMacroExpander> =
            if dummy_replace.iter().any(|replace| &**replace == name) {
                Arc::new(DummyExpander)
            } else {
                Arc::new(Expander(expander))
            };
        ProcMacro { name, kind, expander }
    }

    #[derive(Debug)]
    struct Expander(proc_macro_api::ProcMacro);

    impl ProcMacroExpander for Expander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            attrs: Option<&tt::Subtree>,
            env: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
            match self.0.expand(subtree, attrs, env) {
                Ok(Ok(subtree)) => Ok(subtree),
                Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
                Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
            }
        }
    }

    /// Dummy identity expander, used for proc-macros that are deliberately ignored by the user.
    #[derive(Debug)]
    struct DummyExpander;

    impl ProcMacroExpander for DummyExpander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            _: Option<&tt::Subtree>,
            _: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            Ok(subtree.clone())
        }
    }
}

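/// Heuristic for whether a file change may alter the shape of the workspace
/// and should therefore trigger a reload: edits to `Cargo.toml`/`Cargo.lock`
/// always do, while `.rs` changes matter only when a file other than an
/// existing module is created or removed in an implicit-target location
/// (for example `src/main.rs`, `src/bin/*` or `examples/*`).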
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
    const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
    const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
    let file_name = path.file_name().unwrap_or_default();

    if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
        return true;
    }
    if change_kind == ChangeKind::Modify {
        return false;
    }
    if path.extension().unwrap_or_default() != "rs" {
        return false;
    }
    if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
        return true;
    }
    let parent = match path.parent() {
        Some(it) => it,
        None => return false,
    };
    if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
        return true;
    }
    if file_name == "main.rs" {
        let grand_parent = match parent.parent() {
            Some(it) => it,
            None => return false,
        };
        if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
            return true;
        }
    }
    false
}