//! Project loading & configuration updates
use std::{mem, sync::Arc};

use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::base_db::{
    CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
    SourceRoot, VfsPath,
};
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};

use crate::{
    config::{Config, FilesWatcher, LinkedProject},
    global_state::GlobalState,
    lsp_ext,
    main_loop::Task,
};

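/// Progress reported while linked projects are being loaded on the task pool.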
#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
    Begin,
    Report(String),
    End(Vec<anyhow::Result<ProjectWorkspace>>),
}

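/// Progress reported while workspace build scripts are being run.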
#[derive(Debug)]
pub(crate) enum BuildDataProgress {
    Begin,
    Report(String),
    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}

impl GlobalState {
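    /// Returns `true` when no workspace fetch, build-script run, or VFS
    /// configuration/load operation is currently in flight.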
    pub(crate) fn is_quiescent(&self) -> bool {
        !(self.fetch_workspaces_queue.op_in_progress()
            || self.fetch_build_data_queue.op_in_progress()
            || self.vfs_progress_config_version < self.vfs_config_version
            || self.vfs_progress_n_done < self.vfs_progress_n_total)
    }

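    /// Swaps in a new `Config` and reacts only to the settings that changed:
    /// LRU capacity, linked projects (workspace refetch), flycheck settings,
    /// and proc-macro attribute expansion.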
    pub(crate) fn update_configuration(&mut self, config: Config) {
        let _p = profile::span("GlobalState::update_configuration");
        let old_config = mem::replace(&mut self.config, Arc::new(config));
        if self.config.lru_capacity() != old_config.lru_capacity() {
            self.analysis_host.update_lru_capacity(self.config.lru_capacity());
        }
        if self.config.linked_projects() != old_config.linked_projects() {
            self.fetch_workspaces_queue.request_op()
        } else if self.config.flycheck() != old_config.flycheck() {
            self.reload_flycheck();
        }

        // Apply experimental feature flags.
        self.analysis_host
            .raw_database_mut()
            .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
    }

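    /// Computes the server status reported to the client: overall health plus
    /// an optional explanatory message, downgraded when a reload is pending or
    /// a workspace/build-script fetch failed.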
    pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
        let mut status = lsp_ext::ServerStatusParams {
            health: lsp_ext::Health::Ok,
            quiescent: self.is_quiescent(),
            message: None,
        };

        if self.proc_macro_changed {
            status.health = lsp_ext::Health::Warning;
            status.message =
                Some("Reload required due to source changes of a procedural macro.".into())
        }
        if let Some(error) = self.fetch_build_data_error() {
            status.health = lsp_ext::Health::Warning;
            status.message = Some(error)
        }
        if !self.config.cargo_autoreload()
            && self.is_quiescent()
            && self.fetch_workspaces_queue.op_requested()
        {
            status.health = lsp_ext::Health::Warning;
            status.message = Some("Workspace reload required".to_string())
        }

        if let Some(error) = self.fetch_workspace_error() {
            status.health = lsp_ext::Health::Error;
            status.message = Some(error)
        }
        status
    }

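    /// Spawns a task that loads every linked project (plus any detached files)
    /// and streams `ProjectWorkspaceProgress` back to the main loop.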
    pub(crate) fn fetch_workspaces(&mut self) {
        tracing::info!("will fetch workspaces");

        self.task_pool.handle.spawn_with_sender({
            let linked_projects = self.config.linked_projects();
            let detached_files = self.config.detached_files().to_vec();
            let cargo_config = self.config.cargo();

            move |sender| {
                let progress = {
                    let sender = sender.clone();
                    move |msg| {
                        sender
                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
                            .unwrap()
                    }
                };

                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();

                let mut workspaces = linked_projects
                    .iter()
                    .map(|project| match project {
                        LinkedProject::ProjectManifest(manifest) => {
                            project_model::ProjectWorkspace::load(
                                manifest.clone(),
                                &cargo_config,
                                &progress,
                            )
                        }
                        LinkedProject::InlineJsonProject(it) => {
                            project_model::ProjectWorkspace::load_inline(
                                it.clone(),
                                cargo_config.target.as_deref(),
                            )
                        }
                    })
                    .collect::<Vec<_>>();

                if !detached_files.is_empty() {
                    workspaces
                        .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
                }

                tracing::info!("did fetch workspaces {:?}", workspaces);
                sender
                    .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
                    .unwrap();
            }
        });
    }

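    /// Spawns a task that runs build scripts for every workspace and reports
    /// the results back as `Task::FetchBuildData`.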
    pub(crate) fn fetch_build_data(&mut self) {
        let workspaces = Arc::clone(&self.workspaces);
        let config = self.config.cargo();
        self.task_pool.handle.spawn_with_sender(move |sender| {
            sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();

            let progress = {
                let sender = sender.clone();
                move |msg| {
                    sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                }
            };
            let mut res = Vec::new();
            for ws in workspaces.iter() {
                res.push(ws.run_build_scripts(&config, &progress));
            }
            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
        });
    }

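    /// Makes the last fetched workspaces (and build-script results) current:
    /// reconfigures file watching and the VFS loader, rebuilds the crate
    /// graph, and restarts flycheck.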
    pub(crate) fn switch_workspaces(&mut self) {
        let _p = profile::span("GlobalState::switch_workspaces");
        tracing::info!("will switch workspaces");

        if let Some(error_message) = self.fetch_workspace_error() {
            tracing::error!("failed to switch workspaces: {}", error_message);
            if !self.workspaces.is_empty() {
                // It only makes sense to switch to a partially broken workspace
                // if we don't have any workspace at all yet.
                return;
            }
        }

        if let Some(error_message) = self.fetch_build_data_error() {
            tracing::error!("failed to switch build data: {}", error_message);
        }

        let workspaces = self
            .fetch_workspaces_queue
            .last_op_result()
            .iter()
            .filter_map(|res| res.as_ref().ok().cloned())
            .collect::<Vec<_>>();

        fn eq_ignore_build_data<'a>(
            left: &'a ProjectWorkspace,
            right: &'a ProjectWorkspace,
        ) -> bool {
            let key = |p: &'a ProjectWorkspace| match p {
                ProjectWorkspace::Cargo {
                    cargo,
                    sysroot,
                    rustc,
                    rustc_cfg,
                    cfg_overrides,

                    build_scripts: _,
                } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
                _ => None,
            };
            match (key(left), key(right)) {
                (Some(lk), Some(rk)) => lk == rk,
                _ => left == right,
            }
        }

        let same_workspaces = workspaces.len() == self.workspaces.len()
            && workspaces
                .iter()
                .zip(self.workspaces.iter())
                .all(|(l, r)| eq_ignore_build_data(l, r));

        if same_workspaces {
            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
            if Arc::ptr_eq(workspaces, &self.workspaces) {
                let workspaces = workspaces
                    .iter()
                    .cloned()
                    .zip(build_scripts)
                    .map(|(mut ws, bs)| {
                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
                        ws
                    })
                    .collect::<Vec<_>>();

                // Workspaces are the same, but we've updated build data.
                self.workspaces = Arc::new(workspaces);
            } else {
                // Current build scripts do not match the version of the active
                // workspace, so there's nothing for us to update.
                return;
            }
        } else {
            // Here, we completely changed the workspace (Cargo.toml edit), so
            // we don't care about build-script results, they are stale.
            self.workspaces = Arc::new(workspaces)
        }

        if let FilesWatcher::Client = self.config.files().watcher {
            if self.config.did_change_watched_files_dynamic_registration() {
                let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
                    watchers: self
                        .workspaces
                        .iter()
                        .flat_map(|ws| ws.to_roots())
                        .filter(|it| it.is_local)
                        .flat_map(|root| {
                            root.include.into_iter().flat_map(|it| {
                                [
                                    format!("{}/**/*.rs", it.display()),
                                    format!("{}/**/Cargo.toml", it.display()),
                                    format!("{}/**/Cargo.lock", it.display()),
                                ]
                            })
                        })
                        .map(|glob_pattern| lsp_types::FileSystemWatcher {
                            glob_pattern,
                            kind: None,
                        })
                        .collect(),
                };
                let registration = lsp_types::Registration {
                    id: "workspace/didChangeWatchedFiles".to_string(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: Some(serde_json::to_value(registration_options).unwrap()),
                };
                self.send_request::<lsp_types::request::RegisterCapability>(
                    lsp_types::RegistrationParams { registrations: vec![registration] },
                    |_, _| (),
                );
            }
        }

        let mut change = Change::new();

        let files_config = self.config.files();
        let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

        if self.proc_macro_client.is_none() {
            self.proc_macro_client = match self.config.proc_macro_srv() {
                None => None,
                Some((path, args)) => match ProcMacroServer::spawn(path.clone(), args) {
                    Ok(it) => Some(it),
                    Err(err) => {
                        tracing::error!(
                            "Failed to run proc_macro_srv from path {}, error: {:?}",
                            path.display(),
                            err
                        );
                        None
                    }
                },
            };
        }

        let watch = match files_config.watcher {
            FilesWatcher::Client => vec![],
            FilesWatcher::Notify => project_folders.watch,
        };
        self.vfs_config_version += 1;
        self.loader.handle.set_config(vfs::loader::Config {
            load: project_folders.load,
            watch,
            version: self.vfs_config_version,
        });

        // Create crate graph from all the workspaces
        let crate_graph = {
            let proc_macro_client = self.proc_macro_client.as_ref();
            let mut load_proc_macro =
                move |path: &AbsPath| load_proc_macro(proc_macro_client, path);

            let vfs = &mut self.vfs.write().0;
            let loader = &mut self.loader;
            let mem_docs = &self.mem_docs;
            let mut load = move |path: &AbsPath| {
                let _p = profile::span("GlobalState::load");
                let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                if !mem_docs.contains(&vfs_path) {
                    let contents = loader.handle.load_sync(path);
                    vfs.set_file_contents(vfs_path.clone(), contents);
                }
                let res = vfs.file_id(&vfs_path);
                if res.is_none() {
                    tracing::warn!("failed to load {}", path.display())
                }
                res
            };

            let mut crate_graph = CrateGraph::default();
            for ws in self.workspaces.iter() {
                crate_graph.extend(ws.to_crate_graph(&mut load_proc_macro, &mut load));
            }
            crate_graph
        };
        change.set_crate_graph(crate_graph);

        self.source_root_config = project_folders.source_root_config;

        self.analysis_host.apply_change(change);
        self.process_changes();
        self.reload_flycheck();
        tracing::info!("did switch workspaces");
    }

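    /// Collects the errors from the last workspace fetch, if any, into a
    /// single user-facing message.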
    fn fetch_workspace_error(&self) -> Option<String> {
        let mut buf = String::new();

        for ws in self.fetch_workspaces_queue.last_op_result() {
            if let Err(err) = ws {
                stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
            }
        }

        if buf.is_empty() {
            return None;
        }

        Some(buf)
    }

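    /// Collects the errors from the last build-script run, if any, into a
    /// single user-facing message.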
    fn fetch_build_data_error(&self) -> Option<String> {
        let mut buf = "rust-analyzer failed to run build scripts:\n".to_string();
        let mut has_errors = false;

        for ws in &self.fetch_build_data_queue.last_op_result().1 {
            match ws {
                Ok(data) => {
                    if let Some(err) = data.error() {
                        has_errors = true;
                        stdx::format_to!(buf, "{:#}\n", err);
                    }
                }
                Err(err) => {
                    has_errors = true;
                    stdx::format_to!(buf, "{:#}\n", err);
                }
            }
        }

        if has_errors {
            Some(buf)
        } else {
            None
        }
    }

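    /// Drops the existing flycheck handles and spawns one per workspace that
    /// supports checking: Cargo workspaces always, JSON projects only when a
    /// custom flycheck command is configured, detached files never.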
    fn reload_flycheck(&mut self) {
        let _p = profile::span("GlobalState::reload_flycheck");
        let config = match self.config.flycheck() {
            Some(it) => it,
            None => {
                self.flycheck = Vec::new();
                return;
            }
        };

        let sender = self.flycheck_sender.clone();
        self.flycheck = self
            .workspaces
            .iter()
            .enumerate()
            .filter_map(|(id, w)| match w {
                ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
                ProjectWorkspace::Json { project, .. } => {
                    // Enable flychecks for json projects if a custom flycheck command was supplied
                    // in the workspace configuration.
                    match config {
                        FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
                        _ => None,
                    }
                }
                ProjectWorkspace::DetachedFiles { .. } => None,
            })
            .map(|(id, root)| {
                let sender = sender.clone();
                FlycheckHandle::spawn(
                    id,
                    Box::new(move |msg| sender.send(msg).unwrap()),
                    config.clone(),
                    root.to_path_buf(),
                )
            })
            .collect();
    }
}

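/// VFS loader configuration derived from the workspaces: the entries to load,
/// the indices of those entries that should be watched, and the resulting
/// source-root partitioning.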
#[derive(Default)]
pub(crate) struct ProjectFolders {
    pub(crate) load: Vec<vfs::loader::Entry>,
    pub(crate) watch: Vec<usize>,
    pub(crate) source_root_config: SourceRootConfig,
}

impl ProjectFolders {
    pub(crate) fn new(
        workspaces: &[ProjectWorkspace],
        global_excludes: &[AbsPathBuf],
    ) -> ProjectFolders {
        let mut res = ProjectFolders::default();
        let mut fsc = FileSetConfig::builder();
        let mut local_filesets = vec![];

        for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
            let file_set_roots: Vec<VfsPath> =
                root.include.iter().cloned().map(VfsPath::from).collect();

            let entry = {
                let mut dirs = vfs::loader::Directories::default();
                dirs.extensions.push("rs".into());
                dirs.include.extend(root.include);
                dirs.exclude.extend(root.exclude);
                for excl in global_excludes {
                    if dirs
                        .include
                        .iter()
                        .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
                    {
                        dirs.exclude.push(excl.clone());
                    }
                }

                vfs::loader::Entry::Directories(dirs)
            };

            if root.is_local {
                res.watch.push(res.load.len());
            }
            res.load.push(entry);

            if root.is_local {
                local_filesets.push(fsc.len());
            }
            fsc.add_file_set(file_set_roots)
        }

        let fsc = fsc.build();
        res.source_root_config = SourceRootConfig { fsc, local_filesets };

        res
    }
}

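/// Describes how VFS files are grouped into source roots and which of those
/// roots belong to the local workspace rather than to libraries.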
#[derive(Default, Debug)]
pub(crate) struct SourceRootConfig {
    pub(crate) fsc: FileSetConfig,
    pub(crate) local_filesets: Vec<usize>,
}

impl SourceRootConfig {
    pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
        let _p = profile::span("SourceRootConfig::partition");
        self.fsc
            .partition(vfs)
            .into_iter()
            .enumerate()
            .map(|(idx, file_set)| {
                let is_local = self.local_filesets.contains(&idx);
                if is_local {
                    SourceRoot::new_local(file_set)
                } else {
                    SourceRoot::new_library(file_set)
                }
            })
            .collect()
    }
}

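/// Loads the proc macros exported by the dylib at `path` through the
/// proc-macro server, logging failures and falling back to an empty `Vec`.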
pub(crate) fn load_proc_macro(client: Option<&ProcMacroServer>, path: &AbsPath) -> Vec<ProcMacro> {
    let dylib = match MacroDylib::new(path.to_path_buf()) {
        Ok(it) => it,
        Err(err) => {
            // FIXME: that's not really right -- we store this error in a
            // persistent status.
            tracing::warn!("failed to load proc macro: {}", err);
            return Vec::new();
        }
    };

    return client
        .map(|it| it.load_dylib(dylib))
        .into_iter()
        .flat_map(|it| match it {
            Ok(Ok(macros)) => macros,
            Err(err) => {
                tracing::error!("proc macro server crashed: {}", err);
                Vec::new()
            }
            Ok(Err(err)) => {
                // FIXME: that's not really right -- we store this error in a
                // persistent status.
                tracing::warn!("failed to load proc macro: {}", err);
                Vec::new()
            }
        })
        .map(expander_to_proc_macro)
        .collect();

    fn expander_to_proc_macro(expander: proc_macro_api::ProcMacro) -> ProcMacro {
        let name = expander.name().into();
        let kind = match expander.kind() {
            proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
            proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
            proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
        };
        let expander = Arc::new(Expander(expander));
        ProcMacro { name, kind, expander }
    }

    #[derive(Debug)]
    struct Expander(proc_macro_api::ProcMacro);

    impl ProcMacroExpander for Expander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            attrs: Option<&tt::Subtree>,
            env: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
            match self.0.expand(subtree, attrs, env) {
                Ok(Ok(subtree)) => Ok(subtree),
                Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
                Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
            }
        }
    }
}

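/// Heuristic for whether a file change may affect the project structure and
/// therefore warrants a workspace reload: Cargo.toml/Cargo.lock edits, or
/// adding/removing `.rs` files that map to implicit Cargo targets.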
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
    const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
    const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
    let file_name = path.file_name().unwrap_or_default();

    if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
        return true;
    }
    if change_kind == ChangeKind::Modify {
        return false;
    }
    if path.extension().unwrap_or_default() != "rs" {
        return false;
    }
    if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
        return true;
    }
    let parent = match path.parent() {
        Some(it) => it,
        None => return false,
    };
    if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
        return true;
    }
    if file_name == "main.rs" {
        let grand_parent = match parent.parent() {
            Some(it) => it,
            None => return false,
        };
        if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
            return true;
        }
    }
    false
}