crates/rust-analyzer/src/reload.rs
//! Project loading & configuration updates
use std::{mem, sync::Arc};

use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::base_db::{
    CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroKind, SourceRoot, VfsPath,
};
use proc_macro_api::ProcMacroClient;
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};

use crate::{
    config::{Config, FilesWatcher, LinkedProject},
    global_state::GlobalState,
    lsp_ext,
    main_loop::Task,
};

#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
    Begin,
    Report(String),
    End(Vec<anyhow::Result<ProjectWorkspace>>),
}

#[derive(Debug)]
pub(crate) enum BuildDataProgress {
    Begin,
    Report(String),
    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}
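
// Illustrative sketch, not part of the original file: both enums model the same
// three-phase lifecycle used by the background tasks below. A fetch task sends
// `Begin` once, then any number of `Report(msg)` progress updates, and finally a
// single `End(..)` carrying the results, e.g. (hypothetical `sender`):
//
//     sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();
//     sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report("loading metadata".to_string()))).unwrap();
//     sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces))).unwrap();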

impl GlobalState {
    pub(crate) fn is_quiescent(&self) -> bool {
        !(self.fetch_workspaces_queue.op_in_progress()
            || self.fetch_build_data_queue.op_in_progress()
            || self.vfs_progress_config_version < self.vfs_config_version
            || self.vfs_progress_n_done < self.vfs_progress_n_total)
    }

    pub(crate) fn update_configuration(&mut self, config: Config) {
        let _p = profile::span("GlobalState::update_configuration");
        let old_config = mem::replace(&mut self.config, Arc::new(config));
        if self.config.lru_capacity() != old_config.lru_capacity() {
            self.analysis_host.update_lru_capacity(self.config.lru_capacity());
        }
        if self.config.linked_projects() != old_config.linked_projects() {
            self.fetch_workspaces_request()
        } else if self.config.flycheck() != old_config.flycheck() {
            self.reload_flycheck();
        }

        // Apply experimental feature flags.
        self.analysis_host
            .raw_database_mut()
            .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
    }
    pub(crate) fn maybe_refresh(&mut self, changes: &[(AbsPathBuf, ChangeKind)]) {
        if !changes.iter().any(|(path, kind)| is_interesting(path, *kind)) {
            return;
        }
        log::info!(
            "Requesting workspace reload because of the following changes: {}",
            itertools::join(
                changes
                    .iter()
                    .filter(|(path, kind)| is_interesting(path, *kind))
                    .map(|(path, kind)| format!("{}: {:?}", path.display(), kind)),
                ", "
            )
        );
        self.fetch_workspaces_request();

        fn is_interesting(path: &AbsPath, change_kind: ChangeKind) -> bool {
            const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
            const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
            let file_name = path.file_name().unwrap_or_default();

            if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
                return true;
            }
            if change_kind == ChangeKind::Modify {
                return false;
            }
            if path.extension().unwrap_or_default() != "rs" {
                return false;
            }
            if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
                return true;
            }
            let parent = match path.parent() {
                Some(it) => it,
                None => return false,
            };
            if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
                return true;
            }
            if file_name == "main.rs" {
                let grand_parent = match parent.parent() {
                    Some(it) => it,
                    None => return false,
                };
                if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
                    return true;
                }
            }
            false
        }
    }
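    // Illustrative sketch, not part of the original file: how the heuristic above
    // classifies a few changes (the paths and the `state` value are hypothetical):
    //
    //     // Reload: manifest edits are always interesting.
    //     state.maybe_refresh(&[(AbsPathBuf::assert("/w/Cargo.toml".into()), ChangeKind::Modify)]);
    //     // No reload: plain modifications of ordinary `.rs` files don't change project structure.
    //     state.maybe_refresh(&[(AbsPathBuf::assert("/w/src/foo.rs".into()), ChangeKind::Modify)]);
    //     // Reload: a new file under `src/bin` adds an implicit Cargo target.
    //     state.maybe_refresh(&[(AbsPathBuf::assert("/w/src/bin/tool.rs".into()), ChangeKind::Create)]);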
    pub(crate) fn report_new_status_if_needed(&mut self) {
        let mut status = lsp_ext::ServerStatusParams {
            health: lsp_ext::Health::Ok,
            quiescent: self.is_quiescent(),
            message: None,
        };

        if let Some(error) = self.fetch_build_data_error() {
            status.health = lsp_ext::Health::Warning;
            status.message = Some(error)
        }
        if !self.config.cargo_autoreload()
            && self.is_quiescent()
            && self.fetch_workspaces_queue.op_requested()
        {
            status.health = lsp_ext::Health::Warning;
            status.message = Some("Workspace reload required".to_string())
        }

        if let Some(error) = self.fetch_workspace_error() {
            status.health = lsp_ext::Health::Error;
            status.message = Some(error)
        }

        if self.last_reported_status.as_ref() != Some(&status) {
            self.last_reported_status = Some(status.clone());

            if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
                self.show_message(lsp_types::MessageType::Error, message.clone());
            }

            if self.config.server_status_notification() {
                self.send_notification::<lsp_ext::ServerStatusNotification>(status);
            }
        }
    }

    pub(crate) fn fetch_workspaces_request(&mut self) {
        self.fetch_workspaces_queue.request_op()
    }
    pub(crate) fn fetch_workspaces_if_needed(&mut self) {
        if !self.fetch_workspaces_queue.should_start_op() {
            return;
        }
        log::info!("will fetch workspaces");

        self.task_pool.handle.spawn_with_sender({
            let linked_projects = self.config.linked_projects();
            let detached_files = self.config.detached_files().to_vec();
            let cargo_config = self.config.cargo();

            move |sender| {
                let progress = {
                    let sender = sender.clone();
                    move |msg| {
                        sender
                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
                            .unwrap()
                    }
                };

                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();

                let mut workspaces = linked_projects
                    .iter()
                    .map(|project| match project {
                        LinkedProject::ProjectManifest(manifest) => {
                            project_model::ProjectWorkspace::load(
                                manifest.clone(),
                                &cargo_config,
                                &progress,
                            )
                        }
                        LinkedProject::InlineJsonProject(it) => {
                            project_model::ProjectWorkspace::load_inline(
                                it.clone(),
                                cargo_config.target.as_deref(),
                            )
                        }
                    })
                    .collect::<Vec<_>>();

                if !detached_files.is_empty() {
                    workspaces
                        .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
                }

                log::info!("did fetch workspaces {:?}", workspaces);
                sender
                    .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
                    .unwrap();
            }
        });
    }
    pub(crate) fn fetch_workspaces_completed(
        &mut self,
        workspaces: Vec<anyhow::Result<ProjectWorkspace>>,
    ) {
        self.fetch_workspaces_queue.op_completed(workspaces)
    }

    pub(crate) fn fetch_build_data_request(&mut self) {
        self.fetch_build_data_queue.request_op();
    }
    pub(crate) fn fetch_build_data_if_needed(&mut self) {
        if !self.fetch_build_data_queue.should_start_op() {
            return;
        }

        let workspaces = Arc::clone(&self.workspaces);
        let config = self.config.cargo();
        self.task_pool.handle.spawn_with_sender(move |sender| {
            sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();

            let progress = {
                let sender = sender.clone();
                move |msg| {
                    sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                }
            };
            // One result per workspace, so the receiver can zip the two lists
            // back together in `switch_workspaces`.
            let mut res = Vec::new();
            for ws in workspaces.iter() {
                res.push(ws.run_build_scripts(&config, &progress));
            }
            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
        });
    }
    pub(crate) fn fetch_build_data_completed(
        &mut self,
        build_data: (Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>),
    ) {
        self.fetch_build_data_queue.op_completed(build_data)
    }

    pub(crate) fn switch_workspaces(&mut self) {
        let _p = profile::span("GlobalState::switch_workspaces");
        log::info!("will switch workspaces");

        if let Some(error_message) = self.fetch_workspace_error() {
            log::error!("failed to switch workspaces: {}", error_message);
            if !self.workspaces.is_empty() {
                // It only makes sense to switch to a partially broken workspace
                // if we don't have any workspace at all yet.
                return;
            }
        }

        if let Some(error_message) = self.fetch_build_data_error() {
            log::error!("failed to switch build data: {}", error_message);
        }

        let workspaces = self
            .fetch_workspaces_queue
            .last_op_result()
            .iter()
            .filter_map(|res| res.as_ref().ok().cloned())
            .collect::<Vec<_>>();

        fn eq_ignore_build_data<'a>(
            left: &'a ProjectWorkspace,
            right: &'a ProjectWorkspace,
        ) -> bool {
            let key = |p: &'a ProjectWorkspace| match p {
                ProjectWorkspace::Cargo {
                    cargo,
                    sysroot,
                    rustc,
                    rustc_cfg,
                    cfg_overrides,

                    build_scripts: _,
                } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
                _ => None,
            };
            match (key(left), key(right)) {
                (Some(lk), Some(rk)) => lk == rk,
                _ => left == right,
            }
        }

        let same_workspaces = workspaces.len() == self.workspaces.len()
            && workspaces
                .iter()
                .zip(self.workspaces.iter())
                .all(|(l, r)| eq_ignore_build_data(l, r));

        if same_workspaces {
            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
            if Arc::ptr_eq(&workspaces, &self.workspaces) {
                let workspaces = workspaces
                    .iter()
                    .cloned()
                    .zip(build_scripts)
                    .map(|(mut ws, bs)| {
                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
                        ws
                    })
                    .collect::<Vec<_>>();

                // Workspaces are the same, but we've updated build data.
                self.workspaces = Arc::new(workspaces);
            } else {
                // Current build scripts do not match the version of the active
                // workspace, so there's nothing for us to update.
                return;
            }
        } else {
            // Here, we completely changed the workspace (Cargo.toml edit), so
            // we don't care about build-script results, they are stale.
            self.workspaces = Arc::new(workspaces)
        }

        if let FilesWatcher::Client = self.config.files().watcher {
            if self.config.did_change_watched_files_dynamic_registration() {
                let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
                    watchers: self
                        .workspaces
                        .iter()
                        .flat_map(|ws| ws.to_roots())
                        .filter(|it| it.is_member)
                        .flat_map(|root| {
                            root.include.into_iter().flat_map(|it| {
                                [
                                    format!("{}/**/*.rs", it.display()),
                                    format!("{}/**/Cargo.toml", it.display()),
                                    format!("{}/**/Cargo.lock", it.display()),
                                ]
                            })
                        })
                        .map(|glob_pattern| lsp_types::FileSystemWatcher {
                            glob_pattern,
                            kind: None,
                        })
                        .collect(),
                };
                let registration = lsp_types::Registration {
                    id: "workspace/didChangeWatchedFiles".to_string(),
                    method: "workspace/didChangeWatchedFiles".to_string(),
                    register_options: Some(serde_json::to_value(registration_options).unwrap()),
                };
                self.send_request::<lsp_types::request::RegisterCapability>(
                    lsp_types::RegistrationParams { registrations: vec![registration] },
                    |_, _| (),
                );
            }
        }

        let mut change = Change::new();

        let files_config = self.config.files();
        let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

        if self.proc_macro_client.is_none() {
            self.proc_macro_client = match self.config.proc_macro_srv() {
                None => None,
                Some((path, args)) => match ProcMacroClient::extern_process(path.clone(), args) {
                    Ok(it) => Some(it),
                    Err(err) => {
                        log::error!(
                            "Failed to run proc_macro_srv from path {}, error: {:?}",
                            path.display(),
                            err
                        );
                        None
                    }
                },
            };
        }

        let watch = match files_config.watcher {
            FilesWatcher::Client => vec![],
            FilesWatcher::Notify => project_folders.watch,
        };
        self.vfs_config_version += 1;
        self.loader.handle.set_config(vfs::loader::Config {
            load: project_folders.load,
            watch,
            version: self.vfs_config_version,
        });

        // Create crate graph from all the workspaces
        let crate_graph = {
            let proc_macro_client = self.proc_macro_client.as_ref();
            let mut load_proc_macro =
                move |path: &AbsPath| load_proc_macro(proc_macro_client, path);

            let vfs = &mut self.vfs.write().0;
            let loader = &mut self.loader;
            let mem_docs = &self.mem_docs;
            let mut load = move |path: &AbsPath| {
                let _p = profile::span("GlobalState::load");
                let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                if !mem_docs.contains(&vfs_path) {
                    let contents = loader.handle.load_sync(path);
                    vfs.set_file_contents(vfs_path.clone(), contents);
                }
                let res = vfs.file_id(&vfs_path);
                if res.is_none() {
                    log::warn!("failed to load {}", path.display())
                }
                res
            };

            let mut crate_graph = CrateGraph::default();
            for ws in self.workspaces.iter() {
                crate_graph.extend(ws.to_crate_graph(&mut load_proc_macro, &mut load));
            }
            crate_graph
        };
        change.set_crate_graph(crate_graph);

        self.source_root_config = project_folders.source_root_config;

        self.analysis_host.apply_change(change);
        self.process_changes();
        self.reload_flycheck();
        log::info!("did switch workspaces");
    }

    fn fetch_workspace_error(&self) -> Option<String> {
        let mut buf = String::new();

        for ws in self.fetch_workspaces_queue.last_op_result() {
            if let Err(err) = ws {
                stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
            }
        }

        if buf.is_empty() {
            return None;
        }

        Some(buf)
    }

    fn fetch_build_data_error(&self) -> Option<String> {
        let mut buf = String::new();

        for ws in &self.fetch_build_data_queue.last_op_result().1 {
            if let Err(err) = ws {
                stdx::format_to!(buf, "rust-analyzer failed to run custom build: {:#}\n", err);
            }
        }

        if buf.is_empty() {
            return None;
        }

        Some(buf)
    }

    fn reload_flycheck(&mut self) {
        let _p = profile::span("GlobalState::reload_flycheck");
        let config = match self.config.flycheck() {
            Some(it) => it,
            None => {
                self.flycheck = Vec::new();
                return;
            }
        };

        let sender = self.flycheck_sender.clone();
        self.flycheck = self
            .workspaces
            .iter()
            .enumerate()
            .filter_map(|(id, w)| match w {
                ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
                ProjectWorkspace::Json { project, .. } => {
                    // Enable flychecks for json projects if a custom flycheck command was supplied
                    // in the workspace configuration.
                    match config {
                        FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
                        _ => None,
                    }
                }
                ProjectWorkspace::DetachedFiles { .. } => None,
            })
            .map(|(id, root)| {
                let sender = sender.clone();
                FlycheckHandle::spawn(
                    id,
                    Box::new(move |msg| sender.send(msg).unwrap()),
                    config.clone(),
                    root.to_path_buf().into(),
                )
            })
            .collect();
    }
}

#[derive(Default)]
pub(crate) struct ProjectFolders {
    pub(crate) load: Vec<vfs::loader::Entry>,
    pub(crate) watch: Vec<usize>,
    pub(crate) source_root_config: SourceRootConfig,
}

impl ProjectFolders {
    pub(crate) fn new(
        workspaces: &[ProjectWorkspace],
        global_excludes: &[AbsPathBuf],
    ) -> ProjectFolders {
        let mut res = ProjectFolders::default();
        let mut fsc = FileSetConfig::builder();
        let mut local_filesets = vec![];

        for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
            let file_set_roots: Vec<VfsPath> =
                root.include.iter().cloned().map(VfsPath::from).collect();

            let entry = {
                let mut dirs = vfs::loader::Directories::default();
                dirs.extensions.push("rs".into());
                dirs.include.extend(root.include);
                dirs.exclude.extend(root.exclude);
                for excl in global_excludes {
                    if dirs
                        .include
                        .iter()
                        .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
                    {
                        dirs.exclude.push(excl.clone());
                    }
                }

                vfs::loader::Entry::Directories(dirs)
            };

            if root.is_member {
                res.watch.push(res.load.len());
            }
            res.load.push(entry);

            if root.is_member {
                local_filesets.push(fsc.len());
            }
            fsc.add_file_set(file_set_roots)
        }

        let fsc = fsc.build();
        res.source_root_config = SourceRootConfig { fsc, local_filesets };

        res
    }
}
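
// Illustrative sketch, not part of the original file: this mirrors how
// `switch_workspaces` above derives folders and feeds them to the VFS loader
// (`state` is a hypothetical `GlobalState`):
//
//     let folders = ProjectFolders::new(&state.workspaces, &state.config.files().exclude);
//     state.loader.handle.set_config(vfs::loader::Config {
//         load: folders.load,
//         watch: folders.watch,
//         version: state.vfs_config_version,
//     });
//     state.source_root_config = folders.source_root_config;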

#[derive(Default, Debug)]
pub(crate) struct SourceRootConfig {
    pub(crate) fsc: FileSetConfig,
    pub(crate) local_filesets: Vec<usize>,
}

impl SourceRootConfig {
    pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
        let _p = profile::span("SourceRootConfig::partition");
        self.fsc
            .partition(vfs)
            .into_iter()
            .enumerate()
            .map(|(idx, file_set)| {
                let is_local = self.local_filesets.contains(&idx);
                if is_local {
                    SourceRoot::new_local(file_set)
                } else {
                    SourceRoot::new_library(file_set)
                }
            })
            .collect()
    }
}
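
// Illustrative sketch, not part of the original file: the partition is applied to
// a VFS snapshot to produce local (workspace-member) and library source roots,
// which are then handed to the analysis change (the `vfs` and `change` values are
// hypothetical):
//
//     let roots = source_root_config.partition(&vfs);
//     change.set_roots(roots);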

pub(crate) fn load_proc_macro(client: Option<&ProcMacroClient>, path: &AbsPath) -> Vec<ProcMacro> {
    return client
        .map(|it| it.by_dylib_path(path))
        .unwrap_or_default()
        .into_iter()
        .map(expander_to_proc_macro)
        .collect();

    fn expander_to_proc_macro(expander: proc_macro_api::ProcMacroProcessExpander) -> ProcMacro {
        let name = expander.name().into();
        let kind = match expander.kind() {
            proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
            proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
            proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
        };
        let expander = Arc::new(Expander(expander));
        ProcMacro { name, kind, expander }
    }

    #[derive(Debug)]
    struct Expander(proc_macro_api::ProcMacroProcessExpander);

    impl ProcMacroExpander for Expander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            attrs: Option<&tt::Subtree>,
            env: &Env,
        ) -> Result<tt::Subtree, tt::ExpansionError> {
            let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
            self.0.expand(subtree, attrs, env)
        }
    }
}
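
// Illustrative sketch, not part of the original file: resolving the proc macros
// exported by a compiled macro dylib through the optional client created in
// `switch_workspaces` (the dylib path and `state` are hypothetical):
//
//     let dylib = AbsPath::assert(std::path::Path::new("/target/debug/libfoo_derive.so"));
//     let macros = load_proc_macro(state.proc_macro_client.as_ref(), dylib);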