//! Project loading & configuration updates.
//!
//! This is quite tricky. The main problem is time and changes -- there's no
//! fixed "project" rust-analyzer is working with, "current project" is itself
//! mutable state. For example, when the user edits `Cargo.toml` by adding a new
//! dependency, the project model changes. What's more, switching the project
//! model is not instantaneous -- it takes time to run `cargo metadata` and (for
//! proc macros) `cargo check`.
//!
//! The main guiding principle here is, as elsewhere in rust-analyzer,
//! robustness. We try not to assume that the project model exists or is
//! correct. Instead, we try to provide a best-effort service. Even if the
//! project is currently loading and we don't have a full project model, we
//! still want to respond to various requests.
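//!
//! As a rough, hedged sketch (the real driver lives in `main_loop.rs`, and the
//! cause strings below are illustrative, not real ones), the pieces defined in
//! this module are expected to be used roughly like this:
//!
//! ```rust,ignore
//! fn project_model_changed(state: &mut GlobalState) {
//!     // 1. Something invalidated the project model, so a reload is queued.
//!     state.fetch_workspaces_queue.request_op("Cargo.toml changed".to_string());
//!     // 2. The main loop notices the queued op and spawns `cargo metadata`
//!     //    on the task pool; results arrive as `Task::FetchWorkspace`.
//!     state.fetch_workspaces("Cargo.toml changed".to_string());
//!     // 3. Once `ProjectWorkspaceProgress::End` arrives, the new workspaces
//!     //    are applied: crate graph, VFS config, file watchers, flycheck.
//!     state.switch_workspaces("fetched workspaces".to_string());
//!     // 4. Build scripts and proc macros are fetched separately; when they
//!     //    are done, `switch_workspaces` runs again to pick them up.
//!     state.fetch_build_data("stale build scripts".to_string());
//! }
//! ```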
use std::{mem, sync::Arc};

use flycheck::{FlycheckConfig, FlycheckHandle};
use hir::db::DefDatabase;
use ide::Change;
use ide_db::base_db::{
    CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
    ProcMacroLoadResult, SourceRoot, VfsPath,
};
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
use syntax::SmolStr;
use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};

use crate::{
    config::{Config, FilesWatcher, LinkedProject},
    global_state::GlobalState,
    lsp_ext,
    main_loop::Task,
    op_queue::Cause,
};

#[derive(Debug)]
pub(crate) enum ProjectWorkspaceProgress {
    Begin,
    Report(String),
    End(Vec<anyhow::Result<ProjectWorkspace>>),
}

#[derive(Debug)]
pub(crate) enum BuildDataProgress {
    Begin,
    Report(String),
    End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
}
impl GlobalState {
    /// `true` when there is no pending work: no workspace or build-data fetch
    /// is in flight and the VFS has caught up with the latest configuration.
    pub(crate) fn is_quiescent(&self) -> bool {
        !(self.fetch_workspaces_queue.op_in_progress()
            || self.fetch_build_data_queue.op_in_progress()
            || self.vfs_progress_config_version < self.vfs_config_version
            || self.vfs_progress_n_done < self.vfs_progress_n_total)
    }

    pub(crate) fn update_configuration(&mut self, config: Config) {
        let _p = profile::span("GlobalState::update_configuration");
        let old_config = mem::replace(&mut self.config, Arc::new(config));
        if self.config.lru_capacity() != old_config.lru_capacity() {
            self.analysis_host.update_lru_capacity(self.config.lru_capacity());
        }
        if self.config.linked_projects() != old_config.linked_projects() {
            self.fetch_workspaces_queue.request_op("linked projects changed".to_string())
        } else if self.config.flycheck() != old_config.flycheck() {
            self.reload_flycheck();
        }

        if self.analysis_host.raw_database().enable_proc_attr_macros()
            != self.config.expand_proc_attr_macros()
        {
            self.analysis_host
                .raw_database_mut()
                .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
        }
    }

    pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
        let mut status = lsp_ext::ServerStatusParams {
            health: lsp_ext::Health::Ok,
            quiescent: self.is_quiescent(),
            message: None,
        };

        if self.proc_macro_changed {
            status.health = lsp_ext::Health::Warning;
            status.message =
                Some("Reload required due to source changes of a procedural macro.".into())
        }
        if self.fetch_build_data_error().is_err() {
            status.health = lsp_ext::Health::Warning;
            status.message =
                Some("Failed to run build scripts of some packages, check the logs.".to_string());
        }
        if !self.config.cargo_autoreload()
            && self.is_quiescent()
            && self.fetch_workspaces_queue.op_requested()
        {
            status.health = lsp_ext::Health::Warning;
            status.message = Some("Workspace reload required".to_string())
        }

        if let Err(error) = self.fetch_workspace_error() {
            status.health = lsp_ext::Health::Error;
            status.message = Some(error)
        }
        status
    }

    pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
        tracing::info!(%cause, "will fetch workspaces");

        self.task_pool.handle.spawn_with_sender({
            let linked_projects = self.config.linked_projects();
            let detached_files = self.config.detached_files().to_vec();
            let cargo_config = self.config.cargo();

            move |sender| {
                let progress = {
                    let sender = sender.clone();
                    move |msg| {
                        sender
                            .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
                            .unwrap()
                    }
                };

                sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();

                let mut workspaces = linked_projects
                    .iter()
                    .map(|project| match project {
                        LinkedProject::ProjectManifest(manifest) => {
                            project_model::ProjectWorkspace::load(
                                manifest.clone(),
                                &cargo_config,
                                &progress,
                            )
                        }
                        LinkedProject::InlineJsonProject(it) => {
                            project_model::ProjectWorkspace::load_inline(
                                it.clone(),
                                cargo_config.target.as_deref(),
                                &cargo_config,
                            )
                        }
                    })
                    .collect::<Vec<_>>();

                if !detached_files.is_empty() {
                    workspaces
                        .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
                }

                tracing::info!("did fetch workspaces {:?}", workspaces);
                sender
                    .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
                    .unwrap();
            }
        });
    }

    pub(crate) fn fetch_build_data(&mut self, cause: Cause) {
        tracing::info!(%cause, "will fetch build data");
        let workspaces = Arc::clone(&self.workspaces);
        let config = self.config.cargo();
        self.task_pool.handle.spawn_with_sender(move |sender| {
            sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();

            let progress = {
                let sender = sender.clone();
                move |msg| {
                    sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
                }
            };
            let mut res = Vec::new();
            for ws in workspaces.iter() {
                res.push(ws.run_build_scripts(&config, &progress));
            }
            sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
        });
    }

    /// Applies the result of the latest workspace/build-data fetch: rebuilds
    /// the crate graph, reconfigures the VFS and file watchers, and respawns
    /// the proc-macro servers and flycheck as needed.
    pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
        let _p = profile::span("GlobalState::switch_workspaces");
        tracing::info!(%cause, "will switch workspaces");

        if let Err(error_message) = self.fetch_workspace_error() {
            self.show_and_log_error(error_message, None);
            if !self.workspaces.is_empty() {
                // It only makes sense to switch to a partially broken workspace
                // if we don't have any workspace at all yet.
                return;
            }
        }

        if let Err(error) = self.fetch_build_data_error() {
            self.show_and_log_error("failed to run build scripts".to_string(), Some(error));
        }

        let workspaces = self
            .fetch_workspaces_queue
            .last_op_result()
            .iter()
            .filter_map(|res| res.as_ref().ok().cloned())
            .collect::<Vec<_>>();

        fn eq_ignore_build_data<'a>(
            left: &'a ProjectWorkspace,
            right: &'a ProjectWorkspace,
        ) -> bool {
            let key = |p: &'a ProjectWorkspace| match p {
                ProjectWorkspace::Cargo {
                    cargo,
                    sysroot,
                    rustc,
                    rustc_cfg,
                    cfg_overrides,

                    build_scripts: _,
                    toolchain: _,
                } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
                _ => None,
            };
            match (key(left), key(right)) {
                (Some(lk), Some(rk)) => lk == rk,
                _ => left == right,
            }
        }

        let same_workspaces = workspaces.len() == self.workspaces.len()
            && workspaces
                .iter()
                .zip(self.workspaces.iter())
                .all(|(l, r)| eq_ignore_build_data(l, r));

        if same_workspaces {
            let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
            if Arc::ptr_eq(workspaces, &self.workspaces) {
                tracing::debug!("set build scripts to workspaces");

                let workspaces = workspaces
                    .iter()
                    .cloned()
                    .zip(build_scripts)
                    .map(|(mut ws, bs)| {
                        ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
                        ws
                    })
                    .collect::<Vec<_>>();

                // Workspaces are the same, but we've updated build data.
                self.workspaces = Arc::new(workspaces);
            } else {
                tracing::info!("build scripts do not match the version of the active workspace");
                // The current build scripts do not match the version of the
                // active workspace, so there's nothing for us to update.
                return;
            }
        } else {
            tracing::debug!("abandon build scripts for workspaces");

            // Here the workspace itself changed (e.g. a `Cargo.toml` edit), so
            // any build-script results we have are stale and can be dropped.
            self.workspaces = Arc::new(workspaces)
        }

        if let FilesWatcher::Client = self.config.files().watcher {
            let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
                watchers: self
                    .workspaces
                    .iter()
                    .flat_map(|ws| ws.to_roots())
                    .filter(|it| it.is_local)
                    .flat_map(|root| {
                        root.include.into_iter().flat_map(|it| {
                            [
                                format!("{}/**/*.rs", it.display()),
                                format!("{}/**/Cargo.toml", it.display()),
                                format!("{}/**/Cargo.lock", it.display()),
                            ]
                        })
                    })
                    .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
                    .collect(),
            };
            let registration = lsp_types::Registration {
                id: "workspace/didChangeWatchedFiles".to_string(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: Some(serde_json::to_value(registration_options).unwrap()),
            };
            self.send_request::<lsp_types::request::RegisterCapability>(
                lsp_types::RegistrationParams { registrations: vec![registration] },
                |_, _| (),
            );
        }

        let mut change = Change::new();

        let files_config = self.config.files();
        let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

        let standalone_server_name =
            format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);

        if self.proc_macro_clients.is_empty() {
            if let Some((path, args)) = self.config.proc_macro_srv() {
                tracing::info!("Spawning proc-macro servers");
                self.proc_macro_clients = self
                    .workspaces
                    .iter()
                    .map(|ws| {
                        let mut args = args.clone();
                        let mut path = path.clone();

                        if let ProjectWorkspace::Cargo { sysroot, .. }
                        | ProjectWorkspace::Json { sysroot, .. } = ws
                        {
                            tracing::debug!("Found a Cargo or JSON workspace...");
                            if let Some(sysroot) = sysroot.as_ref() {
                                tracing::debug!("The workspace has a sysroot...");
                                let server_path =
                                    sysroot.root().join("libexec").join(&standalone_server_name);
                                if std::fs::metadata(&server_path).is_ok() {
                                    tracing::debug!(
                                        "And the server exists at {}",
                                        server_path.display()
                                    );
                                    path = server_path;
                                    args = vec![];
                                } else {
                                    tracing::debug!(
                                        "And the server does not exist at {}",
                                        server_path.display()
                                    );
                                }
                            }
                        }

                        tracing::info!(?args, "Using proc-macro server at {}", path.display());
                        ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| {
                            let error = format!(
                                "Failed to run proc-macro server from path {}, error: {:?}",
                                path.display(),
                                err
                            );
                            tracing::error!(error);
                            error
                        })
                    })
                    .collect()
            };
        }

        let watch = match files_config.watcher {
            FilesWatcher::Client => vec![],
            FilesWatcher::Server => project_folders.watch,
        };
        self.vfs_config_version += 1;
        self.loader.handle.set_config(vfs::loader::Config {
            load: project_folders.load,
            watch,
            version: self.vfs_config_version,
        });

        // Create crate graph from all the workspaces
        let crate_graph = {
            let dummy_replacements = self.config.dummy_replacements();

            let vfs = &mut self.vfs.write().0;
            let loader = &mut self.loader;
            let mem_docs = &self.mem_docs;
            let mut load = move |path: &AbsPath| {
                let _p = profile::span("GlobalState::load");
                let vfs_path = vfs::VfsPath::from(path.to_path_buf());
                if !mem_docs.contains(&vfs_path) {
                    let contents = loader.handle.load_sync(path);
                    vfs.set_file_contents(vfs_path.clone(), contents);
                }
                let res = vfs.file_id(&vfs_path);
                if res.is_none() {
                    tracing::warn!("failed to load {}", path.display())
                }
                res
            };

            let mut crate_graph = CrateGraph::default();
            for (idx, ws) in self.workspaces.iter().enumerate() {
                let proc_macro_client = match self.proc_macro_clients.get(idx) {
                    Some(res) => res.as_ref().map_err(|e| &**e),
                    None => Err("Proc macros are disabled"),
                };
                let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
                    load_proc_macro(
                        proc_macro_client,
                        path,
                        dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
                    )
                };
                crate_graph.extend(ws.to_crate_graph(
                    &mut load_proc_macro,
                    &mut load,
                    &self.config.cargo(),
                ));
            }
            crate_graph
        };
        change.set_crate_graph(crate_graph);

        self.source_root_config = project_folders.source_root_config;

        self.analysis_host.apply_change(change);
        self.process_changes();
        self.reload_flycheck();
        tracing::info!("did switch workspaces");
    }

    fn fetch_workspace_error(&self) -> Result<(), String> {
        let mut buf = String::new();

        for ws in self.fetch_workspaces_queue.last_op_result() {
            if let Err(err) = ws {
                stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
            }
        }

        if buf.is_empty() {
            return Ok(());
        }

        Err(buf)
    }

    fn fetch_build_data_error(&self) -> Result<(), String> {
        let mut buf = String::new();

        for ws in &self.fetch_build_data_queue.last_op_result().1 {
            match ws {
                // The build scripts ran, but may have reported an error.
                Ok(data) => {
                    if let Some(stderr) = data.error() {
                        stdx::format_to!(buf, "{:#}\n", stderr);
                    }
                }
                // I/O errors (the build scripts could not be run at all).
                Err(err) => stdx::format_to!(buf, "{:#}\n", err),
            }
        }

        if buf.is_empty() {
            Ok(())
        } else {
            Err(buf)
        }
    }

    fn reload_flycheck(&mut self) {
        let _p = profile::span("GlobalState::reload_flycheck");
        let config = match self.config.flycheck() {
            Some(it) => it,
            None => {
                self.flycheck = Vec::new();
                self.diagnostics.clear_check_all();
                return;
            }
        };

        let sender = self.flycheck_sender.clone();
        self.flycheck = self
            .workspaces
            .iter()
            .enumerate()
            .filter_map(|(id, w)| match w {
                ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
                ProjectWorkspace::Json { project, .. } => {
                    // Enable flychecks for json projects if a custom flycheck command was supplied
                    // in the workspace configuration.
                    match config {
                        FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
                        _ => None,
                    }
                }
                ProjectWorkspace::DetachedFiles { .. } => None,
            })
            .map(|(id, root)| {
                let sender = sender.clone();
                FlycheckHandle::spawn(
                    id,
                    Box::new(move |msg| sender.send(msg).unwrap()),
                    config.clone(),
                    root.to_path_buf(),
                )
            })
            .collect();
    }
}

#[derive(Default)]
pub(crate) struct ProjectFolders {
    pub(crate) load: Vec<vfs::loader::Entry>,
    pub(crate) watch: Vec<usize>,
    pub(crate) source_root_config: SourceRootConfig,
}

impl ProjectFolders {
    pub(crate) fn new(
        workspaces: &[ProjectWorkspace],
        global_excludes: &[AbsPathBuf],
    ) -> ProjectFolders {
        let mut res = ProjectFolders::default();
        let mut fsc = FileSetConfig::builder();
        let mut local_filesets = vec![];

        for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
            let file_set_roots: Vec<VfsPath> =
                root.include.iter().cloned().map(VfsPath::from).collect();

            let entry = {
                let mut dirs = vfs::loader::Directories::default();
                dirs.extensions.push("rs".into());
                dirs.include.extend(root.include);
                dirs.exclude.extend(root.exclude);
                for excl in global_excludes {
                    if dirs
                        .include
                        .iter()
                        .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
                    {
                        dirs.exclude.push(excl.clone());
                    }
                }

                vfs::loader::Entry::Directories(dirs)
            };

            if root.is_local {
                res.watch.push(res.load.len());
            }
            res.load.push(entry);

            if root.is_local {
                local_filesets.push(fsc.len());
            }
            fsc.add_file_set(file_set_roots)
        }

        let fsc = fsc.build();
        res.source_root_config = SourceRootConfig { fsc, local_filesets };

        res
    }
}

#[derive(Default, Debug)]
pub(crate) struct SourceRootConfig {
    pub(crate) fsc: FileSetConfig,
    pub(crate) local_filesets: Vec<usize>,
}

impl SourceRootConfig {
    pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
        let _p = profile::span("SourceRootConfig::partition");
        self.fsc
            .partition(vfs)
            .into_iter()
            .enumerate()
            .map(|(idx, file_set)| {
                let is_local = self.local_filesets.contains(&idx);
                if is_local {
                    SourceRoot::new_local(file_set)
                } else {
                    SourceRoot::new_library(file_set)
                }
            })
            .collect()
    }
}

/// Load the proc-macros for the given lib path, replacing all expanders whose names
/// are in `dummy_replace` with a dummy expander (an identity expander for attribute
/// macros, an empty expander for derives and function-like macros).
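///
/// A hedged sketch of the call shape (the real call site is in `switch_workspaces`
/// above; `server`, `path`, and the `"Serialize"` name are illustrative assumptions,
/// not values taken from this crate):
///
/// ```rust,ignore
/// let proc_macros = load_proc_macro(
///     Ok(&server),              // a previously spawned `ProcMacroServer`
///     path,                     // `&AbsPath` of the proc-macro dylib to load
///     &["Serialize".into()],    // expand `Serialize` with a dummy instead
/// );
/// ```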
pub(crate) fn load_proc_macro(
    server: Result<&ProcMacroServer, &str>,
    path: &AbsPath,
    dummy_replace: &[Box<str>],
) -> ProcMacroLoadResult {
    let res: Result<Vec<_>, String> = (|| {
        let dylib = MacroDylib::new(path.to_path_buf())
            .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
        let server = server.map_err(ToOwned::to_owned)?;
        let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
        if vec.is_empty() {
            return Err("proc macro library returned no proc macros".to_string());
        }
        Ok(vec
            .into_iter()
            .map(|expander| expander_to_proc_macro(expander, dummy_replace))
            .collect())
    })();
    return match res {
        Ok(proc_macros) => {
            tracing::info!(
                "Loaded proc-macros for {}: {:?}",
                path.display(),
                proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
            );
            Ok(proc_macros)
        }
        Err(e) => {
            tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
            Err(e)
        }
    };

    fn expander_to_proc_macro(
        expander: proc_macro_api::ProcMacro,
        dummy_replace: &[Box<str>],
    ) -> ProcMacro {
        let name = SmolStr::from(expander.name());
        let kind = match expander.kind() {
            proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
            proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
            proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
        };
        let expander: Arc<dyn ProcMacroExpander> =
            if dummy_replace.iter().any(|replace| &**replace == name) {
                match kind {
                    ProcMacroKind::Attr => Arc::new(IdentityExpander),
                    _ => Arc::new(EmptyExpander),
                }
            } else {
                Arc::new(Expander(expander))
            };
        ProcMacro { name, kind, expander }
    }

    #[derive(Debug)]
    struct Expander(proc_macro_api::ProcMacro);

    impl ProcMacroExpander for Expander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            attrs: Option<&tt::Subtree>,
            env: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
            match self.0.expand(subtree, attrs, env) {
                Ok(Ok(subtree)) => Ok(subtree),
                Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
                Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
            }
        }
    }

    /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
    #[derive(Debug)]
    struct IdentityExpander;

    impl ProcMacroExpander for IdentityExpander {
        fn expand(
            &self,
            subtree: &tt::Subtree,
            _: Option<&tt::Subtree>,
            _: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            Ok(subtree.clone())
        }
    }

    /// Empty expander, used for proc-macros that are deliberately ignored by the user.
    #[derive(Debug)]
    struct EmptyExpander;

    impl ProcMacroExpander for EmptyExpander {
        fn expand(
            &self,
            _: &tt::Subtree,
            _: Option<&tt::Subtree>,
            _: &Env,
        ) -> Result<tt::Subtree, ProcMacroExpansionError> {
            Ok(tt::Subtree::default())
        }
    }
}

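/// Returns `true` if a change to `path` might affect the shape of the project
/// model (as opposed to just the contents of an already-known file), meaning a
/// workspace reload should be scheduled.
///
/// A hedged sketch of the intent; `abs` stands for a hypothetical helper that
/// builds an `AbsPath`, and the paths are made up:
///
/// ```rust,ignore
/// // Manifest and lockfile edits always matter:
/// assert!(should_refresh_for_change(abs("/proj/Cargo.toml"), ChangeKind::Modify));
/// // Creating a new implicit target changes the crate structure:
/// assert!(should_refresh_for_change(abs("/proj/src/bin/tool.rs"), ChangeKind::Create));
/// // Editing the body of an already-tracked file does not:
/// assert!(!should_refresh_for_change(abs("/proj/src/lib.rs"), ChangeKind::Modify));
/// ```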
pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
    const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
    const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];

    let file_name = match path.file_name().unwrap_or_default().to_str() {
        Some(it) => it,
        None => return false,
    };

    if let "Cargo.toml" | "Cargo.lock" = file_name {
        return true;
    }
    if change_kind == ChangeKind::Modify {
        return false;
    }

    // .cargo/config{.toml}
    if path.extension().unwrap_or_default() != "rs" {
        let is_cargo_config = matches!(file_name, "config.toml" | "config")
            && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")).unwrap_or(false);
        return is_cargo_config;
    }

    if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
        return true;
    }
    let parent = match path.parent() {
        Some(it) => it,
        None => return false,
    };
    if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
        return true;
    }
    if file_name == "main.rs" {
        let grand_parent = match parent.parent() {
            Some(it) => it,
            None => return false,
        };
        if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
            return true;
        }
    }
    false
}