git.lizzy.rs Git - rust.git/commitdiff
incr: cache dwarf objects in work products
author David Wood <david.wood@huawei.com>
Mon, 4 Jul 2022 13:38:42 +0000 (14:38 +0100)
committer David Wood <david.wood@huawei.com>
Wed, 6 Jul 2022 10:15:13 +0000 (11:15 +0100)
Cache DWARF objects alongside object files in work products when they
exist, so that DWARF object files are available to thorin in packed
mode in incremental scenarios.

Signed-off-by: David Wood <david.wood@huawei.com>
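
The core of the change is that a work product no longer records a single cached file but a
map from an extension-like key to a saved file name, so that both the object file ("o") and
the DWARF object ("dwo") of a codegen unit can be cached. A minimal standalone sketch of the
before/after shape, using std's HashMap in place of rustc's FxHashMap (names mirror the
patch, but this is illustrative code, not the compiler's):

    #![allow(dead_code)]
    use std::collections::HashMap;

    // Before this commit: exactly one cached file per codegen unit.
    struct WorkProductOld {
        cgu_name: String,
        saved_file: String,
    }

    // After this commit: any number of cached files, keyed by an identifier
    // that is, by convention, the file extension ("o", "dwo").
    struct WorkProductNew {
        cgu_name: String,
        saved_files: HashMap<String, String>,
    }

    fn main() {
        let mut saved_files = HashMap::new();
        saved_files.insert("o".to_string(), "example.cgu0.o".to_string());
        saved_files.insert("dwo".to_string(), "example.cgu0.dwo".to_string());
        let wp = WorkProductNew { cgu_name: "example.cgu0".to_string(), saved_files };
        println!("{} caches {} file(s)", wp.cgu_name, wp.saved_files.len());
    }
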
compiler/rustc_codegen_cranelift/src/driver/aot.rs
compiler/rustc_codegen_ssa/src/back/write.rs
compiler/rustc_incremental/src/persist/load.rs
compiler/rustc_incremental/src/persist/save.rs
compiler/rustc_incremental/src/persist/work_product.rs
compiler/rustc_query_system/src/dep_graph/graph.rs
src/test/incremental/split_debuginfo_cached.rs [new file with mode: 0644]

index 05457ce15e9a7d8bfce0d6747bd888373e25f033..50d8fc30d7d7cabe13275e9a3c099834abb37c69 100644 (file)
@@ -66,7 +66,11 @@ fn emit_module(
     let work_product = if backend_config.disable_incr_cache {
         None
     } else {
-        rustc_incremental::copy_cgu_workproduct_to_incr_comp_cache_dir(tcx.sess, &name, &tmp_file)
+        rustc_incremental::copy_cgu_workproduct_to_incr_comp_cache_dir(
+            tcx.sess,
+            &name,
+            &[("o", &tmp_file)],
+        )
     };
 
     ModuleCodegenResult(
@@ -82,7 +86,10 @@ fn reuse_workproduct_for_cgu(
 ) -> CompiledModule {
     let work_product = cgu.previous_work_product(tcx);
     let obj_out = tcx.output_filenames(()).temp_path(OutputType::Object, Some(cgu.name().as_str()));
-    let source_file = rustc_incremental::in_incr_comp_dir_sess(&tcx.sess, &work_product.saved_file);
+    let source_file = rustc_incremental::in_incr_comp_dir_sess(
+        &tcx.sess,
+        &work_product.saved_files.get("o").expect("no saved object file in work product"),
+    );
     if let Err(err) = rustc_fs_util::link_or_copy(&source_file, &obj_out) {
         tcx.sess.err(&format!(
             "unable to copy {} to {}: {}",
index 632f07c5c2d80d63ba5515f0550f5e0bfcca0a48..f4a5cac872e0516c2d5b7e483bd6ad3f9840fbaf 100644 (file)
@@ -494,12 +494,18 @@ fn copy_all_cgu_workproducts_to_incr_comp_cache_dir(
     let _timer = sess.timer("copy_all_cgu_workproducts_to_incr_comp_cache_dir");
 
     for module in compiled_modules.modules.iter().filter(|m| m.kind == ModuleKind::Regular) {
-        if let Some(path) = &module.object {
-            if let Some((id, product)) =
-                copy_cgu_workproduct_to_incr_comp_cache_dir(sess, &module.name, path)
-            {
-                work_products.insert(id, product);
-            }
+        let mut files = Vec::new();
+        if let Some(object_file_path) = &module.object {
+            files.push(("o", object_file_path.as_path()));
+        }
+        if let Some(dwarf_object_file_path) = &module.dwarf_object {
+            files.push(("dwo", dwarf_object_file_path.as_path()));
+        }
+
+        if let Some((id, product)) =
+            copy_cgu_workproduct_to_incr_comp_cache_dir(sess, &module.name, files.as_slice())
+        {
+            work_products.insert(id, product);
         }
     }
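
The SSA backend gathers whichever outputs a module actually has (the object file always, the
DWARF object only under split DWARF) into a Vec before passing the slice along. A simplified
standalone sketch of that collection step (the ModuleOutputs struct and file names are
illustrative, not rustc types):

    use std::path::{Path, PathBuf};

    struct ModuleOutputs {
        object: Option<PathBuf>,
        dwarf_object: Option<PathBuf>,
    }

    fn collect_files(module: &ModuleOutputs) -> Vec<(&'static str, &Path)> {
        let mut files = Vec::new();
        if let Some(object_file_path) = &module.object {
            files.push(("o", object_file_path.as_path()));
        }
        if let Some(dwarf_object_file_path) = &module.dwarf_object {
            files.push(("dwo", dwarf_object_file_path.as_path()));
        }
        files
    }

    fn main() {
        let module = ModuleOutputs {
            object: Some(PathBuf::from("example.cgu0.o")),
            dwarf_object: None, // no -Csplit-debuginfo, so nothing extra to cache
        };
        assert_eq!(collect_files(&module).len(), 1);
    }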
 
@@ -856,29 +862,50 @@ fn execute_copy_from_cache_work_item<B: ExtraBackendMethods>(
     assert!(module_config.emit_obj != EmitObj::None);
 
     let incr_comp_session_dir = cgcx.incr_comp_session_dir.as_ref().unwrap();
-    let obj_out = cgcx.output_filenames.temp_path(OutputType::Object, Some(&module.name));
-    let source_file = in_incr_comp_dir(&incr_comp_session_dir, &module.source.saved_file);
-    debug!(
-        "copying pre-existing module `{}` from {:?} to {}",
-        module.name,
-        source_file,
-        obj_out.display()
+
+    let load_from_incr_comp_dir = |output_path: PathBuf, saved_path: &str| {
+        let source_file = in_incr_comp_dir(&incr_comp_session_dir, saved_path);
+        debug!(
+            "copying pre-existing module `{}` from {:?} to {}",
+            module.name,
+            source_file,
+            output_path.display()
+        );
+        match link_or_copy(&source_file, &output_path) {
+            Ok(_) => Some(output_path),
+            Err(err) => {
+                let diag_handler = cgcx.create_diag_handler();
+                diag_handler.err(&format!(
+                    "unable to copy {} to {}: {}",
+                    source_file.display(),
+                    output_path.display(),
+                    err
+                ));
+                None
+            }
+        }
+    };
+
+    let object = load_from_incr_comp_dir(
+        cgcx.output_filenames.temp_path(OutputType::Object, Some(&module.name)),
+        &module.source.saved_files.get("o").expect("no saved object file in work product"),
     );
-    if let Err(err) = link_or_copy(&source_file, &obj_out) {
-        let diag_handler = cgcx.create_diag_handler();
-        diag_handler.err(&format!(
-            "unable to copy {} to {}: {}",
-            source_file.display(),
-            obj_out.display(),
-            err
-        ));
-    }
+    let dwarf_object =
+        module.source.saved_files.get("dwo").as_ref().and_then(|saved_dwarf_object_file| {
+            let dwarf_obj_out = cgcx
+                .output_filenames
+                .split_dwarf_path(cgcx.split_debuginfo, cgcx.split_dwarf_kind, Some(&module.name))
+                .expect(
+                    "saved dwarf object in work product but `split_dwarf_path` returned `None`",
+                );
+            load_from_incr_comp_dir(dwarf_obj_out, &saved_dwarf_object_file)
+        });
 
     WorkItemResult::Compiled(CompiledModule {
         name: module.name,
         kind: ModuleKind::Regular,
-        object: Some(obj_out),
-        dwarf_object: None,
+        object,
+        dwarf_object,
         bytecode: None,
     })
 }
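
When a cached module is reused, the copy-and-report logic is now shared through a local
closure and applied once to the mandatory object file and once, via and_then, to the
optional DWARF object. A standalone sketch of the same shape, with std::fs::copy standing in
for rustc_fs_util::link_or_copy and eprintln! standing in for the diagnostic handler:

    use std::fs;
    use std::path::PathBuf;

    fn main() {
        let incr_comp_session_dir = PathBuf::from("/tmp/incr-cache");

        // Copy a saved file out of the incremental directory, returning the
        // output path on success and None (after reporting) on failure.
        let load_from_incr_comp_dir = |output_path: PathBuf, saved_path: &str| {
            let source_file = incr_comp_session_dir.join(saved_path);
            match fs::copy(&source_file, &output_path) {
                Ok(_) => Some(output_path),
                Err(err) => {
                    eprintln!(
                        "unable to copy {} to {}: {}",
                        source_file.display(),
                        output_path.display(),
                        err
                    );
                    None
                }
            }
        };

        // The object file must be in the work product; the DWARF object may not be.
        let object = load_from_incr_comp_dir(PathBuf::from("example.cgu0.o"), "example.cgu0.o");
        let saved_dwo: Option<&str> = None;
        let dwarf_object = saved_dwo
            .and_then(|saved| load_from_incr_comp_dir(PathBuf::from("example.cgu0.dwo"), saved));
        println!("object: {object:?}, dwarf_object: {dwarf_object:?}");
    }
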
index 9c325faae80588ec57b9a3496e86a8a7309dbad6..f59d8d596b98ec1f9bcf8d50a36d0967465d7ce7 100644 (file)
@@ -161,19 +161,13 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
                 Decodable::decode(&mut work_product_decoder);
 
             for swp in work_products {
-                let mut all_files_exist = true;
-                let path = in_incr_comp_dir_sess(sess, &swp.work_product.saved_file);
-                if !path.exists() {
-                    all_files_exist = false;
-
-                    if sess.opts.debugging_opts.incremental_info {
-                        eprintln!(
-                            "incremental: could not find file for work \
-                                    product: {}",
-                            path.display()
-                        );
+                let all_files_exist = swp.work_product.saved_files.iter().all(|(_, path)| {
+                    let exists = in_incr_comp_dir_sess(sess, path).exists();
+                    if !exists && sess.opts.debugging_opts.incremental_info {
+                        eprintln!("incremental: could not find file for work product: {path}",);
                     }
-                }
+                    exists
+                });
 
                 if all_files_exist {
                     debug!("reconcile_work_products: all files for {:?} exist", swp);
index 79836d66011a27e7bd55ae96ef51be0e08826385..4059b7cfc8eb9eb3d2ca072b795fffe2e619996a 100644 (file)
@@ -108,16 +108,17 @@ pub fn save_work_product_index(
     for (id, wp) in previous_work_products.iter() {
         if !new_work_products.contains_key(id) {
             work_product::delete_workproduct_files(sess, wp);
-            debug_assert!(!in_incr_comp_dir_sess(sess, &wp.saved_file).exists());
+            debug_assert!(
+                !wp.saved_files.iter().all(|(_, path)| in_incr_comp_dir_sess(sess, path).exists())
+            );
         }
     }
 
     // Check that we did not delete one of the current work-products:
     debug_assert!({
-        new_work_products
-            .iter()
-            .map(|(_, wp)| in_incr_comp_dir_sess(sess, &wp.saved_file))
-            .all(|path| path.exists())
+        new_work_products.iter().all(|(_, wp)| {
+            wp.saved_files.iter().all(|(_, path)| in_incr_comp_dir_sess(sess, path).exists())
+        })
     });
 }
 
index 4789c0f581fdb952971d28d748ceeedb1c0c72ea..1b184eca964c34fa5247205613f3aadd9ea94f36 100644 (file)
@@ -3,6 +3,7 @@
 //! [work products]: WorkProduct
 
 use crate::persist::fs::*;
+use rustc_data_structures::stable_map::FxHashMap;
 use rustc_fs_util::link_or_copy;
 use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
 use rustc_session::Session;
 pub fn copy_cgu_workproduct_to_incr_comp_cache_dir(
     sess: &Session,
     cgu_name: &str,
-    path: &Path,
+    files: &[(&'static str, &Path)],
 ) -> Option<(WorkProductId, WorkProduct)> {
-    debug!("copy_cgu_workproduct_to_incr_comp_cache_dir({:?},{:?})", cgu_name, path);
+    debug!(?cgu_name, ?files);
     sess.opts.incremental.as_ref()?;
 
-    let file_name = format!("{}.o", cgu_name);
-    let path_in_incr_dir = in_incr_comp_dir_sess(sess, &file_name);
-    let saved_file = match link_or_copy(path, &path_in_incr_dir) {
-        Ok(_) => file_name,
-        Err(err) => {
-            sess.warn(&format!(
-                "error copying object file `{}` to incremental directory as `{}`: {}",
-                path.display(),
-                path_in_incr_dir.display(),
-                err
-            ));
-            return None;
+    let mut saved_files = FxHashMap::default();
+    for (ext, path) in files {
+        let file_name = format!("{cgu_name}.{ext}");
+        let path_in_incr_dir = in_incr_comp_dir_sess(sess, &file_name);
+        match link_or_copy(path, &path_in_incr_dir) {
+            Ok(_) => {
+                let _ = saved_files.insert(ext.to_string(), file_name);
+            }
+            Err(err) => {
+                sess.warn(&format!(
+                    "error copying object file `{}` to incremental directory as `{}`: {}",
+                    path.display(),
+                    path_in_incr_dir.display(),
+                    err
+                ));
+            }
         }
-    };
-
-    let work_product = WorkProduct { cgu_name: cgu_name.to_string(), saved_file };
+    }
 
+    let work_product = WorkProduct { cgu_name: cgu_name.to_string(), saved_files };
+    debug!(?work_product);
     let work_product_id = WorkProductId::from_cgu_name(cgu_name);
     Some((work_product_id, work_product))
 }
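
Saving a work product now loops over all provided files: each is linked or copied into the
incremental directory as <cgu_name>.<ext>, successes are recorded in the map, and a failure
for one file only drops that file rather than the whole work product. A standalone sketch of
that loop with std::fs::copy in place of link_or_copy (directory and file names are made up):

    use std::collections::HashMap;
    use std::fs;
    use std::path::Path;

    fn save_files(
        incr_dir: &Path,
        cgu_name: &str,
        files: &[(&'static str, &Path)],
    ) -> HashMap<String, String> {
        let mut saved_files = HashMap::new();
        for (ext, path) in files {
            let file_name = format!("{cgu_name}.{ext}");
            let path_in_incr_dir = incr_dir.join(&file_name);
            match fs::copy(path, &path_in_incr_dir) {
                // Record only the files that made it into the cache directory.
                Ok(_) => {
                    saved_files.insert(ext.to_string(), file_name);
                }
                Err(err) => eprintln!(
                    "error copying `{}` to `{}`: {}",
                    path.display(),
                    path_in_incr_dir.display(),
                    err
                ),
            }
        }
        saved_files
    }

    fn main() {
        let saved = save_files(
            Path::new("/tmp/incr-cache"),
            "example.cgu0",
            &[("o", Path::new("example.cgu0.o")), ("dwo", Path::new("example.cgu0.dwo"))],
        );
        println!("saved {} file(s)", saved.len());
    }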
 
 /// Removes files for a given work product.
 pub fn delete_workproduct_files(sess: &Session, work_product: &WorkProduct) {
-    let path = in_incr_comp_dir_sess(sess, &work_product.saved_file);
-    match std_fs::remove_file(&path) {
-        Ok(()) => {}
-        Err(err) => {
+    for (_, path) in &work_product.saved_files {
+        let path = in_incr_comp_dir_sess(sess, path);
+        if let Err(err) = std_fs::remove_file(&path) {
             sess.warn(&format!(
                 "file-system error deleting outdated file `{}`: {}",
                 path.display(),
index 341cf8f827bc9b892287edc86668562973d7971e..3291717c550df27d2a5a7b3f3ba51b61c1d0f2f2 100644 (file)
@@ -886,8 +886,12 @@ pub(crate) fn next_virtual_depnode_index(&self) -> DepNodeIndex {
 #[derive(Clone, Debug, Encodable, Decodable)]
 pub struct WorkProduct {
     pub cgu_name: String,
-    /// Saved file associated with this CGU.
-    pub saved_file: String,
+    /// Saved files associated with this CGU. In each key/value pair, the value is the path to the
+    /// saved file and the key is some identifier for the type of file being saved.
+    ///
+    /// By convention, file extensions are currently used as identifiers, i.e. the key "o" maps to
+    /// the object file's path, and "dwo" to the dwarf object file's path.
+    pub saved_files: FxHashMap<String, String>,
 }
 
 // Index type for `DepNodeData`'s edges.
diff --git a/src/test/incremental/split_debuginfo_cached.rs b/src/test/incremental/split_debuginfo_cached.rs
new file mode 100644 (file)
index 0000000..25c802d
--- /dev/null
@@ -0,0 +1,25 @@
+// Check that compiling with packed Split DWARF twice succeeds. This should confirm that DWARF
+// objects are cached as work products and available to the incremental compilation for `thorin` to
+// pack into a DWARF package.
+
+// ignore-tidy-linelength
+// only-x86_64-unknown-linux-gnu
+// revisions:rpass1 rpass2
+
+// [rpass1]compile-flags: -g -Zquery-dep-graph -Zunstable-options -Csplit-debuginfo=packed -Zsplit-dwarf-kind=split
+// [rpass2]compile-flags: -g -Zquery-dep-graph -Zunstable-options -Csplit-debuginfo=packed -Zsplit-dwarf-kind=split
+
+#![feature(rustc_attrs)]
+// For `rpass2`, nothing has changed so everything should be re-used.
+#![rustc_partition_reused(module = "split_debuginfo_cached", cfg = "rpass2")]
+#![rustc_partition_reused(module = "split_debuginfo_cached-another_module", cfg = "rpass2")]
+
+mod another_module {
+    pub fn foo() -> &'static str {
+        "hello world"
+    }
+}
+
+pub fn main() {
+    println!("{}", another_module::foo());
+}