// compiler/rustc_codegen_ssa/src/back/archive.rs
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::memmap::Mmap;
use rustc_session::cstore::DllImport;
use rustc_session::Session;
use rustc_span::symbol::Symbol;

use super::metadata::search_for_section;

pub use ar_archive_writer::get_native_object_symbols;
use ar_archive_writer::{write_archive_to_stream, ArchiveKind, NewArchiveMember};
use object::read::archive::ArchiveFile;
use object::read::macho::FatArch;
use tempfile::Builder as TempFileBuilder;

use std::error::Error;
use std::fs::File;
use std::io;
use std::path::{Path, PathBuf};

// Re-exporting for rustc_codegen_llvm::back::archive
pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind};

pub trait ArchiveBuilderBuilder {
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;

    /// Creates a DLL Import Library
    /// <https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-creation#creating-an-import-library>
    /// and returns the path on disk to that import library.
    /// This function doesn't take `self` so that it can be called from
    /// `linker_with_args`, which is specialized on `ArchiveBuilder` but
    /// doesn't take or create an instance of that type.
    fn create_dll_import_lib(
        &self,
        sess: &Session,
        lib_name: &str,
        dll_imports: &[DllImport],
        tmpdir: &Path,
        is_direct_dependency: bool,
    ) -> PathBuf;

    fn extract_bundled_libs<'a>(
        &'a self,
        rlib: &'a Path,
        outdir: &Path,
        bundled_lib_file_names: &FxHashSet<Symbol>,
    ) -> Result<(), ExtractBundledLibsError<'_>> {
        let archive_map = unsafe {
            Mmap::map(
                File::open(rlib)
                    .map_err(|e| ExtractBundledLibsError::OpenFile { rlib, error: Box::new(e) })?,
            )
            .map_err(|e| ExtractBundledLibsError::MmapFile { rlib, error: Box::new(e) })?
        };
        let archive = ArchiveFile::parse(&*archive_map)
            .map_err(|e| ExtractBundledLibsError::ParseArchive { rlib, error: Box::new(e) })?;

        for entry in archive.members() {
            let entry = entry
                .map_err(|e| ExtractBundledLibsError::ReadEntry { rlib, error: Box::new(e) })?;
            let data = entry
                .data(&*archive_map)
                .map_err(|e| ExtractBundledLibsError::ArchiveMember { rlib, error: Box::new(e) })?;
            let name = std::str::from_utf8(entry.name())
                .map_err(|e| ExtractBundledLibsError::ConvertName { rlib, error: Box::new(e) })?;
            if !bundled_lib_file_names.contains(&Symbol::intern(name)) {
                continue; // We need to extract only native libraries.
            }
            let data = search_for_section(rlib, data, ".bundled_lib").map_err(|e| {
                ExtractBundledLibsError::ExtractSection { rlib, error: Box::<dyn Error>::from(e) }
            })?;
            std::fs::write(&outdir.join(&name), data)
                .map_err(|e| ExtractBundledLibsError::WriteFile { rlib, error: Box::new(e) })?;
        }
        Ok(())
    }
}
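
// A minimal sketch of driving `extract_bundled_libs` (the builder-builder value,
// temp directory, rlib path, and name list below are placeholders, not the real
// call site, which lives in the link code):
//
//     let names: FxHashSet<Symbol> =
//         bundled_lib_names.iter().map(|name| Symbol::intern(name)).collect();
//     archive_builder_builder.extract_bundled_libs(Path::new("libfoo.rlib"), tmpdir, &names)?;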

pub trait ArchiveBuilder<'a> {
    fn add_file(&mut self, path: &Path);

    fn add_archive(
        &mut self,
        archive: &Path,
        skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()>;

    fn build(self: Box<Self>, output: &Path) -> bool;
}
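
// Typical lifecycle of an `ArchiveBuilder`, as a sketch (the variable names and
// paths are illustrative; a real caller obtains the boxed builder from
// `ArchiveBuilderBuilder::new_archive_builder`):
//
//     let mut builder = archive_builder_builder.new_archive_builder(sess);
//     builder.add_file(Path::new("foo.o"));
//     builder.add_archive(Path::new("libbar.a"), Box::new(|name| name == "lib.rmeta"))?;
//     let has_members = builder.build(Path::new("libcombined.a"));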

#[must_use = "must call build() to finish building the archive"]
pub struct ArArchiveBuilder<'a> {
    sess: &'a Session,
    get_object_symbols:
        fn(buf: &[u8], f: &mut dyn FnMut(&[u8]) -> io::Result<()>) -> io::Result<bool>,

    src_archives: Vec<(PathBuf, Mmap)>,
    // Don't use a `HashMap` here, as the order is important. `lib.rmeta` needs
    // to be at the end of an archive in some cases so that linkers don't get confused.
    entries: Vec<(Vec<u8>, ArchiveEntry)>,
}

#[derive(Debug)]
enum ArchiveEntry {
    FromArchive { archive_index: usize, file_range: (u64, u64) },
    File(PathBuf),
}

impl<'a> ArArchiveBuilder<'a> {
    pub fn new(
        sess: &'a Session,
        get_object_symbols: fn(
            buf: &[u8],
            f: &mut dyn FnMut(&[u8]) -> io::Result<()>,
        ) -> io::Result<bool>,
    ) -> ArArchiveBuilder<'a> {
        ArArchiveBuilder { sess, get_object_symbols, src_archives: vec![], entries: vec![] }
    }
}
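
// Constructing the builder directly, as a sketch; the re-exported
// `get_native_object_symbols` is the callback normally passed here so that the
// defined symbols of each member end up in the archive's symbol table:
//
//     let builder = ArArchiveBuilder::new(sess, get_native_object_symbols);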

fn try_filter_fat_archs<'a>(
    archs: object::read::Result<&[impl FatArch]>,
    target_arch: object::Architecture,
    archive_map_data: &'a [u8],
) -> io::Result<Option<(&'a [u8], u64)>> {
    let archs = archs.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

    let desired = match archs.iter().find(|a| a.architecture() == target_arch) {
        Some(a) => a,
        None => return Ok(None),
    };

    Ok(Some((
        desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?,
        desired.offset().into(),
    )))
}
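
// On success, the returned tuple is the byte slice of the matching architecture's
// member inside the fat file together with that member's offset from the start of
// the fat file; `add_archive` below uses the offset to rebase member file ranges.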

pub fn try_extract_macho_fat_archive<'a>(
    sess: &Session,
    archive_bytes: &'a [u8],
) -> io::Result<Option<(&'a [u8], u64)>> {
    let target_arch = match sess.target.arch.as_ref() {
        "aarch64" => object::Architecture::Aarch64,
        "x86_64" => object::Architecture::X86_64,
        _ => return Ok(None),
    };

    match object::macho::FatHeader::parse(archive_bytes) {
        Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC => {
            let archs = object::macho::FatHeader::parse_arch32(archive_bytes);
            try_filter_fat_archs(archs, target_arch, archive_bytes)
        }
        Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC_64 => {
            let archs = object::macho::FatHeader::parse_arch64(archive_bytes);
            try_filter_fat_archs(archs, target_arch, archive_bytes)
        }
        // Not a FatHeader at all, just return None.
        _ => Ok(None),
    }
}
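
// `FAT_MAGIC` marks a 32-bit universal (fat) header whose per-arch entries use
// 32-bit offsets and sizes, while `FAT_MAGIC_64` marks the 64-bit variant; any
// other magic means the buffer is not a Mach-O universal binary, so the caller
// falls back to treating the whole buffer as the archive.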

impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
    fn add_archive(
        &mut self,
        archive_path: &Path,
        mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()> {
        let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
        if self.src_archives.iter().any(|archive| archive.0 == archive_path) {
            return Ok(());
        }

        let (archive_bytes, offset) = if self.sess.target.llvm_target.contains("-apple-macosx") {
            if let Some((sub_archive, archive_offset)) =
                try_extract_macho_fat_archive(&self.sess, &*archive_map)?
            {
                (sub_archive, Some(archive_offset))
            } else {
                (&*archive_map, None)
            }
        } else {
            (&*archive_map, None)
        };

        let archive = ArchiveFile::parse(&*archive_bytes)
            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
        let archive_index = self.src_archives.len();

        for entry in archive.members() {
            let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            let file_name = String::from_utf8(entry.name().to_vec())
                .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            if !skip(&file_name) {
                let mut range = entry.file_range();
                if let Some(offset) = offset {
                    range.0 += offset;
                }
                self.entries.push((
                    file_name.into_bytes(),
                    ArchiveEntry::FromArchive { archive_index, file_range: range },
                ));
            }
        }

        self.src_archives.push((archive_path.to_owned(), archive_map));
        Ok(())
    }

    /// Adds an arbitrary file to this archive.
    fn add_file(&mut self, file: &Path) {
        self.entries.push((
            file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
            ArchiveEntry::File(file.to_owned()),
        ));
    }

    /// Combines the provided files, rlibs, and native libraries into a single
    /// `Archive`.
    fn build(self: Box<Self>, output: &Path) -> bool {
        let sess = self.sess;
        match self.build_inner(output) {
            Ok(any_members) => any_members,
            Err(e) => sess.emit_fatal(ArchiveBuildFailure { error: e }),
        }
    }
}

impl<'a> ArArchiveBuilder<'a> {
    fn build_inner(self, output: &Path) -> io::Result<bool> {
        let archive_kind = match &*self.sess.target.archive_format {
            "gnu" => ArchiveKind::Gnu,
            "bsd" => ArchiveKind::Bsd,
            "darwin" => ArchiveKind::Darwin,
            "coff" => ArchiveKind::Coff,
            kind => {
                self.sess.emit_fatal(UnknownArchiveKind { kind });
            }
        };

        let mut entries = Vec::new();

        for (entry_name, entry) in self.entries {
            let data =
                match entry {
                    ArchiveEntry::FromArchive { archive_index, file_range } => {
                        let src_archive = &self.src_archives[archive_index];

                        let data = &src_archive.1
                            [file_range.0 as usize..file_range.0 as usize + file_range.1 as usize];

                        Box::new(data) as Box<dyn AsRef<[u8]>>
                    }
                    ArchiveEntry::File(file) => unsafe {
                        Box::new(
                            Mmap::map(File::open(file).map_err(|err| {
                                io_error_context("failed to open object file", err)
                            })?)
                            .map_err(|err| io_error_context("failed to map object file", err))?,
                        ) as Box<dyn AsRef<[u8]>>
                    },
                };

            entries.push(NewArchiveMember {
                buf: data,
                get_symbols: self.get_object_symbols,
                member_name: String::from_utf8(entry_name).unwrap(),
                mtime: 0,
                uid: 0,
                gid: 0,
                perms: 0o644,
            })
        }

        // Write to a temporary file first before atomically renaming to the final name.
        // This prevents programs (including rustc) from attempting to read a partial archive.
        // It also enables writing an archive with the same filename as a dependency on Windows as
        // required by a test.
        let mut archive_tmpfile = TempFileBuilder::new()
            .suffix(".temp-archive")
            .tempfile_in(output.parent().unwrap_or_else(|| Path::new("")))
            .map_err(|err| io_error_context("couldn't create a temp file", err))?;

        write_archive_to_stream(
            archive_tmpfile.as_file_mut(),
            &entries,
            true,
            archive_kind,
            true,
            false,
        )?;

        let any_entries = !entries.is_empty();
        drop(entries);
        // Drop src_archives to unmap all input archives, which is necessary if we want to write the
        // output archive to the same location as an input archive on Windows.
        drop(self.src_archives);

        archive_tmpfile
            .persist(output)
            .map_err(|err| io_error_context("failed to rename archive file", err.error))?;

        Ok(any_entries)
    }
}

fn io_error_context(context: &str, err: io::Error) -> io::Error {
    io::Error::new(io::ErrorKind::Other, format!("{context}: {err}"))
}