// compiler/rustc_codegen_ssa/src/back/archive.rs
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::memmap::Mmap;
use rustc_session::cstore::DllImport;
use rustc_session::Session;
use rustc_span::symbol::Symbol;

use super::metadata::search_for_section;

pub use ar_archive_writer::get_native_object_symbols;
use ar_archive_writer::{write_archive_to_stream, ArchiveKind, NewArchiveMember};
use object::read::archive::ArchiveFile;
use object::read::macho::FatArch;
use tempfile::Builder as TempFileBuilder;

use std::error::Error;
use std::fs::File;
use std::io::{self, Write};
use std::path::{Path, PathBuf};

// Re-exporting for rustc_codegen_llvm::back::archive
pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind};

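/// Factory for [`ArchiveBuilder`]s, plus a couple of archive-related helpers
/// that don't need a builder instance (creating DLL import libraries and
/// extracting libraries that were bundled into an rlib).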
pub trait ArchiveBuilderBuilder {
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;

    /// Creates a DLL Import Library
    /// <https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-creation#creating-an-import-library>
    /// and returns the path on disk to that import library.
    /// This function doesn't take `self` so that it can be called from
    /// `linker_with_args`, which is specialized on `ArchiveBuilder` but
    /// doesn't take or create an instance of that type.
    fn create_dll_import_lib(
        &self,
        sess: &Session,
        lib_name: &str,
        dll_imports: &[DllImport],
        tmpdir: &Path,
        is_direct_dependency: bool,
    ) -> PathBuf;

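    /// Extracts the native libraries listed in `bundled_lib_file_names` from
    /// the rlib at `rlib`, writing each one to `outdir`.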
    fn extract_bundled_libs<'a>(
        &'a self,
        rlib: &'a Path,
        outdir: &Path,
        bundled_lib_file_names: &FxHashSet<Symbol>,
    ) -> Result<(), ExtractBundledLibsError<'_>> {
        let archive_map = unsafe {
            Mmap::map(
                File::open(rlib)
                    .map_err(|e| ExtractBundledLibsError::OpenFile { rlib, error: Box::new(e) })?,
            )
            .map_err(|e| ExtractBundledLibsError::MmapFile { rlib, error: Box::new(e) })?
        };
        let archive = ArchiveFile::parse(&*archive_map)
            .map_err(|e| ExtractBundledLibsError::ParseArchive { rlib, error: Box::new(e) })?;

        for entry in archive.members() {
            let entry = entry
                .map_err(|e| ExtractBundledLibsError::ReadEntry { rlib, error: Box::new(e) })?;
            let data = entry
                .data(&*archive_map)
                .map_err(|e| ExtractBundledLibsError::ArchiveMember { rlib, error: Box::new(e) })?;
            let name = std::str::from_utf8(entry.name())
                .map_err(|e| ExtractBundledLibsError::ConvertName { rlib, error: Box::new(e) })?;
            if !bundled_lib_file_names.contains(&Symbol::intern(name)) {
                continue; // Only the bundled native libraries need to be extracted.
            }
            let data = search_for_section(rlib, data, ".bundled_lib").map_err(|e| {
                ExtractBundledLibsError::ExtractSection { rlib, error: Box::<dyn Error>::from(e) }
            })?;
            std::fs::write(&outdir.join(&name), data)
                .map_err(|e| ExtractBundledLibsError::WriteFile { rlib, error: Box::new(e) })?;
        }
        Ok(())
    }
}

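/// Builds a static archive from individual files and the members of other
/// archives, writing the result to disk with [`Self::build`].
///
/// A rough usage sketch (the builder would normally come from an
/// [`ArchiveBuilderBuilder`]; the paths and surrounding code are illustrative
/// only):
///
/// ```ignore (illustrative)
/// let mut builder = archive_builder_builder.new_archive_builder(sess);
/// builder.add_file(Path::new("foo.o"));
/// // Pull in every member of libbar.a, skipping none of them.
/// builder.add_archive(Path::new("libbar.a"), Box::new(|_| false))?;
/// let wrote_any_members = builder.build(Path::new("libbaz.a"));
/// ```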
pub trait ArchiveBuilder<'a> {
    fn add_file(&mut self, path: &Path);

    fn add_archive(
        &mut self,
        archive: &Path,
        skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()>;

    fn build(self: Box<Self>, output: &Path) -> bool;
}

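/// Archive builder implemented on top of the `ar_archive_writer` crate.
/// Entries are only collected here (as byte ranges into memory-mapped source
/// archives or as paths to files on disk); nothing is written until `build`
/// is called.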
#[must_use = "must call build() to finish building the archive"]
pub struct ArArchiveBuilder<'a> {
    sess: &'a Session,
    get_object_symbols:
        fn(buf: &[u8], f: &mut dyn FnMut(&[u8]) -> io::Result<()>) -> io::Result<bool>,

    src_archives: Vec<(PathBuf, Mmap)>,
    // Don't use a `HashMap` here, as the order is important. `lib.rmeta` needs
    // to be at the end of an archive in some cases so that linkers don't get confused.
    entries: Vec<(Vec<u8>, ArchiveEntry)>,
}

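/// A pending archive member: either a byte range inside an already-mapped
/// source archive, or a file on disk that is mapped when the archive is built.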
#[derive(Debug)]
enum ArchiveEntry {
    FromArchive { archive_index: usize, file_range: (u64, u64) },
    File(PathBuf),
}

impl<'a> ArArchiveBuilder<'a> {
    pub fn new(
        sess: &'a Session,
        get_object_symbols: fn(
            buf: &[u8],
            f: &mut dyn FnMut(&[u8]) -> io::Result<()>,
        ) -> io::Result<bool>,
    ) -> ArArchiveBuilder<'a> {
        ArArchiveBuilder { sess, get_object_symbols, src_archives: vec![], entries: vec![] }
    }
}

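/// Picks the slice matching `target_arch` out of a Mach-O fat (universal)
/// archive and copies it into a persistent temporary file, returning that
/// file's path. Returns `Ok(None)` if the fat archive has no slice for the
/// requested architecture.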
fn try_filter_fat_archs(
    archs: object::read::Result<&[impl FatArch]>,
    target_arch: object::Architecture,
    archive_path: &Path,
    archive_map_data: &[u8],
) -> io::Result<Option<PathBuf>> {
    let archs = archs.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

    let desired = match archs.iter().find(|a| a.architecture() == target_arch) {
        Some(a) => a,
        None => return Ok(None),
    };

    let (mut new_f, extracted_path) = tempfile::Builder::new()
        .suffix(archive_path.file_name().unwrap())
        .tempfile()?
        .keep()
        .unwrap();

    new_f.write_all(
        desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?,
    )?;

    Ok(Some(extracted_path))
}

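/// If `archive_path` is a Mach-O fat (universal) archive, extracts the slice
/// for the session's target architecture into a temporary file and returns its
/// path; otherwise returns `Ok(None)` and the original archive is used as-is.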
pub fn try_extract_macho_fat_archive(
    sess: &Session,
    archive_path: &Path,
) -> io::Result<Option<PathBuf>> {
    let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
    let target_arch = match sess.target.arch.as_ref() {
        "aarch64" => object::Architecture::Aarch64,
        "x86_64" => object::Architecture::X86_64,
        _ => return Ok(None),
    };

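    // Fat headers always store their magic big-endian; FAT_MAGIC vs.
    // FAT_MAGIC_64 distinguishes 32-bit from 64-bit slice tables.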
    match object::macho::FatHeader::parse(&*archive_map) {
        Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC => {
            let archs = object::macho::FatHeader::parse_arch32(&*archive_map);
            try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
        }
        Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC_64 => {
            let archs = object::macho::FatHeader::parse_arch64(&*archive_map);
            try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
        }
        // Not a FatHeader at all, just return None.
        _ => Ok(None),
    }
}

impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
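    /// Adds every member of the archive at `archive_path` to this archive,
    /// except those for which `skip` returns true. The source archive is
    /// memory-mapped once and stays mapped until the builder is consumed by
    /// `build`.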
    fn add_archive(
        &mut self,
        archive_path: &Path,
        mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()> {
        let mut archive_path = archive_path.to_path_buf();
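        // On macOS targets the input may be a fat/universal archive; if so,
        // swap in the slice that matches the current target architecture.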
        if self.sess.target.llvm_target.contains("-apple-macosx") {
            if let Some(new_archive_path) =
                try_extract_macho_fat_archive(&self.sess, &archive_path)?
            {
                archive_path = new_archive_path
            }
        }

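        // Each source archive only needs to be mapped and indexed once.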
        if self.src_archives.iter().any(|archive| archive.0 == archive_path) {
            return Ok(());
        }

        let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
        let archive = ArchiveFile::parse(&*archive_map)
            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
        let archive_index = self.src_archives.len();

        for entry in archive.members() {
            let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            let file_name = String::from_utf8(entry.name().to_vec())
                .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            if !skip(&file_name) {
                self.entries.push((
                    file_name.into_bytes(),
                    ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
                ));
            }
        }

        self.src_archives.push((archive_path.to_owned(), archive_map));
        Ok(())
    }

    /// Adds an arbitrary file to this archive.
    fn add_file(&mut self, file: &Path) {
        self.entries.push((
            file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
            ArchiveEntry::File(file.to_owned()),
        ));
    }

    /// Combines the provided files, rlibs, and native libraries into a single
    /// archive.
    fn build(self: Box<Self>, output: &Path) -> bool {
        let sess = self.sess;
        match self.build_inner(output) {
            Ok(any_members) => any_members,
            Err(e) => sess.emit_fatal(ArchiveBuildFailure { error: e }),
        }
    }
}

impl<'a> ArArchiveBuilder<'a> {
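    /// Resolves every collected entry to its backing bytes and writes the
    /// archive with `ar_archive_writer`, going through a temporary file that
    /// is atomically renamed to `output`. Returns whether any members were
    /// written, or an `io::Error` describing what failed.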
    fn build_inner(self, output: &Path) -> io::Result<bool> {
        let archive_kind = match &*self.sess.target.archive_format {
            "gnu" => ArchiveKind::Gnu,
            "bsd" => ArchiveKind::Bsd,
            "darwin" => ArchiveKind::Darwin,
            "coff" => ArchiveKind::Coff,
            kind => {
                self.sess.emit_fatal(UnknownArchiveKind { kind });
            }
        };

        let mut entries = Vec::new();

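        // Resolve each entry to its bytes: either a slice of an already-mapped
        // source archive, or a fresh mmap of the file on disk.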
        for (entry_name, entry) in self.entries {
            let data =
                match entry {
                    ArchiveEntry::FromArchive { archive_index, file_range } => {
                        let src_archive = &self.src_archives[archive_index];

                        let data = &src_archive.1
                            [file_range.0 as usize..file_range.0 as usize + file_range.1 as usize];

                        Box::new(data) as Box<dyn AsRef<[u8]>>
                    }
                    ArchiveEntry::File(file) => unsafe {
                        Box::new(
                            Mmap::map(File::open(file).map_err(|err| {
                                io_error_context("failed to open object file", err)
                            })?)
                            .map_err(|err| io_error_context("failed to map object file", err))?,
                        ) as Box<dyn AsRef<[u8]>>
                    },
                };

            entries.push(NewArchiveMember {
                buf: data,
                get_symbols: self.get_object_symbols,
                member_name: String::from_utf8(entry_name).unwrap(),
                mtime: 0,
                uid: 0,
                gid: 0,
                perms: 0o644,
            })
        }

        // Write to a temporary file first, then atomically rename it to the final name.
        // This prevents programs (including rustc) from attempting to read a partial archive.
        // It also enables writing an archive with the same filename as a dependency on Windows,
        // as required by a test.
        let mut archive_tmpfile = TempFileBuilder::new()
            .suffix(".temp-archive")
            .tempfile_in(output.parent().unwrap_or_else(|| Path::new("")))
            .map_err(|err| io_error_context("couldn't create a temp file", err))?;

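        // The argument order follows LLVM's `writeArchive`, which
        // `ar_archive_writer` ports: write a symbol table, use the target's
        // archive kind, deterministic timestamps/owners, and no thin archive.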
        write_archive_to_stream(
            archive_tmpfile.as_file_mut(),
            &entries,
            true,
            archive_kind,
            true,
            false,
        )?;

        let any_entries = !entries.is_empty();
        drop(entries);
        // Drop src_archives to unmap all input archives, which is necessary if we want to write the
        // output archive to the same location as an input archive on Windows.
        drop(self.src_archives);

        archive_tmpfile
            .persist(output)
            .map_err(|err| io_error_context("failed to rename archive file", err.error))?;

        Ok(any_entries)
    }
}

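/// Wraps an `io::Error` in a new `io::Error` whose message is prefixed with
/// `context`.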
fn io_error_context(context: &str, err: io::Error) -> io::Error {
    io::Error::new(io::ErrorKind::Other, format!("{context}: {err}"))
}