// compiler/rustc_codegen_ssa/src/back/archive.rs
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::memmap::Mmap;
use rustc_session::cstore::DllImport;
use rustc_session::Session;
use rustc_span::symbol::Symbol;

use super::metadata::search_for_section;

pub use ar_archive_writer::get_native_object_symbols;
use ar_archive_writer::{write_archive_to_stream, ArchiveKind, NewArchiveMember};
use object::read::archive::ArchiveFile;
use object::read::macho::FatArch;

use std::error::Error;
use std::fs::File;
use std::io::{self, Write};
use std::path::{Path, PathBuf};

// Re-exporting for rustc_codegen_llvm::back::archive
pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind};

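/// Factory for creating [`ArchiveBuilder`]s, plus a few archive-related helpers
/// shared between codegen backends.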
pub trait ArchiveBuilderBuilder {
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;

    /// Creates a DLL Import Library <https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-creation#creating-an-import-library>
    /// and returns the path on disk to that import library.
    /// This function doesn't take `self` so that it can be called from
    /// `linker_with_args`, which is specialized on `ArchiveBuilder` but
    /// doesn't take or create an instance of that type.
    fn create_dll_import_lib(
        &self,
        sess: &Session,
        lib_name: &str,
        dll_imports: &[DllImport],
        tmpdir: &Path,
        is_direct_dependency: bool,
    ) -> PathBuf;

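    /// Extracts the native libraries whose file names are listed in
    /// `bundled_lib_file_names` from the `.bundled_lib` sections of the given
    /// rlib and writes each of them into `outdir`.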
    fn extract_bundled_libs<'a>(
        &'a self,
        rlib: &'a Path,
        outdir: &Path,
        bundled_lib_file_names: &FxHashSet<Symbol>,
    ) -> Result<(), ExtractBundledLibsError<'_>> {
        let archive_map = unsafe {
            Mmap::map(
                File::open(rlib)
                    .map_err(|e| ExtractBundledLibsError::OpenFile { rlib, error: Box::new(e) })?,
            )
            .map_err(|e| ExtractBundledLibsError::MmapFile { rlib, error: Box::new(e) })?
        };
        let archive = ArchiveFile::parse(&*archive_map)
            .map_err(|e| ExtractBundledLibsError::ParseArchive { rlib, error: Box::new(e) })?;

        for entry in archive.members() {
            let entry = entry
                .map_err(|e| ExtractBundledLibsError::ReadEntry { rlib, error: Box::new(e) })?;
            let data = entry
                .data(&*archive_map)
                .map_err(|e| ExtractBundledLibsError::ArchiveMember { rlib, error: Box::new(e) })?;
            let name = std::str::from_utf8(entry.name())
                .map_err(|e| ExtractBundledLibsError::ConvertName { rlib, error: Box::new(e) })?;
            if !bundled_lib_file_names.contains(&Symbol::intern(name)) {
                continue; // We need to extract only native libraries.
            }
            let data = search_for_section(rlib, data, ".bundled_lib").map_err(|e| {
                ExtractBundledLibsError::ExtractSection { rlib, error: Box::<dyn Error>::from(e) }
            })?;
            std::fs::write(&outdir.join(&name), data)
                .map_err(|e| ExtractBundledLibsError::WriteFile { rlib, error: Box::new(e) })?;
        }
        Ok(())
    }
}

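/// An archive (`.a`/`.lib`) in the process of being built from object files,
/// rlibs and other archives; `build` writes the result to disk.
///
/// A minimal usage sketch (illustrative only; it assumes an
/// `ArchiveBuilderBuilder` named `abb` and a `Session` named `sess` are in
/// scope, and that the listed input files exist):
///
/// ```ignore (illustrative)
/// let mut ab = abb.new_archive_builder(sess);
/// ab.add_file(Path::new("foo.o"));
/// ab.add_archive(Path::new("libbar.a"), Box::new(|name| name == "lib.rmeta"))?;
/// ab.build(Path::new("liboutput.a"));
/// ```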
pub trait ArchiveBuilder<'a> {
    fn add_file(&mut self, path: &Path);

    fn add_archive(
        &mut self,
        archive: &Path,
        skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()>;

    fn build(self: Box<Self>, output: &Path) -> bool;
}

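/// An [`ArchiveBuilder`] implemented in pure Rust on top of `ar_archive_writer`,
/// collecting entries in memory and writing them out in [`ArchiveBuilder::build`].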
#[must_use = "must call build() to finish building the archive"]
pub struct ArArchiveBuilder<'a> {
    sess: &'a Session,
    get_object_symbols:
        fn(buf: &[u8], f: &mut dyn FnMut(&[u8]) -> io::Result<()>) -> io::Result<bool>,

    src_archives: Vec<(PathBuf, Mmap)>,
    // Don't use a `HashMap` here, as the order is important. `lib.rmeta` needs
    // to be at the end of an archive in some cases for linkers to not get confused.
    entries: Vec<(Vec<u8>, ArchiveEntry)>,
}

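/// A pending archive member: either a byte range inside an already mmapped
/// source archive, or a standalone file on disk.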
#[derive(Debug)]
enum ArchiveEntry {
    FromArchive { archive_index: usize, file_range: (u64, u64) },
    File(PathBuf),
}

impl<'a> ArArchiveBuilder<'a> {
    pub fn new(
        sess: &'a Session,
        get_object_symbols: fn(
            buf: &[u8],
            f: &mut dyn FnMut(&[u8]) -> io::Result<()>,
        ) -> io::Result<bool>,
    ) -> ArArchiveBuilder<'a> {
        ArArchiveBuilder { sess, get_object_symbols, src_archives: vec![], entries: vec![] }
    }
}

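/// Given the per-architecture entries of a Mach-O fat archive, copies the slice
/// matching `target_arch` into a temporary file and returns its path, or
/// `Ok(None)` if the fat archive contains no slice for that architecture.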
fn try_filter_fat_archs(
    archs: object::read::Result<&[impl FatArch]>,
    target_arch: object::Architecture,
    archive_path: &Path,
    archive_map_data: &[u8],
) -> io::Result<Option<PathBuf>> {
    let archs = archs.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;

    let desired = match archs.iter().find(|a| a.architecture() == target_arch) {
        Some(a) => a,
        None => return Ok(None),
    };

    let (mut new_f, extracted_path) = tempfile::Builder::new()
        .suffix(archive_path.file_name().unwrap())
        .tempfile()?
        .keep()
        .unwrap();

    new_f.write_all(
        desired.data(archive_map_data).map_err(|e| io::Error::new(io::ErrorKind::Other, e))?,
    )?;

    Ok(Some(extracted_path))
}

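/// If `archive_path` is a Mach-O fat archive, extracts the slice for the
/// current target architecture into a temporary file and returns its path.
/// Returns `Ok(None)` for thin archives and for target architectures other
/// than aarch64 and x86_64.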
pub fn try_extract_macho_fat_archive(
    sess: &Session,
    archive_path: &Path,
) -> io::Result<Option<PathBuf>> {
    let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
    let target_arch = match sess.target.arch.as_ref() {
        "aarch64" => object::Architecture::Aarch64,
        "x86_64" => object::Architecture::X86_64,
        _ => return Ok(None),
    };

    match object::macho::FatHeader::parse(&*archive_map) {
        Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC => {
            let archs = object::macho::FatHeader::parse_arch32(&*archive_map);
            try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
        }
        Ok(h) if h.magic.get(object::endian::BigEndian) == object::macho::FAT_MAGIC_64 => {
            let archs = object::macho::FatHeader::parse_arch64(&*archive_map);
            try_filter_fat_archs(archs, target_arch, archive_path, &*archive_map)
        }
        // Not a FatHeader at all, just return None.
        _ => Ok(None),
    }
}

impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
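    /// Adds all members of `archive_path` to this archive, except those for
    /// which `skip` returns `true`. On macOS targets, fat archives are first
    /// thinned down to the slice matching the current target architecture.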
    fn add_archive(
        &mut self,
        archive_path: &Path,
        mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()> {
        let mut archive_path = archive_path.to_path_buf();
        if self.sess.target.llvm_target.contains("-apple-macosx") {
            if let Some(new_archive_path) =
                try_extract_macho_fat_archive(&self.sess, &archive_path)?
            {
                archive_path = new_archive_path
            }
        }

        if self.src_archives.iter().any(|archive| archive.0 == archive_path) {
            return Ok(());
        }

        let archive_map = unsafe { Mmap::map(File::open(&archive_path)?)? };
        let archive = ArchiveFile::parse(&*archive_map)
            .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
        let archive_index = self.src_archives.len();

        for entry in archive.members() {
            let entry = entry.map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            let file_name = String::from_utf8(entry.name().to_vec())
                .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
            if !skip(&file_name) {
                self.entries.push((
                    file_name.into_bytes(),
                    ArchiveEntry::FromArchive { archive_index, file_range: entry.file_range() },
                ));
            }
        }

        self.src_archives.push((archive_path.to_owned(), archive_map));
        Ok(())
    }

    /// Adds an arbitrary file to this archive.
    fn add_file(&mut self, file: &Path) {
        self.entries.push((
            file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
            ArchiveEntry::File(file.to_owned()),
        ));
    }

    /// Combine the provided files, rlibs, and native libraries into a single
    /// `Archive`.
    fn build(self: Box<Self>, output: &Path) -> bool {
        let sess = self.sess;
        match self.build_inner(output) {
            Ok(any_members) => any_members,
            Err(e) => sess.emit_fatal(ArchiveBuildFailure { error: e }),
        }
    }
}

impl<'a> ArArchiveBuilder<'a> {
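    /// Writes all collected entries to `output` in the target's archive format
    /// using `ar_archive_writer`. Returns whether the resulting archive has any
    /// members.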
    fn build_inner(self, output: &Path) -> io::Result<bool> {
        let archive_kind = match &*self.sess.target.archive_format {
            "gnu" => ArchiveKind::Gnu,
            "bsd" => ArchiveKind::Bsd,
            "darwin" => ArchiveKind::Darwin,
            "coff" => ArchiveKind::Coff,
            kind => {
                self.sess.emit_fatal(UnknownArchiveKind { kind });
            }
        };

        let mut entries = Vec::new();

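        // Turn every recorded entry into a `NewArchiveMember`, borrowing the data
        // either from an already mmapped source archive or by mmapping the
        // standalone file.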
        for (entry_name, entry) in self.entries {
            let data =
                match entry {
                    ArchiveEntry::FromArchive { archive_index, file_range } => {
                        let src_archive = &self.src_archives[archive_index];

                        let data = &src_archive.1
                            [file_range.0 as usize..file_range.0 as usize + file_range.1 as usize];

                        Box::new(data) as Box<dyn AsRef<[u8]>>
                    }
                    ArchiveEntry::File(file) => unsafe {
                        Box::new(
                            Mmap::map(File::open(file).map_err(|err| {
                                io_error_context("failed to open object file", err)
                            })?)
                            .map_err(|err| io_error_context("failed to map object file", err))?,
                        ) as Box<dyn AsRef<[u8]>>
                    },
                };

            entries.push(NewArchiveMember {
                buf: data,
                get_symbols: self.get_object_symbols,
                member_name: String::from_utf8(entry_name).unwrap(),
                mtime: 0,
                uid: 0,
                gid: 0,
                perms: 0o644,
            })
        }

        let mut w = File::create(output)
            .map_err(|err| io_error_context("failed to create archive file", err))?;

        write_archive_to_stream(&mut w, &entries, true, archive_kind, true, false)?;

        Ok(!entries.is_empty())
    }
}

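/// Wraps an [`io::Error`] with a human-readable context message.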
fn io_error_context(context: &str, err: io::Error) -> io::Error {
    io::Error::new(io::ErrorKind::Other, format!("{context}: {err}"))
}