3 use rustc_data_structures::fingerprint::Fingerprint;
4 use rustc_hir::def::{CtorKind, CtorOf};
5 use rustc_index::vec::Idx;
6 use rustc_middle::ty::ParameterizedOverTcx;
7 use rustc_serialize::opaque::FileEncoder;
8 use rustc_serialize::Encoder as _;
9 use rustc_span::hygiene::MacroKind;
10 use std::marker::PhantomData;
11 use std::num::NonZeroUsize;
// Marker trait for table values whose `Default` value encodes as an all-zero
// byte block, letting sparse tables skip writing default entries entirely.
13 pub(super) trait IsDefault: Default {
// Returns true iff `self` equals `Self::default()` (i.e. the omittable value).
14 fn is_default(&self) -> bool;
// `None` is the default for any `Option<T>`.
17 impl<T> IsDefault for Option<T> {
// Body elided in this excerpt — presumably `self.is_none()`; TODO confirm.
18 fn is_default(&self) -> bool {
// The default `AttrFlags` is the empty bitflag set.
23 impl IsDefault for AttrFlags {
// Body elided — presumably `self.is_empty()`; TODO confirm.
24 fn is_default(&self) -> bool {
// `false` is the default for `bool`.
29 impl IsDefault for bool {
// Body elided — presumably `!self`; TODO confirm.
30 fn is_default(&self) -> bool {
// `0` is the default for `u32`, matching the all-zero-bytes invariant.
35 impl IsDefault for u32 {
// Body elided — presumably `*self == 0`; TODO confirm.
36 fn is_default(&self) -> bool {
// A default `LazyArray` is the empty array; see `from_bytes` below, which
// treats an all-zero length field as `Default::default()`.
41 impl<T> IsDefault for LazyArray<T> {
// Body elided — presumably checks `num_elems == 0`; TODO confirm.
42 fn is_default(&self) -> bool {
// A `DefPathHash` wrapping the zero fingerprint is treated as "absent",
// matching its all-zero `[u8; 16]` encoding below.
47 impl IsDefault for DefPathHash {
48 fn is_default(&self) -> bool {
49 self.0 == Fingerprint::ZERO
53 /// Helper trait, for encoding to, and decoding from, a fixed number of bytes.
54 /// Used mainly for Lazy positions and lengths.
55 /// Unchecked invariant: `Self::default()` should encode as `[0; BYTE_LEN]`,
56 /// but this has no impact on safety.
57 pub(super) trait FixedSizeEncoding: IsDefault {
58 /// This should be `[u8; BYTE_LEN]`;
59 /// Cannot use an associated `const BYTE_LEN: usize` instead due to const eval limitations.
// NOTE(review): the associated `type ByteArray` declaration itself is elided
// from this excerpt (it appears in every impl below).
// Decodes a value from its fixed-size byte representation.
62 fn from_bytes(b: &Self::ByteArray) -> Self;
// Encodes `self` into `b`, consuming `self`.
63 fn write_to_bytes(self, b: &mut Self::ByteArray);
66 /// This implementation is not used generically, but for reading/writing
67 /// concrete `u32` fields in `Lazy*` structures, which may be zero.
68 impl FixedSizeEncoding for u32 {
69 type ByteArray = [u8; 4];
// Little-endian decoding, mirroring `write_to_bytes` below.
72 fn from_bytes(b: &[u8; 4]) -> Self {
73 Self::from_le_bytes(*b)
// Little-endian encoding; zero is permitted here (no `is_default` assert).
77 fn write_to_bytes(self, b: &mut [u8; 4]) {
78 *b = self.to_le_bytes();
// Generates a one-byte `FixedSizeEncoding` impl for `Option<$ty>`: each listed
// variant pattern is numbered via the `${index()}` metavariable expression,
// with code 0 reserved for `None` (so the all-zero byte means "absent").
82 macro_rules! fixed_size_enum {
83 ($ty:ty { $(($($pat:tt)*))* }) => {
84 impl FixedSizeEncoding for Option<$ty> {
85 type ByteArray = [u8;1];
88 fn from_bytes(b: &[u8;1]) -> Self {
// Match arm context elided here — the scrutinee (presumably `b[0] - 1` or
// similar, to undo the `1 +` offset used in `write_to_bytes`) is not visible.
94 $(${index()} => Some($($pat)*),)*
95 _ => panic!("Unexpected {} code: {:?}", stringify!($ty), b[0]),
100 fn write_to_bytes(self, b: &mut [u8;1]) {
// `None` is never written: default (absent) entries are skipped by the table.
103 None => unreachable!(),
// Variants are offset by 1 so that no variant encodes as the 0 byte.
104 $(Some($($pat)*) => 1 + ${index()},)*
// NOTE(review): the lines below are fragments of several `fixed_size_enum!`
// invocations (the invocation headers, e.g. for `DefKind`, are elided from
// this excerpt). Each parenthesized pattern is one encodable variant.
135 ( ImplTraitPlaceholder )
142 ( Static(ast::Mutability::Not) )
143 ( Static(ast::Mutability::Mut) )
144 ( Ctor(CtorOf::Struct, CtorKind::Fn) )
145 ( Ctor(CtorOf::Struct, CtorKind::Const) )
146 ( Ctor(CtorOf::Variant, CtorKind::Fn) )
147 ( Ctor(CtorOf::Variant, CtorKind::Const) )
148 ( Macro(MacroKind::Bang) )
149 ( Macro(MacroKind::Attr) )
150 ( Macro(MacroKind::Derive) )
// Fragments of a `Defaultness`-style enum invocation — header elided.
172 ( Default { has_value: false } )
173 ( Default { has_value: true } )
// Start of the `ty::AssocItemContainer` invocation — variants elided.
185 ty::AssocItemContainer {
199 // We directly encode `DefPathHash` because a `LazyValue` would incur a 25% cost.
200 impl FixedSizeEncoding for DefPathHash {
201 type ByteArray = [u8; 16];
// Decodes the 16-byte little-endian fingerprint; an all-zero block yields the
// "default" (absent) hash per `IsDefault for DefPathHash` above.
204 fn from_bytes(b: &[u8; 16]) -> Self {
205 DefPathHash(Fingerprint::from_le_bytes(*b))
209 fn write_to_bytes(self, b: &mut [u8; 16]) {
// Default values are never written — the zero block is reserved for "absent".
210 debug_assert!(!self.is_default());
211 *b = self.0.to_le_bytes();
215 // We directly encode RawDefId because using a `LazyValue` would incur a 50% overhead in the worst case.
// Layout: bytes 0..4 hold `krate + 1` (so 0 can mean `None`), bytes 4..8 hold
// `index`, both little-endian.
216 impl FixedSizeEncoding for Option<RawDefId> {
217 type ByteArray = [u8; 8];
220 fn from_bytes(b: &[u8; 8]) -> Self {
221 let krate = u32::from_le_bytes(b[0..4].try_into().unwrap());
// NOTE(review): the `krate == 0 => return None` branch is elided from this
// excerpt; without it the `krate - 1` below would underflow — TODO confirm.
225 let index = u32::from_le_bytes(b[4..8].try_into().unwrap());
// Undo the `+ 1` offset applied in `write_to_bytes`.
226 Some(RawDefId { krate: krate - 1, index })
230 fn write_to_bytes(self, b: &mut [u8; 8]) {
// `None` entries are skipped by the table builder, never written explicitly.
232 None => unreachable!(),
233 Some(RawDefId { krate, index }) => {
234 // CrateNum is less than `CrateNum::MAX_AS_U32`.
// Guards the `1 + krate` below against wrapping to 0 (the `None` code).
235 debug_assert!(krate < u32::MAX);
236 b[0..4].copy_from_slice(&(1 + krate).to_le_bytes());
237 b[4..8].copy_from_slice(&index.to_le_bytes());
// Single-byte bitflags; unknown bits are silently dropped on decode.
243 impl FixedSizeEncoding for AttrFlags {
244 type ByteArray = [u8; 1];
247 fn from_bytes(b: &[u8; 1]) -> Self {
// `from_bits_truncate` ignores bits not defined by `AttrFlags`.
248 AttrFlags::from_bits_truncate(b[0])
252 fn write_to_bytes(self, b: &mut [u8; 1]) {
// Empty (default) flag sets are skipped by the table, never written.
253 debug_assert!(!self.is_default());
// Single-byte bool; only `true` is ever written (false is the skipped default).
258 impl FixedSizeEncoding for bool {
259 type ByteArray = [u8; 1];
// Body elided — presumably decodes `b[0] != 0`; TODO confirm.
262 fn from_bytes(b: &[u8; 1]) -> Self {
267 fn write_to_bytes(self, b: &mut [u8; 1]) {
// Default (`false`) values are skipped by the table, never written.
268 debug_assert!(!self.is_default());
273 // NOTE(eddyb) there could be an impl for `usize`, which would enable a more
274 // generic `LazyValue<T>` impl, but in the general case we might not need / want
275 // to fit every `usize` in `u32`.
// Encodes the lazy value's blob position as a little-endian `u32`;
// position 0 is the `None` code (positions are `NonZeroUsize`).
276 impl<T> FixedSizeEncoding for Option<LazyValue<T>> {
277 type ByteArray = [u8; 4];
280 fn from_bytes(b: &[u8; 4]) -> Self {
// `NonZeroUsize::new` returns `None` for 0, so `?` maps the zero code to `None`.
281 let position = NonZeroUsize::new(u32::from_bytes(b) as usize)?;
282 Some(LazyValue::from_position(position))
286 fn write_to_bytes(self, b: &mut [u8; 4]) {
// `None` entries are skipped by the table builder, never written explicitly.
288 None => unreachable!(),
290 let position = lazy.position.get();
// Positions must fit in 32 bits; panic (rather than truncate) if they don't.
291 let position: u32 = position.try_into().unwrap();
292 position.write_to_bytes(b)
298 impl<T> LazyArray<T> {
// Shared encoder for both the `LazyArray` and `Option<LazyArray>` impls below:
// bytes 0..4 = position (little-endian u32), bytes 4..8 = element count.
300 fn write_to_bytes_impl(self, b: &mut [u8; 8]) {
// Split the 8-byte block into two 4-byte halves (the `[]` pattern asserts
// there is no remainder).
301 let ([position_bytes, meta_bytes],[])= b.as_chunks_mut::<4>() else { panic!() };
303 let position = self.position.get();
// Positions and lengths must fit in 32 bits; panic rather than truncate.
304 let position: u32 = position.try_into().unwrap();
305 position.write_to_bytes(position_bytes);
307 let len = self.num_elems;
308 let len: u32 = len.try_into().unwrap();
309 len.write_to_bytes(meta_bytes);
// Shared decoder; returns `None` when the position field is 0 (absent entry).
312 fn from_bytes_impl(position_bytes: &[u8; 4], meta_bytes: &[u8; 4]) -> Option<LazyArray<T>> {
313 let position = NonZeroUsize::new(u32::from_bytes(position_bytes) as usize)?;
314 let len = u32::from_bytes(meta_bytes) as usize;
315 Some(LazyArray::from_position_and_num_elems(position, len))
// Non-optional variant: a zero length field decodes as the empty (default)
// array, regardless of the position half.
319 impl<T> FixedSizeEncoding for LazyArray<T> {
320 type ByteArray = [u8; 8];
323 fn from_bytes(b: &[u8; 8]) -> Self {
324 let ([position_bytes, meta_bytes],[])= b.as_chunks::<4>() else { panic!() };
// Zero length ⇒ the default empty array (matching `IsDefault for LazyArray`).
325 if *meta_bytes == [0; 4] {
326 return Default::default();
// Non-zero length implies a non-zero position, so `unwrap` cannot fail on
// well-formed metadata.
328 LazyArray::from_bytes_impl(position_bytes, meta_bytes).unwrap()
332 fn write_to_bytes(self, b: &mut [u8; 8]) {
// Default (empty) arrays are skipped by the table, never written.
333 assert!(!self.is_default());
334 self.write_to_bytes_impl(b)
// Optional variant: a zero position field decodes as `None` (see
// `from_bytes_impl`), and `None` is never written explicitly.
338 impl<T> FixedSizeEncoding for Option<LazyArray<T>> {
339 type ByteArray = [u8; 8];
342 fn from_bytes(b: &[u8; 8]) -> Self {
343 let ([position_bytes, meta_bytes],[])= b.as_chunks::<4>() else { panic!() };
344 LazyArray::from_bytes_impl(position_bytes, meta_bytes)
348 fn write_to_bytes(self, b: &mut [u8; 8]) {
// `None` entries are skipped by the table builder, never written explicitly.
350 None => unreachable!(),
351 Some(lazy) => lazy.write_to_bytes_impl(b),
356 /// Helper for constructing a table's serialization (also see `Table`).
357 pub(super) struct TableBuilder<I: Idx, T: FixedSizeEncoding> {
// One fixed-size byte block per index; absent entries stay all-zero.
358 blocks: IndexVec<I, T::ByteArray>,
// `T` itself is never stored, only its `ByteArray` encoding.
359 _marker: PhantomData<T>,
// Manual impl rather than `#[derive(Default)]`, since `T: Default` is not
// required — only the (empty) `IndexVec` needs a default.
362 impl<I: Idx, T: FixedSizeEncoding> Default for TableBuilder<I, T> {
363 fn default() -> Self {
364 TableBuilder { blocks: Default::default(), _marker: PhantomData }
// Convenience for optional-valued tables: wraps the value in `Some` so callers
// don't have to (and `None`, being the default, is simply never set).
368 impl<I: Idx, const N: usize, T> TableBuilder<I, Option<T>>
370 Option<T>: FixedSizeEncoding<ByteArray = [u8; N]>,
372 pub(crate) fn set_some(&mut self, i: I, value: T) {
373 self.set(i, Some(value))
377 impl<I: Idx, const N: usize, T: FixedSizeEncoding<ByteArray = [u8; N]>> TableBuilder<I, T> {
378 /// Sets the table value if it is not default.
379 /// ATTENTION: For optimization default values are simply ignored by this function, because
380 /// right now metadata tables never need to reset non-default values to default. If such need
381 /// arises in the future then a new method (e.g. `clear` or `reset`) will need to be introduced
382 /// for doing that explicitly.
383 pub(crate) fn set(&mut self, i: I, value: T) {
384 if !value.is_default() {
385 // FIXME(eddyb) investigate more compact encodings for sparse tables.
386 // On the PR @michaelwoerister mentioned:
387 // > Space requirements could perhaps be optimized by using the HAMT `popcnt`
388 // > trick (i.e. divide things into buckets of 32 or 64 items and then
389 // > store bit-masks of which item in each bucket is actually serialized).
// Grow the table up to index `i`, zero-filling the gap; zero blocks decode
// as the default value, matching the all-zero-encoding invariant.
390 self.blocks.ensure_contains_elem(i, || [0; N]);
391 value.write_to_bytes(&mut self.blocks[i]);
// Serializes the whole table as a flat run of fixed-size blocks and returns
// a `LazyTable` pointing at its position and byte size in the output buffer.
395 pub(crate) fn encode(&self, buf: &mut FileEncoder) -> LazyTable<I, T> {
396 let pos = buf.position();
397 for block in &self.blocks {
398 buf.emit_raw_bytes(block);
// Total encoded size: entry count (including zero-filled gaps) times block size.
400 let num_bytes = self.blocks.len() * N;
401 LazyTable::from_position_and_encoded_size(
// Position 0 is reserved/invalid, hence the `NonZeroUsize` unwrap.
402 NonZeroUsize::new(pos as usize).unwrap(),
// Decoding side of the table (this impl continues past the visible excerpt).
408 impl<I: Idx, const N: usize, T: FixedSizeEncoding<ByteArray = [u8; N]> + ParameterizedOverTcx>
411 for<'tcx> T::Value<'tcx>: FixedSizeEncoding<ByteArray = [u8; N]>,
413 /// Given the metadata, extract out the value at a particular index (if any).
415 pub(super) fn get<'a, 'tcx, M: Metadata<'a, 'tcx>>(&self, metadata: M, i: I) -> T::Value<'tcx> {
416 debug!("LazyTable::lookup: index={:?} len={:?}", i, self.encoded_size);
418 let start = self.position.get();
// Slice the table's byte range out of the metadata blob.
419 let bytes = &metadata.blob()[start..start + self.encoded_size];
// Reinterpret as fixed-size blocks; the `[]` pattern asserts no remainder.
420 let (bytes, []) = bytes.as_chunks::<N>() else { panic!() };
// Out-of-range indices decode as the default value (absent entry).
421 bytes.get(i.index()).map_or_else(Default::default, FixedSizeEncoding::from_bytes)
424 /// Size of the table in entries, including possible gaps.
425 pub(super) fn size(&self) -> usize {
426 self.encoded_size / N