// Handle shorthands first, if we have an usize > 0x80.
if self.opaque.data[self.opaque.position()] & 0x80 != 0 {
let pos = self.read_usize()?;
- assert!(pos >= TYPE_SHORTHAND_OFFSET);
+ assert!(pos >= SHORTHAND_OFFSET);
let key = ty::CReaderCacheKey {
cnum: self.cdata().cnum,
- pos: pos - TYPE_SHORTHAND_OFFSET
+ pos: pos - SHORTHAND_OFFSET
};
if let Some(ty) = tcx.rcache.borrow().get(&key).cloned() {
return Ok(ty);
            }
        }
-pub fn load_xrefs(data: &[u8]) -> index::DenseIndex {
- let index = rbml::Doc::new(data).get(root_tag::xref_index);
- index::DenseIndex::from_buf(index.data, index.start, index.end)
-}
-
// Go through each item in the metadata and create a map from that
// item's def-key to the item's DefIndex.
pub fn load_key_map(data: &[u8]) -> FnvHashMap<DefKey, DefIndex> {
{
let mut dcx = base_doc.get(tag).decoder();
dcx.cdata = Some(cdata);
+ dcx.tcx = Some(tcx);
ty::GenericPredicates {
parent: dcx.decode(),
- predicates: dcx.seq().map(|offset| {
- let predicate_pos = cdata.xref_index.lookup(
- cdata.data(), offset).unwrap() as usize;
- let mut dcx = rbml::Doc {
- data: cdata.data(),
- start: predicate_pos,
- end: cdata.data().len(),
- }.decoder();
- dcx.tcx = Some(tcx);
- dcx.cdata = Some(cdata);
- dcx.decode()
+ predicates: (0..dcx.decode::<usize>()).map(|_| {
+ // Handle shorthands first, if we have an usize > 0x80.
+ if dcx.opaque.data[dcx.opaque.position()] & 0x80 != 0 {
+ let pos = dcx.decode::<usize>();
+ assert!(pos >= SHORTHAND_OFFSET);
+ let pos = pos - SHORTHAND_OFFSET;
+
+ let mut dcx = rbml::Doc {
+ data: cdata.data(),
+ start: pos,
+ end: cdata.data().len(),
+ }.decoder();
+ dcx.tcx = Some(tcx);
+ dcx.cdata = Some(cdata);
+ dcx.decode()
+ } else {
+ dcx.decode()
+ }
}).collect()
}
}
use astencode::encode_inlined_item;
use common::*;
use cstore;
-use index::{self, IndexData};
+use index::IndexData;
use rustc::middle::cstore::{InlinedItemRef, LinkMeta, LinkagePreference};
use rustc::hir::def;
use rustc::util::nodemap::{FnvHashMap, NodeSet};
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
-use std::cell::RefCell;
+use std::hash::Hash;
use std::intrinsics;
use std::io::prelude::*;
use std::io::Cursor;
-use std::mem;
use std::ops::{Deref, DerefMut};
use std::rc::Rc;
use std::u32;
reachable: &'a NodeSet,
mir_map: &'a MirMap<'tcx>,
- type_shorthands: RefCell<FnvHashMap<Ty<'tcx>, usize>>,
- xrefs: FnvHashMap<XRef<'tcx>, u32>, // sequentially-assigned
+ type_shorthands: FnvHashMap<Ty<'tcx>, usize>,
+ predicate_shorthands: FnvHashMap<ty::Predicate<'tcx>, usize>,
}
-/// "interned" entries referenced by id
-#[derive(PartialEq, Eq, Hash)]
-enum XRef<'tcx> { Predicate(ty::Predicate<'tcx>) }
-
impl<'a, 'tcx> Deref for EncodeContext<'a, 'tcx> {
type Target = rbml::writer::Encoder<'a>;
fn deref(&self) -> &Self::Target {
impl<'a, 'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
- let existing_shorthand = self.type_shorthands.borrow().get(ty).cloned();
+ self.encode_with_shorthand(ty, &ty.sty, |ecx| &mut ecx.type_shorthands)
+ }
+}
+
+impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
+ fn seq<I, F, T>(&mut self, iter: I, mut f: F)
+ where I: IntoIterator,
+ I::IntoIter: ExactSizeIterator,
+ F: FnMut(&mut Self, I::Item) -> T,
+ T: Encodable {
+ let iter = iter.into_iter();
+ self.emit_seq(iter.len(), move |ecx| {
+ for (i, elem) in iter.enumerate() {
+ ecx.emit_seq_elt(i, |ecx| {
+ f(ecx, elem).encode(ecx)
+ })?;
+ }
+ Ok(())
+ }).unwrap();
+ }
+
+ /// Encode the given value or a previously cached shorthand.
+ fn encode_with_shorthand<T, U, M>(&mut self, value: &T, variant: &U, map: M)
+ -> Result<(), <Self as Encoder>::Error>
+ where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap<T, usize>,
+ T: Clone + Eq + Hash,
+ U: Encodable {
+ let existing_shorthand = map(self).get(value).cloned();
if let Some(shorthand) = existing_shorthand {
return self.emit_usize(shorthand);
}
let start = self.mark_stable_position();
- ty.sty.encode(self)?;
+ variant.encode(self)?;
let len = self.mark_stable_position() - start;
// The shorthand encoding uses the same usize as the
// discriminant, with an offset so they can't conflict.
- let discriminant = unsafe { intrinsics::discriminant_value(&ty.sty) };
- assert!(discriminant < TYPE_SHORTHAND_OFFSET as u64);
- let shorthand = start + TYPE_SHORTHAND_OFFSET;
+ let discriminant = unsafe {
+ intrinsics::discriminant_value(variant)
+ };
+ assert!(discriminant < SHORTHAND_OFFSET as u64);
+ let shorthand = start + SHORTHAND_OFFSET;
// Get the number of bits that leb128 could fit
// in the same space as the fully encoded type.
// Check that the shorthand is a not longer than the
// full encoding itself, i.e. it's an obvious win.
if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) {
- self.type_shorthands.borrow_mut().insert(*ty, shorthand);
+ map(self).insert(value.clone(), shorthand);
}
Ok(())
}
-}
-
-impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
- fn seq<I, F, T>(&mut self, iter: I, mut f: F)
- where I: IntoIterator,
- I::IntoIter: ExactSizeIterator,
- F: FnMut(&mut Self, I::Item) -> T,
- T: Encodable {
- let iter = iter.into_iter();
- self.emit_seq(iter.len(), move |ecx| {
- for (i, elem) in iter.enumerate() {
- ecx.emit_seq_elt(i, |ecx| {
- f(ecx, elem).encode(ecx)
- })?;
- }
- Ok(())
- }).unwrap();
- }
/// For every DefId that we create a metadata item for, we include a
/// serialized copy of its DefKey, which allows us to recreate a path.
self.start_tag(tag);
predicates.parent.encode(self).unwrap();
self.seq(&predicates.predicates, |ecx, predicate| {
- ecx.add_xref(XRef::Predicate(predicate.clone()))
+ ecx.encode_with_shorthand(predicate, predicate,
+ |ecx| &mut ecx.predicate_shorthands).unwrap()
});
self.end_tag();
}
}
impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
- fn add_xref(&mut self, xref: XRef<'tcx>) -> u32 {
- let old_len = self.xrefs.len() as u32;
- *self.xrefs.entry(xref).or_insert(old_len)
- }
-
- fn encode_xrefs(&mut self) {
- let xrefs = mem::replace(&mut self.xrefs, Default::default());
- let mut xref_positions = vec![0; xrefs.len()];
-
- // Encode XRefs sorted by their ID
- let mut sorted_xrefs: Vec<_> = xrefs.into_iter().collect();
- sorted_xrefs.sort_by_key(|&(_, id)| id);
-
- self.start_tag(root_tag::xref_data);
- for (xref, id) in sorted_xrefs.into_iter() {
- xref_positions[id as usize] = self.mark_stable_position() as u32;
- match xref {
- XRef::Predicate(p) => p.encode(self).unwrap()
- }
- }
- self.mark_stable_position();
- self.end_tag();
-
- self.start_tag(root_tag::xref_index);
- index::write_dense_index(xref_positions, &mut self.opaque.cursor);
- self.end_tag();
- }
-
fn encode_info_for_item(&mut self,
(def_id, item): (DefId, &hir::Item)) {
let tcx = self.tcx;
reachable: reachable,
mir_map: mir_map,
type_shorthands: Default::default(),
- xrefs: Default::default()
+ predicate_shorthands: Default::default()
});
// RBML compacts the encoded bytes whenever appropriate,
encode_item_index(ecx, items);
let index_bytes = ecx.position() - i;
- i = ecx.position();
- ecx.encode_xrefs();
- let xref_bytes = ecx.position() - i;
-
let total_bytes = ecx.position();
if ecx.tcx.sess.meta_stats() {
println!(" reachable bytes: {}", reachable_bytes);
println!(" item bytes: {}", item_bytes);
println!(" index bytes: {}", index_bytes);
- println!(" xref bytes: {}", xref_bytes);
println!(" zero bytes: {}", zero_bytes);
println!(" total bytes: {}", total_bytes);
}