enum AllocDiscriminant {
Alloc,
- ExternStatic,
+ Static,
Function,
}
impl_stable_hash_for!(enum self::AllocDiscriminant {
Alloc,
- ExternStatic,
+ Static,
Function
});
hasher: &mut StableHasher<W>,
) {
ty::tls::with_opt(|tcx| {
+ trace!("hashing {:?}", *self);
let tcx = tcx.expect("can't hash AllocIds during hir lowering");
- if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
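+ // statics are hashed via their `DefId`, not via the contents of
+ // their allocation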
+ if let Some(def_id) = tcx.interpret_interner
+ .get_corresponding_static_def_id(*self) {
+ AllocDiscriminant::Static.hash_stable(hcx, hasher);
+ trace!("hashing {:?} as static {:?}", *self, def_id);
+ def_id.hash_stable(hcx, hasher);
+ } else if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
AllocDiscriminant::Alloc.hash_stable(hcx, hasher);
if hcx.alloc_id_recursion_tracker.insert(*self) {
- tcx
- .interpret_interner
- .get_corresponding_static_def_id(*self)
- .hash_stable(hcx, hasher);
+ trace!("hashing {:?} as alloc {:#?}", *self, alloc);
alloc.hash_stable(hcx, hasher);
assert!(hcx.alloc_id_recursion_tracker.remove(self));
+ } else {
+ trace!("skipping hashing of {:?} due to recursion", *self);
}
} else if let Some(inst) = tcx.interpret_interner.get_fn(*self) {
+ trace!("hashing {:?} as fn {:#?}", *self, inst);
AllocDiscriminant::Function.hash_stable(hcx, hasher);
inst.hash_stable(hcx, hasher);
- } else if let Some(def_id) = tcx.interpret_interner
- .get_corresponding_static_def_id(*self) {
- AllocDiscriminant::ExternStatic.hash_stable(hcx, hasher);
- def_id.hash_stable(hcx, hasher);
} else {
bug!("no allocation for {}", self);
}
impl ::rustc_serialize::UseSpecializedEncodable for AllocId {}
impl ::rustc_serialize::UseSpecializedDecodable for AllocId {}
-pub const ALLOC_DISCRIMINANT: usize = 0;
-pub const FN_DISCRIMINANT: usize = 1;
-pub const EXTERN_STATIC_DISCRIMINANT: usize = 2;
-pub const SHORTHAND_START: usize = 3;
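+/// Tag identifying which kind of `AllocId` is being (de-)serialized.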
+#[derive(RustcDecodable, RustcEncodable)]
+enum AllocKind {
+ Alloc,
+ Fn,
+ ExternStatic,
+}
pub fn specialized_encode_alloc_id<
'a, 'tcx,
encoder: &mut E,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
alloc_id: AllocId,
- shorthand: Option<usize>,
) -> Result<(), E::Error> {
- if let Some(shorthand) = shorthand {
- return shorthand.encode(encoder);
- }
if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
trace!("encoding {:?} with {:#?}", alloc_id, alloc);
- ALLOC_DISCRIMINANT.encode(encoder)?;
+ AllocKind::Alloc.encode(encoder)?;
alloc.encode(encoder)?;
// encode whether this allocation is the root allocation of a static
tcx.interpret_interner
.get_corresponding_static_def_id(alloc_id)
.encode(encoder)?;
} else if let Some(fn_instance) = tcx.interpret_interner.get_fn(alloc_id) {
trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
- FN_DISCRIMINANT.encode(encoder)?;
+ AllocKind::Fn.encode(encoder)?;
fn_instance.encode(encoder)?;
} else if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(alloc_id) {
// extern "C" statics don't have allocations, just encode its def_id
- EXTERN_STATIC_DISCRIMINANT.encode(encoder)?;
+ AllocKind::ExternStatic.encode(encoder)?;
did.encode(encoder)?;
} else {
bug!("alloc id without corresponding allocation: {}", alloc_id);
pub fn specialized_decode_alloc_id<
'a, 'tcx,
D: Decoder,
- CACHE: FnOnce(&mut D, usize, AllocId),
- SHORT: FnOnce(&mut D, usize) -> Result<AllocId, D::Error>
+ CACHE: FnOnce(&mut D, AllocId),
>(
decoder: &mut D,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- pos: usize,
cache: CACHE,
- short: SHORT,
) -> Result<AllocId, D::Error> {
- match usize::decode(decoder)? {
- ALLOC_DISCRIMINANT => {
+ match AllocKind::decode(decoder)? {
+ AllocKind::Alloc => {
let alloc_id = tcx.interpret_interner.reserve();
- trace!("creating alloc id {:?} at {}", alloc_id, pos);
+ trace!("creating alloc id {:?}", alloc_id);
// insert early to allow recursive allocs
- cache(decoder, pos, alloc_id);
+ cache(decoder, alloc_id);
let allocation = Allocation::decode(decoder)?;
trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
Ok(alloc_id)
},
- FN_DISCRIMINANT => {
- trace!("creating fn alloc id at {}", pos);
+ AllocKind::Fn => {
+ trace!("creating fn alloc id");
let instance = ty::Instance::decode(decoder)?;
trace!("decoded fn alloc instance: {:?}", instance);
let id = tcx.interpret_interner.create_fn_alloc(instance);
trace!("created fn alloc id: {:?}", id);
- cache(decoder, pos, id);
+ cache(decoder, id);
Ok(id)
},
- EXTERN_STATIC_DISCRIMINANT => {
- trace!("creating extern static alloc id at {}", pos);
+ AllocKind::ExternStatic => {
+ trace!("creating extern static alloc id at");
let did = DefId::decode(decoder)?;
let alloc_id = tcx.interpret_interner.reserve();
+ cache(decoder, alloc_id);
tcx.interpret_interner.cache(did, alloc_id);
Ok(alloc_id)
},
- shorthand => {
- trace!("loading shorthand {}", shorthand);
- short(decoder, shorthand)
- },
}
}
// `serialized_data`.
prev_diagnostics_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
- // A cache to ensure we don't read allocations twice
- interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
+ // Maps an interpreter allocation index to the position of its serialized data
+ prev_interpret_alloc_index: Vec<AbsoluteBytePos>,
- // A map from positions to size of the serialized allocation
- // so we can skip over already processed allocations
- interpret_alloc_size: RefCell<FxHashMap<usize, usize>>,
+ /// Deserialization: A cache to ensure we don't read allocations twice
+ interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
}
// This type is used only for (de-)serialization.
prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
query_result_index: EncodedQueryResultIndex,
diagnostics_index: EncodedQueryResultIndex,
+ // the byte positions of all serialized interpreter allocations
+ interpret_alloc_index: Vec<AbsoluteBytePos>,
}
type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
query_result_index: footer.query_result_index.into_iter().collect(),
prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
synthetic_expansion_infos: Lock::new(FxHashMap()),
+ prev_interpret_alloc_index: footer.interpret_alloc_index,
interpret_alloc_cache: RefCell::new(FxHashMap::default()),
- interpret_alloc_size: RefCell::new(FxHashMap::default()),
}
}
query_result_index: FxHashMap(),
prev_diagnostics_index: FxHashMap(),
synthetic_expansion_infos: Lock::new(FxHashMap()),
+ prev_interpret_alloc_index: Vec::new(),
interpret_alloc_cache: RefCell::new(FxHashMap::default()),
- interpret_alloc_size: RefCell::new(FxHashMap::default()),
}
}
type_shorthands: FxHashMap(),
predicate_shorthands: FxHashMap(),
expn_info_shorthands: FxHashMap(),
- interpret_alloc_shorthands: FxHashMap(),
+ interpret_allocs: FxHashMap(),
+ interpret_alloc_ids: FxHashSet(),
+ interpret_allocs_inverse: Vec::new(),
codemap: CachingCodemapView::new(tcx.sess.codemap()),
file_to_file_index,
};
diagnostics_index
};
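+ // Serialize the interpreter allocations referenced from the cached
+ // query results and record the position of each one.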
+ let interpret_alloc_index = {
+ let mut interpret_alloc_index = Vec::new();
+ let mut n = 0;
+ loop {
+ let new_n = encoder.interpret_alloc_ids.len();
+ for idx in n..new_n {
+ let id = encoder.interpret_allocs_inverse[idx];
+ let pos = AbsoluteBytePos::new(encoder.position());
+ interpret_alloc_index.push(pos);
+ interpret::specialized_encode_alloc_id(
+ &mut encoder,
+ tcx,
+ id,
+ )?;
+ }
+ // encoding the allocations above may have registered new ids;
+ // if so, run another pass to serialize those, too
+ if n == new_n {
+ // no new ids were registered, we are done
+ break;
+ }
+ n = new_n;
+ }
+ interpret_alloc_index
+ };
+
let sorted_cnums = sorted_cnums_including_local_crate(tcx);
let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| {
let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
prev_cnums,
query_result_index,
diagnostics_index,
+ interpret_alloc_index,
})?;
// Encode the position of the footer as the last 8 bytes of the
file_index_to_file: &self.file_index_to_file,
file_index_to_stable_id: &self.file_index_to_stable_id,
synthetic_expansion_infos: &self.synthetic_expansion_infos,
+ prev_interpret_alloc_index: &self.prev_interpret_alloc_index,
interpret_alloc_cache: &self.interpret_alloc_cache,
- interpret_alloc_size: &self.interpret_alloc_size,
};
match decode_tagged(&mut decoder, dep_node_index) {
file_index_to_file: &'x Lock<FxHashMap<FileMapIndex, Lrc<FileMap>>>,
file_index_to_stable_id: &'x FxHashMap<FileMapIndex, StableFilemapId>,
interpret_alloc_cache: &'x RefCell<FxHashMap<usize, interpret::AllocId>>,
- interpret_alloc_size: &'x RefCell<FxHashMap<usize, usize>>,
+ /// Maps an allocation index to the position of its serialized data in the cache file
+ prev_interpret_alloc_index: &'x [AbsoluteBytePos],
}
impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> {
impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx, 'x> {
fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
let tcx = self.tcx;
- let pos = TyDecoder::position(self);
- trace!("specialized_decode_alloc_id: {:?}", pos);
- if let Some(cached) = self.interpret_alloc_cache.borrow().get(&pos).cloned() {
- // if there's no end position we are currently deserializing a recursive
- // allocation
- if let Some(end) = self.interpret_alloc_size.borrow().get(&pos).cloned() {
- trace!("{} already cached as {:?}", pos, cached);
- // skip ahead
- self.opaque.set_position(end);
- return Ok(cached)
- }
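+ // An `AllocId` is serialized as an index into `prev_interpret_alloc_index`.
+ // If the index has been decoded before, reuse the cached `AllocId`;
+ // otherwise jump to the recorded position and decode the allocation there.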
+ let idx = usize::decode(self)?;
+ trace!("loading index {}", idx);
+
+ if let Some(cached) = self.interpret_alloc_cache.borrow().get(&idx).cloned() {
+ trace!("loading alloc id {:?} from alloc_cache", cached);
+ return Ok(cached);
}
- let id = interpret::specialized_decode_alloc_id(
- self,
- tcx,
- pos,
- |this, pos, alloc_id| {
- assert!(this.interpret_alloc_cache.borrow_mut().insert(pos, alloc_id).is_none());
- },
- |this, shorthand| {
- // need to load allocation
- this.with_position(shorthand, |this| interpret::AllocId::decode(this))
- }
- )?;
- assert!(self
- .interpret_alloc_size
- .borrow_mut()
- .insert(pos, TyDecoder::position(self))
- .is_none());
- Ok(id)
+ let pos = self.prev_interpret_alloc_index[idx].to_usize();
+ trace!("loading position {}", pos);
+ self.with_position(pos, |this| {
+ interpret::specialized_decode_alloc_id(
+ this,
+ tcx,
+ |this, alloc_id| {
+ trace!("caching idx {} for alloc id {} at position {}", idx, alloc_id, pos);
+ assert!(this
+ .interpret_alloc_cache
+ .borrow_mut()
+ .insert(idx, alloc_id)
+ .is_none());
+ },
+ )
+ })
}
}
impl<'a, 'tcx, 'x> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx, 'x> {
type_shorthands: FxHashMap<ty::Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
expn_info_shorthands: FxHashMap<Mark, AbsoluteBytePos>,
- interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs_inverse: Vec<interpret::AllocId>,
+ interpret_alloc_ids: FxHashSet<interpret::AllocId>,
codemap: CachingCodemapView<'tcx>,
file_to_file_index: FxHashMap<*const FileMap, FileMapIndex>,
}
where E: 'enc + ty_codec::TyEncoder
{
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
- use std::collections::hash_map::Entry;
- let tcx = self.tcx;
- let pos = self.position();
- let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
- Entry::Occupied(entry) => Some(entry.get().clone()),
- Entry::Vacant(entry) => {
- // ensure that we don't place any AllocIds at the very beginning
- // of the metadata file, because that would end up making our indices
- // not special. It is essentially impossible for that to happen,
- // but let's make sure
- assert!(pos >= interpret::SHORTHAND_START);
- entry.insert(pos);
- None
- },
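+ // Assign a dense index to each distinct `AllocId` the first time it
+ // is encountered; only the index is encoded inline. The allocation
+ // itself is serialized later, into `interpret_alloc_index`.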
+ let index = if self.interpret_alloc_ids.insert(*alloc_id) {
+ let idx = self.interpret_alloc_ids.len() - 1;
+ assert_eq!(idx, self.interpret_allocs_inverse.len());
+ self.interpret_allocs_inverse.push(*alloc_id);
+ assert!(self.interpret_allocs.insert(*alloc_id, idx).is_none());
+ idx
+ } else {
+ self.interpret_allocs[alloc_id]
};
- interpret::specialized_encode_alloc_id(
- self,
- tcx,
- *alloc_id,
- shorthand,
- )
+
+ index.encode(self)
}
}
// interpreter allocation cache
interpret_alloc_cache: FxHashMap<usize, interpret::AllocId>,
- // a cache for sizes of interpreter allocations
- // needed to skip already deserialized allocations
- interpret_alloc_size: FxHashMap<usize, usize>,
}
/// Abstract over the various ways one can create metadata decoders.
last_filemap_index: 0,
lazy_state: LazyState::NoNode,
interpret_alloc_cache: FxHashMap::default(),
- interpret_alloc_size: FxHashMap::default(),
}
}
}
impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for DecodeContext<'a, 'tcx> {
fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
- let tcx = self.tcx.expect("need tcx for AllocId decoding");
- let pos = self.position();
- if let Some(cached) = self.interpret_alloc_cache.get(&pos).cloned() {
- // if there's no end position we are currently deserializing a recursive
- // allocation
- if let Some(end) = self.interpret_alloc_size.get(&pos).cloned() {
- trace!("{} already cached as {:?}", pos, cached);
- // skip ahead
- self.opaque.set_position(end);
- return Ok(cached)
- }
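+ // An `AllocId` is serialized as an index into the crate root's
+ // `interpret_alloc_index`; if it has not been decoded before, jump
+ // to the recorded position and decode the allocation there.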
+ let tcx = self.tcx.unwrap();
+ let idx = usize::decode(self)?;
+
+ if let Some(cached) = self.interpret_alloc_cache.get(&idx).cloned() {
+ return Ok(cached);
}
- let id = interpret::specialized_decode_alloc_id(
- self,
- tcx,
- pos,
- |this, pos, alloc_id| { this.interpret_alloc_cache.insert(pos, alloc_id); },
- |this, shorthand| {
- // need to load allocation
- this.with_position(shorthand, |this| interpret::AllocId::decode(this))
- }
- )?;
- let end_pos = self.position();
- assert!(self
- .interpret_alloc_size
- .insert(pos, end_pos)
- .is_none());
- Ok(id)
+ let pos = self
+ .cdata()
+ .root
+ .interpret_alloc_index[idx];
+ self.with_position(pos as usize, |this| {
+ interpret::specialized_decode_alloc_id(
+ this,
+ tcx,
+ |this, alloc_id| {
+ assert!(this.interpret_alloc_cache.insert(idx, alloc_id).is_none());
+ },
+ )
+ })
}
}
use rustc::ty::codec::{self as ty_codec, TyEncoder};
use rustc::session::config::{self, CrateTypeProcMacro};
-use rustc::util::nodemap::FxHashMap;
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
lazy_state: LazyState,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
- interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+
+ interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+ interpret_allocs_inverse: Vec<interpret::AllocId>,
+ interpret_alloc_ids: FxHashSet<interpret::AllocId>,
// This is used to speed up Span encoding.
filemap_cache: Lrc<FileMap>,
impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
- use std::collections::hash_map::Entry;
- let tcx = self.tcx;
- let pos = self.position();
- let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
- Entry::Occupied(entry) => Some(entry.get().clone()),
- Entry::Vacant(entry) => {
- // ensure that we don't place any AllocIds at the very beginning
- // of the metadata file, because that would end up making our indices
- // not special. This is essentially impossible, but let's make sure
- assert!(pos >= interpret::SHORTHAND_START);
- entry.insert(pos);
- None
- },
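+ // The first time an `AllocId` is seen it gets a dense index; only
+ // that index is encoded here. The allocation data itself is emitted
+ // later, into the crate root's `interpret_alloc_index`.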
+ let index = if self.interpret_alloc_ids.insert(*alloc_id) {
+ let idx = self.interpret_alloc_ids.len() - 1;
+ assert_eq!(idx, self.interpret_allocs_inverse.len());
+ self.interpret_allocs_inverse.push(*alloc_id);
+ assert!(self.interpret_allocs.insert(*alloc_id, idx).is_none());
+ idx
+ } else {
+ self.interpret_allocs[alloc_id]
};
- interpret::specialized_encode_alloc_id(
- self,
- tcx,
- *alloc_id,
- shorthand,
- )
+
+ index.encode(self)
}
}
let has_default_lib_allocator =
attr::contains_name(tcx.hir.krate_attrs(), "default_lib_allocator");
let has_global_allocator = *tcx.sess.has_global_allocator.get();
+
+ // Encode the allocation index
+ let interpret_alloc_index = {
+ let mut interpret_alloc_index = Vec::new();
+ let mut n = 0;
+ loop {
+ let new_n = self.interpret_alloc_ids.len();
+ for idx in n..new_n {
+ let id = self.interpret_allocs_inverse[idx];
+ let pos = self.position() as u32;
+ interpret_alloc_index.push(pos);
+ interpret::specialized_encode_alloc_id(
+ self,
+ tcx,
+ id,
+ ).unwrap();
+ }
+ // encoding the allocations above may have registered new ids;
+ // if so, run another pass to serialize those, too
+ if n == new_n {
+ // no new ids were registered, we are done
+ break;
+ }
+ n = new_n;
+ }
+ interpret_alloc_index
+ };
+
let root = self.lazy(&CrateRoot {
name: tcx.crate_name(LOCAL_CRATE),
extra_filename: tcx.sess.opts.cg.extra_filename.clone(),
exported_symbols,
wasm_custom_sections,
index,
+ interpret_alloc_index,
});
let total_bytes = self.position();
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
filemap_cache: tcx.sess.codemap().files()[0].clone(),
- interpret_alloc_shorthands: Default::default(),
+ interpret_allocs: Default::default(),
+ interpret_allocs_inverse: Default::default(),
+ interpret_alloc_ids: Default::default(),
};
// Encode the rustc version string in a predictable location.
pub impls: LazySeq<TraitImpls>,
pub exported_symbols: EncodedExportedSymbols,
pub wasm_custom_sections: LazySeq<DefIndex>,
+ pub interpret_alloc_index: Vec<u32>,
pub index: LazySeq<index::Index>,
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions:rpass1 rpass2
+
+#![cfg_attr(rpass2, warn(dead_code))]
+
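+// `BAA` and `BOO` reference each other, forming a cycle of allocations;
+// this exercises the recursive `AllocId` encoding in the incremental cache.
+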
+pub static mut BAA: *const i8 = unsafe { &BOO as *const _ as *const i8 };
+
+pub static mut BOO: *const i8 = unsafe { &BAA as *const _ as *const i8 };
+
+fn main() {}