git.lizzy.rs Git - rust.git/commitdiff
Don't recurse into allocations, use a global table instead
author Oliver Schneider <git-no-reply-9879165716479413131@oli-obk.de>
Tue, 10 Apr 2018 07:58:46 +0000 (09:58 +0200)
committer Oliver Schneider <git-no-reply-9879165716479413131@oli-obk.de>
Sat, 14 Apr 2018 10:21:46 +0000 (12:21 +0200)
src/librustc/ich/impls_ty.rs
src/librustc/mir/interpret/mod.rs
src/librustc/ty/maps/on_disk_cache.rs
src/librustc_metadata/decoder.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/schema.rs
src/test/incremental/static_cycle/b.rs [new file with mode: 0644]

index 41cfac2674be66b4dc358a30515923f777923758..2425fef7e7f534972b809343e4fafb64ffa4427f 100644 (file)
@@ -398,12 +398,12 @@ fn hash_stable<W: StableHasherResult>(&self,
 
 enum AllocDiscriminant {
     Alloc,
-    ExternStatic,
+    Static,
     Function,
 }
 impl_stable_hash_for!(enum self::AllocDiscriminant {
     Alloc,
-    ExternStatic,
+    Static,
     Function
 });
 
@@ -414,24 +414,26 @@ fn hash_stable<W: StableHasherResult>(
         hasher: &mut StableHasher<W>,
     ) {
         ty::tls::with_opt(|tcx| {
+            trace!("hashing {:?}", *self);
             let tcx = tcx.expect("can't hash AllocIds during hir lowering");
-            if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
+            if let Some(def_id) = tcx.interpret_interner
+                                     .get_corresponding_static_def_id(*self) {
+                AllocDiscriminant::Static.hash_stable(hcx, hasher);
+                trace!("hashing {:?} as static {:?}", *self, def_id);
+                def_id.hash_stable(hcx, hasher);
+            } else if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
                 AllocDiscriminant::Alloc.hash_stable(hcx, hasher);
                 if hcx.alloc_id_recursion_tracker.insert(*self) {
-                    tcx
-                        .interpret_interner
-                        .get_corresponding_static_def_id(*self)
-                        .hash_stable(hcx, hasher);
+                    trace!("hashing {:?} as alloc {:#?}", *self, alloc);
                     alloc.hash_stable(hcx, hasher);
                     assert!(hcx.alloc_id_recursion_tracker.remove(self));
+                } else {
+                    trace!("skipping hashing of {:?} due to recursion", *self);
                 }
             } else if let Some(inst) = tcx.interpret_interner.get_fn(*self) {
+                trace!("hashing {:?} as fn {:#?}", *self, inst);
                 AllocDiscriminant::Function.hash_stable(hcx, hasher);
                 inst.hash_stable(hcx, hasher);
-            } else if let Some(def_id) = tcx.interpret_interner
-                                            .get_corresponding_static_def_id(*self) {
-                AllocDiscriminant::ExternStatic.hash_stable(hcx, hasher);
-                def_id.hash_stable(hcx, hasher);
             } else {
                 bug!("no allocation for {}", self);
             }
index e242ec4985ab4ae838f7a974aef0776fcf4fc4d4..9003cca815ee3d45538ee591f66a0f8495b50660 100644 (file)
@@ -154,10 +154,12 @@ pub fn offset<C: HasDataLayout>(self, i: u64, cx: C) -> EvalResult<'tcx, Self> {
 impl ::rustc_serialize::UseSpecializedEncodable for AllocId {}
 impl ::rustc_serialize::UseSpecializedDecodable for AllocId {}
 
-pub const ALLOC_DISCRIMINANT: usize = 0;
-pub const FN_DISCRIMINANT: usize = 1;
-pub const EXTERN_STATIC_DISCRIMINANT: usize = 2;
-pub const SHORTHAND_START: usize = 3;
+#[derive(RustcDecodable, RustcEncodable)]
+enum AllocKind {
+    Alloc,
+    Fn,
+    ExternStatic,
+}
 
 pub fn specialized_encode_alloc_id<
     'a, 'tcx,
@@ -166,14 +168,10 @@ pub fn specialized_encode_alloc_id<
     encoder: &mut E,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
     alloc_id: AllocId,
-    shorthand: Option<usize>,
 ) -> Result<(), E::Error> {
-    if let Some(shorthand) = shorthand {
-        return shorthand.encode(encoder);
-    }
     if let Some(alloc) = tcx.interpret_interner.get_alloc(alloc_id) {
         trace!("encoding {:?} with {:#?}", alloc_id, alloc);
-        ALLOC_DISCRIMINANT.encode(encoder)?;
+        AllocKind::Alloc.encode(encoder)?;
         alloc.encode(encoder)?;
         // encode whether this allocation is the root allocation of a static
         tcx.interpret_interner
@@ -181,11 +179,11 @@ pub fn specialized_encode_alloc_id<
             .encode(encoder)?;
     } else if let Some(fn_instance) = tcx.interpret_interner.get_fn(alloc_id) {
         trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
-        FN_DISCRIMINANT.encode(encoder)?;
+        AllocKind::Fn.encode(encoder)?;
         fn_instance.encode(encoder)?;
     } else if let Some(did) = tcx.interpret_interner.get_corresponding_static_def_id(alloc_id) {
         // extern "C" statics don't have allocations, just encode its def_id
-        EXTERN_STATIC_DISCRIMINANT.encode(encoder)?;
+        AllocKind::ExternStatic.encode(encoder)?;
         did.encode(encoder)?;
     } else {
         bug!("alloc id without corresponding allocation: {}", alloc_id);
@@ -196,21 +194,18 @@ pub fn specialized_encode_alloc_id<
 pub fn specialized_decode_alloc_id<
     'a, 'tcx,
     D: Decoder,
-    CACHE: FnOnce(&mut D, usize, AllocId),
-    SHORT: FnOnce(&mut D, usize) -> Result<AllocId, D::Error>
+    CACHE: FnOnce(&mut D, AllocId),
 >(
     decoder: &mut D,
     tcx: TyCtxt<'a, 'tcx, 'tcx>,
-    pos: usize,
     cache: CACHE,
-    short: SHORT,
 ) -> Result<AllocId, D::Error> {
-    match usize::decode(decoder)? {
-        ALLOC_DISCRIMINANT => {
+    match AllocKind::decode(decoder)? {
+        AllocKind::Alloc => {
             let alloc_id = tcx.interpret_interner.reserve();
-            trace!("creating alloc id {:?} at {}", alloc_id, pos);
+            trace!("creating alloc id {:?}", alloc_id);
             // insert early to allow recursive allocs
-            cache(decoder, pos, alloc_id);
+            cache(decoder, alloc_id);
 
             let allocation = Allocation::decode(decoder)?;
             trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
@@ -223,26 +218,23 @@ pub fn specialized_decode_alloc_id<
 
             Ok(alloc_id)
         },
-        FN_DISCRIMINANT => {
-            trace!("creating fn alloc id at {}", pos);
+        AllocKind::Fn => {
+            trace!("creating fn alloc id");
             let instance = ty::Instance::decode(decoder)?;
             trace!("decoded fn alloc instance: {:?}", instance);
             let id = tcx.interpret_interner.create_fn_alloc(instance);
             trace!("created fn alloc id: {:?}", id);
-            cache(decoder, pos, id);
+            cache(decoder, id);
             Ok(id)
         },
-        EXTERN_STATIC_DISCRIMINANT => {
-            trace!("creating extern static alloc id at {}", pos);
+        AllocKind::ExternStatic => {
+            trace!("creating extern static alloc id");
             let did = DefId::decode(decoder)?;
             let alloc_id = tcx.interpret_interner.reserve();
+            cache(decoder, alloc_id);
             tcx.interpret_interner.cache(did, alloc_id);
             Ok(alloc_id)
         },
-        shorthand => {
-            trace!("loading shorthand {}", shorthand);
-            short(decoder, shorthand)
-        },
     }
 }
 
index 9ea4b21c552218dc785bf7a8a57189fd1b8af681..62f2cd88935d367b75f3ff89c06d693607ad1801 100644 (file)
@@ -77,12 +77,11 @@ pub struct OnDiskCache<'sess> {
     // `serialized_data`.
     prev_diagnostics_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
 
-    // A cache to ensure we don't read allocations twice
-    interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
+    // Alloc indices to memory location map
+    prev_interpret_alloc_index: Vec<AbsoluteBytePos>,
 
-    // A map from positions to size of the serialized allocation
-    // so we can skip over already processed allocations
-    interpret_alloc_size: RefCell<FxHashMap<usize, usize>>,
+    /// Deserialization: A cache to ensure we don't read allocations twice
+    interpret_alloc_cache: RefCell<FxHashMap<usize, interpret::AllocId>>,
 }
 
 // This type is used only for (de-)serialization.
@@ -92,6 +91,8 @@ struct Footer {
     prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
     query_result_index: EncodedQueryResultIndex,
     diagnostics_index: EncodedQueryResultIndex,
+    // the location of all allocations
+    interpret_alloc_index: Vec<AbsoluteBytePos>,
 }
 
 type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
@@ -148,8 +149,8 @@ pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> OnDiskCache
             query_result_index: footer.query_result_index.into_iter().collect(),
             prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
             synthetic_expansion_infos: Lock::new(FxHashMap()),
+            prev_interpret_alloc_index: footer.interpret_alloc_index,
             interpret_alloc_cache: RefCell::new(FxHashMap::default()),
-            interpret_alloc_size: RefCell::new(FxHashMap::default()),
         }
     }
 
@@ -165,8 +166,8 @@ pub fn new_empty(codemap: &'sess CodeMap) -> OnDiskCache<'sess> {
             query_result_index: FxHashMap(),
             prev_diagnostics_index: FxHashMap(),
             synthetic_expansion_infos: Lock::new(FxHashMap()),
+            prev_interpret_alloc_index: Vec::new(),
             interpret_alloc_cache: RefCell::new(FxHashMap::default()),
-            interpret_alloc_size: RefCell::new(FxHashMap::default()),
         }
     }
 
@@ -199,7 +200,9 @@ pub fn serialize<'a, 'tcx, E>(&self,
                 type_shorthands: FxHashMap(),
                 predicate_shorthands: FxHashMap(),
                 expn_info_shorthands: FxHashMap(),
-                interpret_alloc_shorthands: FxHashMap(),
+                interpret_allocs: FxHashMap(),
+                interpret_alloc_ids: FxHashSet(),
+                interpret_allocs_inverse: Vec::new(),
                 codemap: CachingCodemapView::new(tcx.sess.codemap()),
                 file_to_file_index,
             };
@@ -277,6 +280,31 @@ pub fn serialize<'a, 'tcx, E>(&self,
                 diagnostics_index
             };
 
+            let interpret_alloc_index = {
+                let mut interpret_alloc_index = Vec::new();
+                let mut n = 0;
+                loop {
+                    let new_n = encoder.interpret_alloc_ids.len();
+                    for idx in n..new_n {
+                        let id = encoder.interpret_allocs_inverse[idx];
+                        let pos = AbsoluteBytePos::new(encoder.position());
+                        interpret_alloc_index.push(pos);
+                        interpret::specialized_encode_alloc_id(
+                            &mut encoder,
+                            tcx,
+                            id,
+                        )?;
+                    }
+                    // if we have found new ids, serialize those, too
+                    if n == new_n {
+                        // otherwise, abort
+                        break;
+                    }
+                    n = new_n;
+                }
+                interpret_alloc_index
+            };
+
             let sorted_cnums = sorted_cnums_including_local_crate(tcx);
             let prev_cnums: Vec<_> = sorted_cnums.iter().map(|&cnum| {
                 let crate_name = tcx.original_crate_name(cnum).as_str().to_string();
@@ -291,6 +319,7 @@ pub fn serialize<'a, 'tcx, E>(&self,
                 prev_cnums,
                 query_result_index,
                 diagnostics_index,
+                interpret_alloc_index,
             })?;
 
             // Encode the position of the footer as the last 8 bytes of the
@@ -396,8 +425,8 @@ fn load_indexed<'tcx, T>(&self,
             file_index_to_file: &self.file_index_to_file,
             file_index_to_stable_id: &self.file_index_to_stable_id,
             synthetic_expansion_infos: &self.synthetic_expansion_infos,
+            prev_interpret_alloc_index: &self.prev_interpret_alloc_index,
             interpret_alloc_cache: &self.interpret_alloc_cache,
-            interpret_alloc_size: &self.interpret_alloc_size,
         };
 
         match decode_tagged(&mut decoder, dep_node_index) {
@@ -460,7 +489,8 @@ struct CacheDecoder<'a, 'tcx: 'a, 'x> {
     file_index_to_file: &'x Lock<FxHashMap<FileMapIndex, Lrc<FileMap>>>,
     file_index_to_stable_id: &'x FxHashMap<FileMapIndex, StableFilemapId>,
     interpret_alloc_cache: &'x RefCell<FxHashMap<usize, interpret::AllocId>>,
-    interpret_alloc_size: &'x RefCell<FxHashMap<usize, usize>>,
+    /// maps from index in the cache file to location in the cache file
+    prev_interpret_alloc_index: &'x [AbsoluteBytePos],
 }
 
 impl<'a, 'tcx, 'x> CacheDecoder<'a, 'tcx, 'x> {
@@ -584,36 +614,29 @@ fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum {
 impl<'a, 'tcx, 'x> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx, 'x> {
     fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
         let tcx = self.tcx;
-        let pos = TyDecoder::position(self);
-        trace!("specialized_decode_alloc_id: {:?}", pos);
-        if let Some(cached) = self.interpret_alloc_cache.borrow().get(&pos).cloned() {
-            // if there's no end position we are currently deserializing a recursive
-            // allocation
-            if let Some(end) = self.interpret_alloc_size.borrow().get(&pos).cloned() {
-                trace!("{} already cached as {:?}", pos, cached);
-                // skip ahead
-                self.opaque.set_position(end);
-                return Ok(cached)
-            }
+        let idx = usize::decode(self)?;
+        trace!("loading index {}", idx);
+
+        if let Some(cached) = self.interpret_alloc_cache.borrow().get(&idx).cloned() {
+            trace!("loading alloc id {:?} from alloc_cache", cached);
+            return Ok(cached);
         }
-        let id = interpret::specialized_decode_alloc_id(
-            self,
-            tcx,
-            pos,
-            |this, pos, alloc_id| {
-                assert!(this.interpret_alloc_cache.borrow_mut().insert(pos, alloc_id).is_none());
-            },
-            |this, shorthand| {
-                // need to load allocation
-                this.with_position(shorthand, |this| interpret::AllocId::decode(this))
-            }
-        )?;
-        assert!(self
-            .interpret_alloc_size
-            .borrow_mut()
-            .insert(pos, TyDecoder::position(self))
-            .is_none());
-        Ok(id)
+        let pos = self.prev_interpret_alloc_index[idx].to_usize();
+        trace!("loading position {}", pos);
+        self.with_position(pos, |this| {
+            interpret::specialized_decode_alloc_id(
+                this,
+                tcx,
+                |this, alloc_id| {
+                    trace!("caching idx {} for alloc id {} at position {}", idx, alloc_id, pos);
+                    assert!(this
+                        .interpret_alloc_cache
+                        .borrow_mut()
+                        .insert(idx, alloc_id)
+                        .is_none());
+                },
+            )
+        })
     }
 }
 impl<'a, 'tcx, 'x> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx, 'x> {
@@ -777,7 +800,9 @@ struct CacheEncoder<'enc, 'a, 'tcx, E>
     type_shorthands: FxHashMap<ty::Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
     expn_info_shorthands: FxHashMap<Mark, AbsoluteBytePos>,
-    interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+    interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+    interpret_allocs_inverse: Vec<interpret::AllocId>,
+    interpret_alloc_ids: FxHashSet<interpret::AllocId>,
     codemap: CachingCodemapView<'tcx>,
     file_to_file_index: FxHashMap<*const FileMap, FileMapIndex>,
 }
@@ -814,27 +839,17 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder<interpret::AllocId> for CacheEncoder<
     where E: 'enc + ty_codec::TyEncoder
 {
     fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
-        use std::collections::hash_map::Entry;
-        let tcx = self.tcx;
-        let pos = self.position();
-        let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
-            Entry::Occupied(entry) => Some(entry.get().clone()),
-            Entry::Vacant(entry) => {
-                // ensure that we don't place any AllocIds at the very beginning
-                // of the metadata file, because that would end up making our indices
-                // not special. It is essentially impossible for that to happen,
-                // but let's make sure
-                assert!(pos >= interpret::SHORTHAND_START);
-                entry.insert(pos);
-                None
-            },
+        let index = if self.interpret_alloc_ids.insert(*alloc_id) {
+            let idx = self.interpret_alloc_ids.len() - 1;
+            assert_eq!(idx, self.interpret_allocs_inverse.len());
+            self.interpret_allocs_inverse.push(*alloc_id);
+            assert!(self.interpret_allocs.insert(*alloc_id, idx).is_none());
+            idx
+        } else {
+            self.interpret_allocs[alloc_id]
         };
-        interpret::specialized_encode_alloc_id(
-            self,
-            tcx,
-            *alloc_id,
-            shorthand,
-        )
+
+        index.encode(self)
     }
 }
 
index 3ea4ddc25226fffe80d39cbeba80b474ea18e2cd..9173d12827446d3216c5e284b8e4bccde308bbc9 100644 (file)
@@ -59,9 +59,6 @@ pub struct DecodeContext<'a, 'tcx: 'a> {
 
     // interpreter allocation cache
     interpret_alloc_cache: FxHashMap<usize, interpret::AllocId>,
-    // a cache for sizes of interpreter allocations
-    // needed to skip already deserialized allocations
-    interpret_alloc_size: FxHashMap<usize, usize>,
 }
 
 /// Abstract over the various ways one can create metadata decoders.
@@ -81,7 +78,6 @@ fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> {
             last_filemap_index: 0,
             lazy_state: LazyState::NoNode,
             interpret_alloc_cache: FxHashMap::default(),
-            interpret_alloc_size: FxHashMap::default(),
         }
     }
 }
@@ -290,34 +286,25 @@ fn specialized_decode(&mut self) -> Result<LocalDefId, Self::Error> {
 
 impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for DecodeContext<'a, 'tcx> {
     fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
-        let tcx = self.tcx.expect("need tcx for AllocId decoding");
-        let pos = self.position();
-        if let Some(cached) = self.interpret_alloc_cache.get(&pos).cloned() {
-            // if there's no end position we are currently deserializing a recursive
-            // allocation
-            if let Some(end) = self.interpret_alloc_size.get(&pos).cloned() {
-                trace!("{} already cached as {:?}", pos, cached);
-                // skip ahead
-                self.opaque.set_position(end);
-                return Ok(cached)
-            }
+        let tcx = self.tcx.unwrap();
+        let idx = usize::decode(self)?;
+
+        if let Some(cached) = self.interpret_alloc_cache.get(&idx).cloned() {
+            return Ok(cached);
         }
-        let id = interpret::specialized_decode_alloc_id(
-            self,
-            tcx,
-            pos,
-            |this, pos, alloc_id| { this.interpret_alloc_cache.insert(pos, alloc_id); },
-            |this, shorthand| {
-                // need to load allocation
-                this.with_position(shorthand, |this| interpret::AllocId::decode(this))
-            }
-        )?;
-        let end_pos = self.position();
-        assert!(self
-            .interpret_alloc_size
-            .insert(pos, end_pos)
-            .is_none());
-        Ok(id)
+        let pos = self
+            .cdata()
+            .root
+            .interpret_alloc_index[idx];
+        self.with_position(pos as usize, |this| {
+            interpret::specialized_decode_alloc_id(
+                this,
+                tcx,
+                |this, alloc_id| {
+                    assert!(this.interpret_alloc_cache.insert(idx, alloc_id).is_none());
+                },
+            )
+        })
     }
 }
 
index 22b440eea60efec9e5ffa5606e9a4266f8d69d57..cc2d0eab2331ecc034bebd42173abe0c9fccf4c9 100644 (file)
@@ -29,7 +29,7 @@
 use rustc::ty::codec::{self as ty_codec, TyEncoder};
 
 use rustc::session::config::{self, CrateTypeProcMacro};
-use rustc::util::nodemap::FxHashMap;
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
 
 use rustc_data_structures::stable_hasher::StableHasher;
 use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
@@ -59,7 +59,10 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
     lazy_state: LazyState,
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
-    interpret_alloc_shorthands: FxHashMap<interpret::AllocId, usize>,
+
+    interpret_allocs: FxHashMap<interpret::AllocId, usize>,
+    interpret_allocs_inverse: Vec<interpret::AllocId>,
+    interpret_alloc_ids: FxHashSet<interpret::AllocId>,
 
     // This is used to speed up Span encoding.
     filemap_cache: Lrc<FileMap>,
@@ -196,26 +199,17 @@ fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
 
 impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
     fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
-        use std::collections::hash_map::Entry;
-        let tcx = self.tcx;
-        let pos = self.position();
-        let shorthand = match self.interpret_alloc_shorthands.entry(*alloc_id) {
-            Entry::Occupied(entry) => Some(entry.get().clone()),
-            Entry::Vacant(entry) => {
-                // ensure that we don't place any AllocIds at the very beginning
-                // of the metadata file, because that would end up making our indices
-                // not special. This is essentially impossible, but let's make sure
-                assert!(pos >= interpret::SHORTHAND_START);
-                entry.insert(pos);
-                None
-            },
+        let index = if self.interpret_alloc_ids.insert(*alloc_id) {
+            let idx = self.interpret_alloc_ids.len() - 1;
+            assert_eq!(idx, self.interpret_allocs_inverse.len());
+            self.interpret_allocs_inverse.push(*alloc_id);
+            assert!(self.interpret_allocs.insert(*alloc_id, idx).is_none());
+            idx
+        } else {
+            self.interpret_allocs[alloc_id]
         };
-        interpret::specialized_encode_alloc_id(
-            self,
-            tcx,
-            *alloc_id,
-            shorthand,
-        )
+
+        index.encode(self)
     }
 }
 
@@ -460,6 +454,33 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
         let has_default_lib_allocator =
             attr::contains_name(tcx.hir.krate_attrs(), "default_lib_allocator");
         let has_global_allocator = *tcx.sess.has_global_allocator.get();
+
+        // Encode the allocation index
+        let interpret_alloc_index = {
+            let mut interpret_alloc_index = Vec::new();
+            let mut n = 0;
+            loop {
+                let new_n = self.interpret_alloc_ids.len();
+                for idx in n..new_n {
+                    let id = self.interpret_allocs_inverse[idx];
+                    let pos = self.position() as u32;
+                    interpret_alloc_index.push(pos);
+                    interpret::specialized_encode_alloc_id(
+                        self,
+                        tcx,
+                        id,
+                    ).unwrap();
+                }
+                // if we have found new ids, serialize those, too
+                if n == new_n {
+                    // otherwise, abort
+                    break;
+                }
+                n = new_n;
+            }
+            interpret_alloc_index
+        };
+
         let root = self.lazy(&CrateRoot {
             name: tcx.crate_name(LOCAL_CRATE),
             extra_filename: tcx.sess.opts.cg.extra_filename.clone(),
@@ -492,6 +513,7 @@ fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
             exported_symbols,
             wasm_custom_sections,
             index,
+            interpret_alloc_index,
         });
 
         let total_bytes = self.position();
@@ -1760,7 +1782,9 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
             type_shorthands: Default::default(),
             predicate_shorthands: Default::default(),
             filemap_cache: tcx.sess.codemap().files()[0].clone(),
-            interpret_alloc_shorthands: Default::default(),
+            interpret_allocs: Default::default(),
+            interpret_allocs_inverse: Default::default(),
+            interpret_alloc_ids: Default::default(),
         };
 
         // Encode the rustc version string in a predictable location.
index e3986bb7d91f94c5bdbde85cef7b828b75cf6d58..4eaf08742ecaeaeb1e5e75c48f144a95dad04f9e 100644 (file)
@@ -207,6 +207,7 @@ pub struct CrateRoot {
     pub impls: LazySeq<TraitImpls>,
     pub exported_symbols: EncodedExportedSymbols,
     pub wasm_custom_sections: LazySeq<DefIndex>,
+    pub interpret_alloc_index: Vec<u32>,
 
     pub index: LazySeq<index::Index>,
 }
diff --git a/src/test/incremental/static_cycle/b.rs b/src/test/incremental/static_cycle/b.rs
new file mode 100644 (file)
index 0000000..b659703
--- /dev/null
@@ -0,0 +1,19 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions:rpass1 rpass2
+
+#![cfg_attr(rpass2, warn(dead_code))]
+
+pub static mut BAA: *const i8 = unsafe { &BOO as *const _ as *const i8 };
+
+pub static mut BOO: *const i8 = unsafe { &BAA as *const _ as *const i8 };
+
+fn main() {}