1 use crate::dep_graph::{DepNodeIndex, SerializedDepNodeIndex};
2 use crate::mir::interpret;
3 use crate::mir::interpret::{AllocDecodingSession, AllocDecodingState};
4 use crate::ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
5 use crate::ty::context::TyCtxt;
6 use crate::ty::{self, Ty};
7 use rustc_data_structures::fingerprint::Fingerprint;
8 use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
9 use rustc_data_structures::sync::{HashMapExt, Lock, Lrc, OnceCell};
10 use rustc_data_structures::thin_vec::ThinVec;
11 use rustc_errors::Diagnostic;
12 use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, LOCAL_CRATE};
13 use rustc_hir::definitions::DefPathHash;
14 use rustc_index::vec::{Idx, IndexVec};
15 use rustc_serialize::{
16 opaque, Decodable, Decoder, Encodable, Encoder, SpecializedDecoder, SpecializedEncoder,
17 UseSpecializedDecodable, UseSpecializedEncodable,
19 use rustc_session::{CrateDisambiguator, Session};
20 use rustc_span::hygiene::{
21 ExpnDataDecodeMode, ExpnDataEncodeMode, ExpnId, HygieneDecodeContext, HygieneEncodeContext,
22 SyntaxContext, SyntaxContextData,
24 use rustc_span::source_map::{SourceMap, StableSourceFileId};
25 use rustc_span::symbol::Ident;
26 use rustc_span::CachingSourceMapView;
27 use rustc_span::{BytePos, ExpnData, SourceFile, Span, DUMMY_SP};
// Tags written by `encode_tagged` and verified by `decode_tagged` so that a
// corrupted or misaligned cache read fails loudly instead of decoding garbage.
// The footer tag is a 128-bit magic value that is very unlikely to occur by accident.
30 const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
// Span encoding discriminants: a valid span is followed by file/line/col/len/ctxt
// data; an invalid span (e.g. `DUMMY_SP` or an unmappable position) is the tag alone.
32 const TAG_VALID_SPAN: u8 = 0;
33 const TAG_INVALID_SPAN: u8 = 1;
// Hygiene-data discriminants distinguishing serialized `SyntaxContextData`
// entries from serialized `ExpnData` entries.
35 const TAG_SYNTAX_CONTEXT: u8 = 0;
36 const TAG_EXPN_DATA: u8 = 1;
38 /// Provides an interface to incremental compilation data cached from the
39 /// previous compilation session. This data will eventually include the results
40 /// of a few selected queries (like `typeck` and `mir_optimized`) and
41 /// any diagnostics that have been emitted during a query.
42 pub struct OnDiskCache<'sess> {
43 // The complete cache data in serialized form.
44 serialized_data: Vec<u8>,
46 // Collects all `Diagnostic`s emitted during the current compilation
48 current_diagnostics: Lock<FxHashMap<DepNodeIndex, Vec<Diagnostic>>>,
// `CrateNum`s of the *previous* session, as (raw id, crate name, disambiguator)
// triples; used by `compute_cnum_map` to remap them to current-session ids.
50 prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
// Lazily-computed previous-session -> current-session `CrateNum` mapping
// (initialized on first decode via `OnceCell::get_or_init`).
51 cnum_map: OnceCell<IndexVec<CrateNum, Option<CrateNum>>>,
// The current session's source map, needed to re-resolve spans on decode.
53 source_map: &'sess SourceMap,
54 file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,
56 // Caches that are populated lazily during decoding.
57 file_index_to_file: Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
59 // A map from dep-node to the position of the cached query result in
61 query_result_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
63 // A map from dep-node to the position of any associated diagnostics in
65 prev_diagnostics_index: FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
// Shared state for decoding interned `AllocId`s; hands out per-decode sessions.
67 alloc_decoding_state: AllocDecodingState,
69 // A map from syntax context ids to the position of their associated
70 // `SyntaxContextData`. We use a `u32` instead of a `SyntaxContext`
71 // to represent the fact that we are storing *encoded* ids. When we decode
72 // a `SyntaxContext`, a new id will be allocated from the global `HygieneData`,
73 // which will almost certainly be different than the serialized id.
74 syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
75 // A map from the `DefPathHash` of an `ExpnId` to the position
76 // of their associated `ExpnData`. Ideally, we would store a `DefId`,
77 // but we need to decode this before we've constructed a `TyCtxt` (which
78 // makes it difficult to decode a `DefId`).
80 // Note that these `DefPathHashes` correspond to both local and foreign
81 // `ExpnData` (e.g `ExpnData.krate` may not be `LOCAL_CRATE`). Alternatively,
82 // we could look up the `ExpnData` from the metadata of foreign crates,
83 // but it seemed easier to have `OnDiskCache` be independent of the `CStore`.
84 expn_data: FxHashMap<u32, AbsoluteBytePos>,
85 // Additional information used when decoding hygiene data.
86 hygiene_context: HygieneDecodeContext,
89 // This type is used only for serialization and deserialization.
// NOTE(review): the `struct Footer {` header line is not visible in this view;
// these are the footer's fields. The footer is written at the end of the cache
// file and holds every lookup table needed to decode the rest of the data.
90 #[derive(RustcEncodable, RustcDecodable)]
92 file_index_to_stable_id: FxHashMap<SourceFileIndex, StableSourceFileId>,
93 prev_cnums: Vec<(u32, String, CrateDisambiguator)>,
94 query_result_index: EncodedQueryResultIndex,
95 diagnostics_index: EncodedQueryResultIndex,
96 // The location of all allocations.
97 interpret_alloc_index: Vec<u32>,
98 // See `OnDiskCache.syntax_contexts`
99 syntax_contexts: FxHashMap<u32, AbsoluteBytePos>,
100 // See `OnDiskCache.expn_data`
101 expn_data: FxHashMap<u32, AbsoluteBytePos>,
// Index types pairing a dep-node with the absolute byte offset of its
// serialized payload within the cache file.
104 type EncodedQueryResultIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
105 type EncodedDiagnosticsIndex = Vec<(SerializedDepNodeIndex, AbsoluteBytePos)>;
106 type EncodedDiagnostics = Vec<Diagnostic>;
// Compact handle for a `SourceFile`, stable only within a single cache file.
108 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
109 struct SourceFileIndex(u32);
// An absolute byte offset into the serialized cache data (fits in u32).
111 #[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, RustcEncodable, RustcDecodable)]
112 struct AbsoluteBytePos(u32);
114 impl AbsoluteBytePos {
// Constructs a byte position from a `usize`, asserting (in debug builds)
// that it fits in the `u32` backing storage.
115 fn new(pos: usize) -> AbsoluteBytePos {
116 debug_assert!(pos <= u32::MAX as usize);
117 AbsoluteBytePos(pos as u32)
// Widens the stored position back to `usize` for use as a decoder offset.
120 fn to_usize(self) -> usize {
125 impl<'sess> OnDiskCache<'sess> {
126 /// Creates a new `OnDiskCache` instance from the serialized data in `data`.
127 pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> Self {
128 debug_assert!(sess.opts.incremental.is_some());
130 // Wrap in a scope so we can borrow `data`.
131 let footer: Footer = {
132 let mut decoder = opaque::Decoder::new(&data[..], start_pos);
134 // Decode the *position* of the footer, which can be found in the
135 // last 8 bytes of the file.
136 decoder.set_position(data.len() - IntEncodedWithFixedSize::ENCODED_SIZE);
137 let footer_pos = IntEncodedWithFixedSize::decode(&mut decoder)
138 .expect("error while trying to decode footer position")
141 // Decode the file footer, which contains all the lookup tables, etc.
142 decoder.set_position(footer_pos);
144 decode_tagged(&mut decoder, TAG_FILE_FOOTER)
145 .expect("error while trying to decode footer position")
149 serialized_data: data,
150 file_index_to_stable_id: footer.file_index_to_stable_id,
151 file_index_to_file: Default::default(),
152 prev_cnums: footer.prev_cnums,
153 cnum_map: OnceCell::new(),
154 source_map: sess.source_map(),
155 current_diagnostics: Default::default(),
156 query_result_index: footer.query_result_index.into_iter().collect(),
157 prev_diagnostics_index: footer.diagnostics_index.into_iter().collect(),
158 alloc_decoding_state: AllocDecodingState::new(footer.interpret_alloc_index),
159 syntax_contexts: footer.syntax_contexts,
160 expn_data: footer.expn_data,
161 hygiene_context: Default::default(),
// Creates an `OnDiskCache` with no serialized data, used when there is no
// usable cache from a previous session.
165 pub fn new_empty(source_map: &'sess SourceMap) -> Self {
167 serialized_data: Vec::new(),
168 file_index_to_stable_id: Default::default(),
169 file_index_to_file: Default::default(),
171 cnum_map: OnceCell::new(),
173 current_diagnostics: Default::default(),
174 query_result_index: Default::default(),
175 prev_diagnostics_index: Default::default(),
176 alloc_decoding_state: AllocDecodingState::new(Vec::new()),
177 syntax_contexts: FxHashMap::default(),
178 expn_data: FxHashMap::default(),
179 hygiene_context: Default::default(),
// Serializes query results, diagnostics, allocations, hygiene data, and the
// footer into `encoder`, producing the on-disk cache for the next session.
183 pub fn serialize<'tcx, E>(&self, tcx: TyCtxt<'tcx>, encoder: &mut E) -> Result<(), E::Error>
187 // Serializing the `DepGraph` should not modify it.
188 tcx.dep_graph.with_ignore(|| {
189 // Allocate `SourceFileIndex`es.
190 let (file_to_file_index, file_index_to_stable_id) = {
191 let files = tcx.sess.source_map().files();
192 let mut file_to_file_index =
193 FxHashMap::with_capacity_and_hasher(files.len(), Default::default());
194 let mut file_index_to_stable_id =
195 FxHashMap::with_capacity_and_hasher(files.len(), Default::default());
197 for (index, file) in files.iter().enumerate() {
198 let index = SourceFileIndex(index as u32);
// Keyed by pointer identity: each `SourceFile` is a unique `Lrc` allocation.
199 let file_ptr: *const SourceFile = &**file as *const _;
200 file_to_file_index.insert(file_ptr, index);
201 file_index_to_stable_id.insert(index, StableSourceFileId::new(&file));
204 (file_to_file_index, file_index_to_stable_id)
207 let hygiene_encode_context = HygieneEncodeContext::default();
209 let mut encoder = CacheEncoder {
212 type_shorthands: Default::default(),
213 predicate_shorthands: Default::default(),
214 interpret_allocs: Default::default(),
215 source_map: CachingSourceMapView::new(tcx.sess.source_map()),
217 hygiene_context: &hygiene_encode_context,
220 // Load everything into memory so we can write it out to the on-disk
221 // cache. The vast majority of cacheable query results should already
222 // be in memory, so this should be a cheap operation.
223 tcx.dep_graph.exec_cache_promotions(tcx);
225 // Encode query results.
226 let mut query_result_index = EncodedQueryResultIndex::new();
228 tcx.sess.time("encode_query_results", || {
229 let enc = &mut encoder;
230 let qri = &mut query_result_index;
232 macro_rules! encode_queries {
233 ($($query:ident,)*) => {
235 encode_query_results::<ty::query::queries::$query<'_>, _>(
// Expands `encode_queries!` over the full list of cacheable queries.
244 rustc_cached_queries!(encode_queries!);
249 // Encode diagnostics.
250 let diagnostics_index: EncodedDiagnosticsIndex = self
254 .map(|(dep_node_index, diagnostics)| {
255 let pos = AbsoluteBytePos::new(encoder.position());
256 // Let's make sure we get the expected type here.
257 let diagnostics: &EncodedDiagnostics = diagnostics;
258 let dep_node_index = SerializedDepNodeIndex::new(dep_node_index.index());
259 encoder.encode_tagged(dep_node_index, diagnostics)?;
261 Ok((dep_node_index, pos))
263 .collect::<Result<_, _>>()?;
// Encoding query results/diagnostics may itself have referenced new
// `AllocId`s; loop until no new ones appear, recording each one's position.
265 let interpret_alloc_index = {
266 let mut interpret_alloc_index = Vec::new();
269 let new_n = encoder.interpret_allocs.len();
270 // If we have found new IDs, serialize those too.
275 interpret_alloc_index.reserve(new_n - n);
276 for idx in n..new_n {
277 let id = encoder.interpret_allocs[idx];
278 let pos = encoder.position() as u32;
279 interpret_alloc_index.push(pos);
280 interpret::specialized_encode_alloc_id(&mut encoder, tcx, id)?;
284 interpret_alloc_index
// Record (raw id, name, disambiguator) for every crate so the next session
// can rebuild a previous->current `CrateNum` mapping.
287 let sorted_cnums = sorted_cnums_including_local_crate(tcx);
288 let prev_cnums: Vec<_> = sorted_cnums
291 let crate_name = tcx.original_crate_name(cnum).to_string();
292 let crate_disambiguator = tcx.crate_disambiguator(cnum);
293 (cnum.as_u32(), crate_name, crate_disambiguator)
297 let mut syntax_contexts = FxHashMap::default();
298 let mut expn_ids = FxHashMap::default();
300 // Encode all hygiene data (`SyntaxContextData` and `ExpnData`) from the current
303 hygiene_encode_context.encode(
305 |encoder, index, ctxt_data| {
306 let pos = AbsoluteBytePos::new(encoder.position());
307 encoder.encode_tagged(TAG_SYNTAX_CONTEXT, ctxt_data)?;
308 syntax_contexts.insert(index, pos);
311 |encoder, index, expn_data| {
312 let pos = AbsoluteBytePos::new(encoder.position());
313 encoder.encode_tagged(TAG_EXPN_DATA, expn_data)?;
314 expn_ids.insert(index, pos);
319 // Encode the file footer.
320 let footer_pos = encoder.position() as u64;
321 encoder.encode_tagged(
324 file_index_to_stable_id,
328 interpret_alloc_index,
334 // Encode the position of the footer as the last 8 bytes of the
335 // file so we know where to look for it.
336 IntEncodedWithFixedSize(footer_pos).encode(encoder.encoder)?;
338 // DO NOT WRITE ANYTHING TO THE ENCODER AFTER THIS POINT! The address
339 // of the footer must be the last thing in the data stream.
// Helper: all `CrateNum`s of the current session (local crate plus
// dependencies), sorted for deterministic output.
343 fn sorted_cnums_including_local_crate(tcx: TyCtxt<'_>) -> Vec<CrateNum> {
344 let mut cnums = vec![LOCAL_CRATE];
345 cnums.extend_from_slice(&tcx.crates()[..]);
346 cnums.sort_unstable();
347 // Just to be sure...
354 /// Loads a diagnostic emitted during the previous compilation session.
355 pub fn load_diagnostics(
358 dep_node_index: SerializedDepNodeIndex,
359 ) -> Vec<Diagnostic> {
360 let diagnostics: Option<EncodedDiagnostics> =
361 self.load_indexed(tcx, dep_node_index, &self.prev_diagnostics_index, "diagnostics");
363 diagnostics.unwrap_or_default()
366 /// Stores a diagnostic emitted during the current compilation session.
367 /// Anything stored like this will be available via `load_diagnostics` in
368 /// the next compilation session.
371 pub fn store_diagnostics(
373 dep_node_index: DepNodeIndex,
374 diagnostics: ThinVec<Diagnostic>,
376 let mut current_diagnostics = self.current_diagnostics.borrow_mut();
// Each regular query's dep-node gets exactly one diagnostics entry.
377 let prev = current_diagnostics.insert(dep_node_index, diagnostics.into());
378 debug_assert!(prev.is_none());
381 /// Returns the cached query result if there is something in the cache for
382 /// the given `SerializedDepNodeIndex`; otherwise returns `None`.
383 pub fn try_load_query_result<T>(
386 dep_node_index: SerializedDepNodeIndex,
391 self.load_indexed(tcx, dep_node_index, &self.query_result_index, "query result")
394 /// Stores a diagnostic emitted during computation of an anonymous query.
395 /// Since many anonymous queries can share the same `DepNode`, we aggregate
396 /// them -- as opposed to regular queries where we assume that there is a
397 /// 1:1 relationship between query-key and `DepNode`.
400 pub fn store_diagnostics_for_anon_node(
402 dep_node_index: DepNodeIndex,
403 diagnostics: ThinVec<Diagnostic>,
405 let mut current_diagnostics = self.current_diagnostics.borrow_mut();
407 let x = current_diagnostics.entry(dep_node_index).or_insert(Vec::new());
409 x.extend(Into::<Vec<_>>::into(diagnostics));
// Looks up `dep_node_index` in `index` and, if present, decodes the tagged
// value at the recorded byte position. Bugs out on decode failure.
412 fn load_indexed<'tcx, T>(
415 dep_node_index: SerializedDepNodeIndex,
416 index: &FxHashMap<SerializedDepNodeIndex, AbsoluteBytePos>,
417 debug_tag: &'static str,
422 let pos = index.get(&dep_node_index).cloned()?;
424 self.with_decoder(tcx, pos, |decoder| match decode_tagged(decoder, dep_node_index) {
426 Err(e) => bug!("could not decode cached {}: {}", debug_tag, e),
// Runs `f` with a `CacheDecoder` positioned at `pos`, lazily initializing
// the previous->current `CrateNum` map on first use.
430 fn with_decoder<'tcx, T, F: FnOnce(&mut CacheDecoder<'sess, 'tcx>) -> T>(
433 pos: AbsoluteBytePos,
440 self.cnum_map.get_or_init(|| Self::compute_cnum_map(tcx, &self.prev_cnums[..]));
442 let mut decoder = CacheDecoder {
444 opaque: opaque::Decoder::new(&self.serialized_data[..], pos.to_usize()),
445 source_map: self.source_map,
447 file_index_to_file: &self.file_index_to_file,
448 file_index_to_stable_id: &self.file_index_to_stable_id,
449 alloc_decoding_session: self.alloc_decoding_state.new_decoding_session(),
450 syntax_contexts: &self.syntax_contexts,
451 expn_data: &self.expn_data,
452 hygiene_context: &self.hygiene_context,
457 // This function builds mapping from previous-session-`CrateNum` to
458 // current-session-`CrateNum`. There might be `CrateNum`s from the previous
459 // `Session` that don't occur in the current one. For these, the mapping
463 prev_cnums: &[(u32, String, CrateDisambiguator)],
464 ) -> IndexVec<CrateNum, Option<CrateNum>> {
465 tcx.dep_graph.with_ignore(|| {
// Crates are matched across sessions by (name, disambiguator), which is
// stable even though the raw `CrateNum` values are not.
466 let current_cnums = tcx
467 .all_crate_nums(LOCAL_CRATE)
470 let crate_name = tcx.original_crate_name(cnum).to_string();
471 let crate_disambiguator = tcx.crate_disambiguator(cnum);
472 ((crate_name, crate_disambiguator), cnum)
474 .collect::<FxHashMap<_, _>>();
476 let map_size = prev_cnums.iter().map(|&(cnum, ..)| cnum).max().unwrap_or(0) + 1;
477 let mut map = IndexVec::from_elem_n(None, map_size as usize);
479 for &(prev_cnum, ref crate_name, crate_disambiguator) in prev_cnums {
480 let key = (crate_name.clone(), crate_disambiguator);
481 map[CrateNum::from_u32(prev_cnum)] = current_cnums.get(&key).cloned();
// The local crate always maps to itself.
484 map[LOCAL_CRATE] = Some(LOCAL_CRATE);
490 //- DECODING -------------------------------------------------------------------
492 /// A decoder that can read from the incr. comp. cache. It is similar to the one
493 /// we use for crate metadata decoding in that it can rebase spans and eventually
494 /// will also handle things that contain `Ty` instances.
495 struct CacheDecoder<'a, 'tcx> {
// Underlying byte-oriented decoder, positioned inside `serialized_data`.
497 opaque: opaque::Decoder<'a>,
// Borrowed views of the owning `OnDiskCache`'s lookup tables and caches;
// see the field docs on `OnDiskCache` for their meaning.
498 source_map: &'a SourceMap,
499 cnum_map: &'a IndexVec<CrateNum, Option<CrateNum>>,
500 file_index_to_file: &'a Lock<FxHashMap<SourceFileIndex, Lrc<SourceFile>>>,
501 file_index_to_stable_id: &'a FxHashMap<SourceFileIndex, StableSourceFileId>,
502 alloc_decoding_session: AllocDecodingSession<'a>,
503 syntax_contexts: &'a FxHashMap<u32, AbsoluteBytePos>,
504 expn_data: &'a FxHashMap<u32, AbsoluteBytePos>,
505 hygiene_context: &'a HygieneDecodeContext,
508 impl<'a, 'tcx> CacheDecoder<'a, 'tcx> {
// Resolves a `SourceFileIndex` to the corresponding `SourceFile` of the
// *current* session, memoizing the lookup in `file_index_to_file`.
509 fn file_index_to_file(&self, index: SourceFileIndex) -> Lrc<SourceFile> {
511 ref file_index_to_file,
512 ref file_index_to_stable_id,
// The `StableSourceFileId` survives across sessions, unlike raw indices.
521 let stable_id = file_index_to_stable_id[&index];
523 .source_file_by_stable_id(stable_id)
524 .expect("failed to lookup `SourceFile` in new context")
// Abstraction over decoders that can report their current byte position,
// needed by `decode_tagged` to verify payload lengths.
530 trait DecoderWithPosition: Decoder {
531 fn position(&self) -> usize;
534 impl<'a> DecoderWithPosition for opaque::Decoder<'a> {
535 fn position(&self) -> usize {
540 impl<'a, 'tcx> DecoderWithPosition for CacheDecoder<'a, 'tcx> {
541 fn position(&self) -> usize {
542 self.opaque.position()
546 // Decodes something that was encoded with `encode_tagged()` and verify that the
547 // tag matches and the correct amount of bytes was read.
548 fn decode_tagged<D, T, V>(decoder: &mut D, expected_tag: T) -> Result<V, D::Error>
550 T: Decodable + Eq + ::std::fmt::Debug,
552 D: DecoderWithPosition,
554 let start_pos = decoder.position();
556 let actual_tag = T::decode(decoder)?;
557 assert_eq!(actual_tag, expected_tag);
558 let value = V::decode(decoder)?;
559 let end_pos = decoder.position();
// The encoder wrote the tag+value byte length after the value; re-check it
// here so truncated or shifted reads are caught immediately.
561 let expected_len: u64 = Decodable::decode(decoder)?;
562 assert_eq!((end_pos - start_pos) as u64, expected_len);
567 impl<'a, 'tcx> TyDecoder<'tcx> for CacheDecoder<'a, 'tcx> {
569 fn tcx(&self) -> TyCtxt<'tcx> {
574 fn position(&self) -> usize {
575 self.opaque.position()
// Peeks the next byte without advancing; used for shorthand detection.
579 fn peek_byte(&self) -> u8 {
580 self.opaque.data[self.opaque.position()]
// Memoizes decoded `Ty` shorthands in the tcx-level cache so repeated
// back-references to the same position decode to the same interned type.
583 fn cached_ty_for_shorthand<F>(
587 ) -> Result<Ty<'tcx>, Self::Error>
589 F: FnOnce(&mut Self) -> Result<Ty<'tcx>, Self::Error>,
591 let tcx = self.tcx();
// `ReservedForIncrCompCache` keeps these entries distinct from
// crate-metadata cache entries keyed by real `CrateNum`s.
594 ty::CReaderCacheKey { cnum: CrateNum::ReservedForIncrCompCache, pos: shorthand };
596 if let Some(&ty) = tcx.ty_rcache.borrow().get(&cache_key) {
600 let ty = or_insert_with(self)?;
601 // This may overwrite the entry, but it should overwrite with the same value.
602 tcx.ty_rcache.borrow_mut().insert_same(cache_key, ty);
// Same memoization scheme as above, but for `ty::Predicate` shorthands.
606 fn cached_predicate_for_shorthand<F>(
610 ) -> Result<ty::Predicate<'tcx>, Self::Error>
612 F: FnOnce(&mut Self) -> Result<ty::Predicate<'tcx>, Self::Error>,
614 let tcx = self.tcx();
617 ty::CReaderCacheKey { cnum: CrateNum::ReservedForIncrCompCache, pos: shorthand };
619 if let Some(&pred) = tcx.pred_rcache.borrow().get(&cache_key) {
623 let pred = or_insert_with(self)?;
624 // This may overwrite the entry, but it should overwrite with the same value.
625 tcx.pred_rcache.borrow_mut().insert_same(cache_key, pred);
// Temporarily re-points the underlying opaque decoder at `pos`, runs `f`,
// then restores the original decoder (and thus the original position).
629 fn with_position<F, R>(&mut self, pos: usize, f: F) -> R
631 F: FnOnce(&mut Self) -> R,
633 debug_assert!(pos < self.opaque.data.len());
635 let new_opaque = opaque::Decoder::new(self.opaque.data, pos);
636 let old_opaque = mem::replace(&mut self.opaque, new_opaque);
638 self.opaque = old_opaque;
// Remaps a previous-session `CrateNum` via `cnum_map`; a missing entry is a
// compiler bug (the crate should have been matched by name+disambiguator).
642 fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum {
643 self.cnum_map[cnum].unwrap_or_else(|| bug!("could not find new `CrateNum` for {:?}", cnum))
// Generates the remaining `Decoder` boilerplate for `CacheDecoder`.
647 implement_ty_decoder!(CacheDecoder<'a, 'tcx>);
649 impl<'a, 'tcx> SpecializedDecoder<SyntaxContext> for CacheDecoder<'a, 'tcx> {
650 fn specialized_decode(&mut self) -> Result<SyntaxContext, Self::Error> {
651 let syntax_contexts = self.syntax_contexts;
652 rustc_span::hygiene::decode_syntax_context(self, self.hygiene_context, |this, id| {
653 // This closure is invoked if we haven't already decoded the data for the `SyntaxContext` we are deserializing.
654 // We look up the position of the associated `SyntaxData` and decode it.
655 let pos = syntax_contexts.get(&id).unwrap();
656 this.with_position(pos.to_usize(), |decoder| {
657 let data: SyntaxContextData = decode_tagged(decoder, TAG_SYNTAX_CONTEXT)?;
664 impl<'a, 'tcx> SpecializedDecoder<ExpnId> for CacheDecoder<'a, 'tcx> {
665 fn specialized_decode(&mut self) -> Result<ExpnId, Self::Error> {
666 let expn_data = self.expn_data;
667 rustc_span::hygiene::decode_expn_id(
669 ExpnDataDecodeMode::incr_comp(self.hygiene_context),
671 // This closure is invoked if we haven't already decoded the data for the `ExpnId` we are deserializing.
672 // We look up the position of the associated `ExpnData` and decode it.
675 .unwrap_or_else(|| panic!("Bad index {:?} (map {:?})", index, expn_data));
677 this.with_position(pos.to_usize(), |decoder| {
678 let data: ExpnData = decode_tagged(decoder, TAG_EXPN_DATA)?;
// `AllocId`s are decoded through the shared per-decode session so that the
// same serialized id always maps to the same new allocation.
686 impl<'a, 'tcx> SpecializedDecoder<interpret::AllocId> for CacheDecoder<'a, 'tcx> {
687 fn specialized_decode(&mut self) -> Result<interpret::AllocId, Self::Error> {
688 let alloc_decoding_session = self.alloc_decoding_session;
689 alloc_decoding_session.decode_alloc_id(self)
// Spans are stored as (file, line, col, len, ctxt) and rebased against the
// current session's `SourceMap`; invalid spans decode to a dummy-like span.
693 impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
694 fn specialized_decode(&mut self) -> Result<Span, Self::Error> {
695 let tag: u8 = Decodable::decode(self)?;
697 if tag == TAG_INVALID_SPAN {
700 debug_assert_eq!(tag, TAG_VALID_SPAN);
703 let file_lo_index = SourceFileIndex::decode(self)?;
704 let line_lo = usize::decode(self)?;
705 let col_lo = BytePos::decode(self)?;
706 let len = BytePos::decode(self)?;
707 let ctxt = SyntaxContext::decode(self)?;
709 let file_lo = self.file_index_to_file(file_lo_index);
// `lines` holds the start offset of each line; line numbers are 1-based.
710 let lo = file_lo.lines[line_lo - 1] + col_lo;
713 Ok(Span::new(lo, hi, ctxt))
717 impl<'a, 'tcx> SpecializedDecoder<Ident> for CacheDecoder<'a, 'tcx> {
718 fn specialized_decode(&mut self) -> Result<Ident, Self::Error> {
719 // FIXME: Handle hygiene in incremental
720 bug!("Trying to decode Ident for incremental");
724 // This impl makes sure that we get a runtime error when we try decode a
725 // `DefIndex` that is not contained in a `DefId`. Such a case would be problematic
726 // because we would not know how to transform the `DefIndex` to the current
728 impl<'a, 'tcx> SpecializedDecoder<DefIndex> for CacheDecoder<'a, 'tcx> {
729 fn specialized_decode(&mut self) -> Result<DefIndex, Self::Error> {
730 bug!("trying to decode `DefIndex` outside the context of a `DefId`")
734 // Both the `CrateNum` and the `DefIndex` of a `DefId` can change in between two
735 // compilation sessions. We use the `DefPathHash`, which is stable across
736 // sessions, to map the old `DefId` to the new one.
737 impl<'a, 'tcx> SpecializedDecoder<DefId> for CacheDecoder<'a, 'tcx> {
739 fn specialized_decode(&mut self) -> Result<DefId, Self::Error> {
740 // Load the `DefPathHash`, which is what we encoded the `DefId` as.
741 let def_path_hash = DefPathHash::decode(self)?;
743 // Using the `DefPathHash`, we can lookup the new `DefId`.
744 Ok(self.tcx().def_path_hash_to_def_id.as_ref().unwrap()[&def_path_hash])
// `LocalDefId` round-trips through `DefId`; the decoded id must be local.
748 impl<'a, 'tcx> SpecializedDecoder<LocalDefId> for CacheDecoder<'a, 'tcx> {
750 fn specialized_decode(&mut self) -> Result<LocalDefId, Self::Error> {
751 Ok(DefId::decode(self)?.expect_local())
// Fingerprints bypass the specialized machinery and read raw bytes.
755 impl<'a, 'tcx> SpecializedDecoder<Fingerprint> for CacheDecoder<'a, 'tcx> {
756 fn specialized_decode(&mut self) -> Result<Fingerprint, Self::Error> {
757 Fingerprint::decode_opaque(&mut self.opaque)
761 //- ENCODING -------------------------------------------------------------------
763 /// An encoder that can write the incr. comp. cache.
764 struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
// Back-reference tables: positions of already-encoded types/predicates so
// later occurrences can be written as compact shorthands.
767 type_shorthands: FxHashMap<Ty<'tcx>, usize>,
768 predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
// Insertion-ordered set of every `AllocId` referenced during encoding;
// `serialize` drains it iteratively (encoding may discover new ids).
769 interpret_allocs: FxIndexSet<interpret::AllocId>,
770 source_map: CachingSourceMapView<'tcx>,
// Maps a `SourceFile`'s address to its per-cache-file index (pointer keys
// are valid because source files live for the whole session).
771 file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>,
772 hygiene_context: &'a HygieneEncodeContext,
775 impl<'a, 'tcx, E> CacheEncoder<'a, 'tcx, E>
// Looks up the per-cache-file index assigned to `source_file` during the
// table-building pass in `serialize`; keyed by pointer identity.
779 fn source_file_index(&mut self, source_file: Lrc<SourceFile>) -> SourceFileIndex {
780 self.file_to_file_index[&(&*source_file as *const SourceFile)]
783 /// Encode something with additional information that allows to do some
784 /// sanity checks when decoding the data again. This method will first
785 /// encode the specified tag, then the given value, then the number of
786 /// bytes taken up by tag and value. On decoding, we can then verify that
787 /// we get the expected tag and read the expected number of bytes.
788 fn encode_tagged<T: Encodable, V: Encodable>(
792 ) -> Result<(), E::Error> {
793 let start_pos = self.position();
798 let end_pos = self.position();
799 ((end_pos - start_pos) as u64).encode(self)
// `AllocId`s are encoded as their index in `interpret_allocs`; the actual
// allocation payloads are written later by `serialize`.
803 impl<'a, 'tcx, E> SpecializedEncoder<interpret::AllocId> for CacheEncoder<'a, 'tcx, E>
807 fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
808 let (index, _) = self.interpret_allocs.insert_full(*alloc_id);
813 impl<'a, 'tcx, E> SpecializedEncoder<SyntaxContext> for CacheEncoder<'a, 'tcx, E>
817 fn specialized_encode(&mut self, ctxt: &SyntaxContext) -> Result<(), Self::Error> {
818 rustc_span::hygiene::raw_encode_syntax_context(*ctxt, self.hygiene_context, self)
822 impl<'a, 'tcx, E> SpecializedEncoder<ExpnId> for CacheEncoder<'a, 'tcx, E>
826 fn specialized_encode(&mut self, expn: &ExpnId) -> Result<(), Self::Error> {
827 rustc_span::hygiene::raw_encode_expn_id(
829 self.hygiene_context,
830 ExpnDataEncodeMode::IncrComp,
// Spans are stored as (file index, line, col, len, ctxt). Anything that
// cannot be mapped to a single file is written as `TAG_INVALID_SPAN`.
836 impl<'a, 'tcx, E> SpecializedEncoder<Span> for CacheEncoder<'a, 'tcx, E>
840 fn specialized_encode(&mut self, span: &Span) -> Result<(), Self::Error> {
841 if *span == DUMMY_SP {
842 return TAG_INVALID_SPAN.encode(self);
845 let span_data = span.data();
846 let (file_lo, line_lo, col_lo) =
847 match self.source_map.byte_pos_to_line_and_col(span_data.lo) {
849 None => return TAG_INVALID_SPAN.encode(self),
// Spans crossing file boundaries cannot be rebased on decode.
852 if !file_lo.contains(span_data.hi) {
853 return TAG_INVALID_SPAN.encode(self);
856 let len = span_data.hi - span_data.lo;
858 let source_file_index = self.source_file_index(file_lo);
860 TAG_VALID_SPAN.encode(self)?;
861 source_file_index.encode(self)?;
862 line_lo.encode(self)?;
863 col_lo.encode(self)?;
865 span_data.ctxt.encode(self)?;
870 impl<'a, 'tcx, E> SpecializedEncoder<Ident> for CacheEncoder<'a, 'tcx, E>
872 E: 'a + ty_codec::TyEncoder,
874 fn specialized_encode(&mut self, _: &Ident) -> Result<(), Self::Error> {
875 // We don't currently encode enough information to ensure hygiene works
876 // with incremental, so panic rather than risk incremental bugs.
878 // FIXME: handle hygiene in incremental.
879 bug!("trying to encode `Ident` for incremental");
883 impl<'a, 'tcx, E> ty_codec::TyEncoder for CacheEncoder<'a, 'tcx, E>
888 fn position(&self) -> usize {
889 self.encoder.position()
// `CrateNum`s are written raw; decode-side remapping handles session changes.
893 impl<'a, 'tcx, E> SpecializedEncoder<CrateNum> for CacheEncoder<'a, 'tcx, E>
898 fn specialized_encode(&mut self, cnum: &CrateNum) -> Result<(), Self::Error> {
899 self.emit_u32(cnum.as_u32())
903 impl<'a, 'b, 'c, 'tcx, E> SpecializedEncoder<&'b ty::TyS<'c>> for CacheEncoder<'a, 'tcx, E>
906 &'b ty::TyS<'c>: UseSpecializedEncodable,
909 fn specialized_encode(&mut self, ty: &&'b ty::TyS<'c>) -> Result<(), Self::Error> {
// SAFETY relies on the `lift` debug-check: the value must already be
// interned for `'tcx`, so transmuting the lifetime is sound here.
910 debug_assert!(self.tcx.lift(ty).is_some());
911 let ty = unsafe { std::mem::transmute::<&&'b ty::TyS<'c>, &&'tcx ty::TyS<'tcx>>(ty) };
912 ty_codec::encode_with_shorthand(self, ty, |encoder| &mut encoder.type_shorthands)
916 impl<'a, 'b, 'tcx, E> SpecializedEncoder<ty::Predicate<'b>> for CacheEncoder<'a, 'tcx, E>
921 fn specialized_encode(&mut self, predicate: &ty::Predicate<'b>) -> Result<(), Self::Error> {
922 debug_assert!(self.tcx.lift(predicate).is_some());
924 unsafe { std::mem::transmute::<&ty::Predicate<'b>, &ty::Predicate<'tcx>>(predicate) };
925 ty_codec::encode_with_shorthand(self, predicate, |encoder| {
926 &mut encoder.predicate_shorthands
// `DefId`s are encoded as their session-stable `DefPathHash`; see the
// matching `SpecializedDecoder<DefId>` impl.
931 impl<'a, 'tcx, E> SpecializedEncoder<DefId> for CacheEncoder<'a, 'tcx, E>
936 fn specialized_encode(&mut self, id: &DefId) -> Result<(), Self::Error> {
937 let def_path_hash = self.tcx.def_path_hash(*id);
938 def_path_hash.encode(self)
942 impl<'a, 'tcx, E> SpecializedEncoder<LocalDefId> for CacheEncoder<'a, 'tcx, E>
947 fn specialized_encode(&mut self, id: &LocalDefId) -> Result<(), Self::Error> {
948 id.to_def_id().encode(self)
// Mirrors the decoder: a bare `DefIndex` must never be encoded.
952 impl<'a, 'tcx, E> SpecializedEncoder<DefIndex> for CacheEncoder<'a, 'tcx, E>
956 fn specialized_encode(&mut self, _: &DefIndex) -> Result<(), Self::Error> {
957 bug!("encoding `DefIndex` without context");
961 impl<'a, 'tcx> SpecializedEncoder<Fingerprint> for CacheEncoder<'a, 'tcx, opaque::Encoder> {
962 fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
963 f.encode_opaque(&mut self.encoder)
// Generates forwarding `Encoder` methods that delegate to the inner encoder.
967 macro_rules! encoder_methods {
968 ($($name:ident($ty:ty);)*) => {
970 $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> {
971 self.encoder.$name(value)
// The `Encoder` impl for `CacheEncoder` is pure delegation (via the macro
// above) to the wrapped encoder `E`.
976 impl<'a, 'tcx, E> Encoder for CacheEncoder<'a, 'tcx, E>
980 type Error = E::Error;
983 fn emit_unit(&mut self) -> Result<(), Self::Error> {
1010 // An integer that will always encode to 8 bytes.
// Used for the footer position, which must have a fixed size so it can be
// read back from the last `ENCODED_SIZE` bytes of the file.
1011 struct IntEncodedWithFixedSize(u64);
1013 impl IntEncodedWithFixedSize {
1014 pub const ENCODED_SIZE: usize = 8;
1017 impl UseSpecializedEncodable for IntEncodedWithFixedSize {}
1018 impl UseSpecializedDecodable for IntEncodedWithFixedSize {}
// Little-endian, byte-at-a-time encoding; asserts exactly 8 bytes written.
1020 impl SpecializedEncoder<IntEncodedWithFixedSize> for opaque::Encoder {
1021 fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> {
1022 let start_pos = self.position();
1023 for i in 0..IntEncodedWithFixedSize::ENCODED_SIZE {
1024 ((x.0 >> (i * 8)) as u8).encode(self)?;
1026 let end_pos = self.position();
1027 assert_eq!((end_pos - start_pos), IntEncodedWithFixedSize::ENCODED_SIZE);
// Inverse of the encoder above; asserts exactly 8 bytes consumed.
1032 impl<'a> SpecializedDecoder<IntEncodedWithFixedSize> for opaque::Decoder<'a> {
1033 fn specialized_decode(&mut self) -> Result<IntEncodedWithFixedSize, Self::Error> {
1034 let mut value: u64 = 0;
1035 let start_pos = self.position();
1037 for i in 0..IntEncodedWithFixedSize::ENCODED_SIZE {
1038 let byte: u8 = Decodable::decode(self)?;
1039 value |= (byte as u64) << (i * 8);
1042 let end_pos = self.position();
1043 assert_eq!((end_pos - start_pos), IntEncodedWithFixedSize::ENCODED_SIZE);
1045 Ok(IntEncodedWithFixedSize(value))
// Writes every cacheable result of query `Q` into `encoder`, appending a
// (dep-node, byte position) pair to `query_result_index` for each entry.
// NOTE(review): the function body continues past the end of this view.
1049 fn encode_query_results<'a, 'tcx, Q, E>(
1051 encoder: &mut CacheEncoder<'a, 'tcx, E>,
1052 query_result_index: &mut EncodedQueryResultIndex,
1053 ) -> Result<(), E::Error>
1055 Q: super::QueryDescription<TyCtxt<'tcx>> + super::QueryAccessors<TyCtxt<'tcx>>,
1056 Q::Value: Encodable,
1062 .extra_verbose_generic_activity("encode_query_results_for", ::std::any::type_name::<Q>());
1064 let state = Q::query_state(tcx);
// All queries must have finished executing before we can snapshot results.
1065 assert!(state.all_inactive());
1067 state.iter_results(|results| {
1068 for (key, value, dep_node) in results {
// `cache_on_disk` lets each query opt out of caching particular entries.
1069 if Q::cache_on_disk(tcx, &key, Some(&value)) {
1070 let dep_node = SerializedDepNodeIndex::new(dep_node.index());
1072 // Record position of the cache entry.
1073 query_result_index.push((dep_node, AbsoluteBytePos::new(encoder.position())));
1075 // Encode the type check tables with the `SerializedDepNodeIndex`
1077 encoder.encode_tagged(dep_node, &value)?;