src/librustc/ich/hcx.rs (rust.git): Fix stable hashing of AllocIds
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use hir;
use hir::def_id::{DefId, DefIndex};
use hir::map::DefPathHash;
use hir::map::definitions::Definitions;
use ich::{self, CachingCodemapView, Fingerprint};
use middle::cstore::CrateStore;
use ty::{TyCtxt, fast_reject};
use mir::interpret::AllocId;
use session::Session;

use std::cmp::Ord;
use std::hash as std_hash;
use std::collections::HashMap;
use std::cell::RefCell;

use syntax::ast;

use syntax::codemap::CodeMap;
use syntax::ext::hygiene::SyntaxContext;
use syntax::symbol::Symbol;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::hygiene;

use rustc_data_structures::stable_hasher::{HashStable,
                                           StableHasher, StableHasherResult,
                                           ToStableHashKey};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};

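/// Builds the set of attribute names that are ignored when computing the
/// incremental compilation hash (the entries of `ich::IGNORED_ATTRIBUTES`,
/// interned as `Symbol`s).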
pub fn compute_ignored_attr_names() -> FxHashSet<Symbol> {
    debug_assert!(ich::IGNORED_ATTRIBUTES.len() > 0);
    ich::IGNORED_ATTRIBUTES.iter().map(|&s| Symbol::intern(s)).collect()
}

/// This is the context state available during incr. comp. hashing. It contains
/// enough information to transform DefIds and HirIds into stable DefPaths (i.e.
/// a reference to the TyCtxt) and it holds a few caches for speeding up various
/// things (e.g. each DefId/DefPath is only hashed once).
#[derive(Clone)]
pub struct StableHashingContext<'a> {
    sess: &'a Session,
    definitions: &'a Definitions,
    cstore: &'a dyn CrateStore,
    body_resolver: BodyResolver<'a>,
    hash_spans: bool,
    hash_bodies: bool,
    node_id_hashing_mode: NodeIdHashingMode,

    // Very often, we are hashing something that does not need the
    // CachingCodemapView, so we initialize it lazily.
    raw_codemap: &'a CodeMap,
    caching_codemap: Option<CachingCodemapView<'a>>,

    pub(super) alloc_id_recursion_tracker: FxHashSet<AllocId>,
}

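/// Controls how `NodeId`s and `HirId`s contribute to the stable hash: with
/// `Ignore` they are skipped entirely, with `HashDefPath` they are hashed via
/// the `DefPathHash` of their owner plus the owner-local id.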
#[derive(PartialEq, Eq, Clone, Copy)]
pub enum NodeIdHashingMode {
    Ignore,
    HashDefPath,
}

/// The BodyResolver allows mapping a BodyId to the corresponding hir::Body.
/// We could also just store a plain reference to the hir::Crate, but we want
/// to avoid the crate being used to get untracked access to all of the HIR.
#[derive(Clone, Copy)]
struct BodyResolver<'gcx>(&'gcx hir::Crate);

impl<'gcx> BodyResolver<'gcx> {
    // Return a reference to the hir::Body with the given BodyId.
    // DOES NOT DO ANY TRACKING, use carefully.
    fn body(self, id: hir::BodyId) -> &'gcx hir::Body {
        self.0.body(id)
    }
}

impl<'a> StableHashingContext<'a> {
    // The `krate` here is only used for mapping BodyIds to Bodies.
    // Don't use it for anything else or you'll run the risk of
    // leaking data out of the tracking system.
    pub fn new(sess: &'a Session,
               krate: &'a hir::Crate,
               definitions: &'a Definitions,
               cstore: &'a dyn CrateStore)
               -> Self {
        let hash_spans_initial = !sess.opts.debugging_opts.incremental_ignore_spans;

        StableHashingContext {
            sess,
            body_resolver: BodyResolver(krate),
            definitions,
            cstore,
            caching_codemap: None,
            raw_codemap: sess.codemap(),
            hash_spans: hash_spans_initial,
            hash_bodies: true,
            node_id_hashing_mode: NodeIdHashingMode::HashDefPath,
            alloc_id_recursion_tracker: Default::default(),
        }
    }

    #[inline]
    pub fn sess(&self) -> &'a Session {
        self.sess
    }

    #[inline]
    pub fn while_hashing_hir_bodies<F: FnOnce(&mut Self)>(&mut self,
                                                          hash_bodies: bool,
                                                          f: F) {
        let prev_hash_bodies = self.hash_bodies;
        self.hash_bodies = hash_bodies;
        f(self);
        self.hash_bodies = prev_hash_bodies;
    }

    #[inline]
    pub fn while_hashing_spans<F: FnOnce(&mut Self)>(&mut self,
                                                     hash_spans: bool,
                                                     f: F) {
        let prev_hash_spans = self.hash_spans;
        self.hash_spans = hash_spans;
        f(self);
        self.hash_spans = prev_hash_spans;
    }

    #[inline]
    pub fn with_node_id_hashing_mode<F: FnOnce(&mut Self)>(&mut self,
                                                           mode: NodeIdHashingMode,
                                                           f: F) {
        let prev = self.node_id_hashing_mode;
        self.node_id_hashing_mode = mode;
        f(self);
        self.node_id_hashing_mode = prev;
    }

    #[inline]
    pub fn def_path_hash(&self, def_id: DefId) -> DefPathHash {
        if def_id.is_local() {
            self.definitions.def_path_hash(def_id.index)
        } else {
            self.cstore.def_path_hash(def_id)
        }
    }

    #[inline]
    pub fn local_def_path_hash(&self, def_index: DefIndex) -> DefPathHash {
        self.definitions.def_path_hash(def_index)
    }

    #[inline]
    pub fn node_to_hir_id(&self, node_id: ast::NodeId) -> hir::HirId {
        self.definitions.node_to_hir_id(node_id)
    }

    #[inline]
    pub fn hash_bodies(&self) -> bool {
        self.hash_bodies
    }

    #[inline]
    pub fn codemap(&mut self) -> &mut CachingCodemapView<'a> {
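        // Create the CachingCodemapView on first use and reuse it afterwards.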
        match self.caching_codemap {
            Some(ref mut cm) => {
                cm
            }
            ref mut none => {
                *none = Some(CachingCodemapView::new(self.raw_codemap));
                none.as_mut().unwrap()
            }
        }
    }

    #[inline]
    pub fn is_ignored_attr(&self, name: Symbol) -> bool {
        self.sess.ignored_attr_names.contains(&name)
    }

    pub fn hash_hir_item_like<F: FnOnce(&mut Self)>(&mut self, f: F) {
        let prev_hash_node_ids = self.node_id_hashing_mode;
        self.node_id_hashing_mode = NodeIdHashingMode::Ignore;

        f(self);

        self.node_id_hashing_mode = prev_hash_node_ids;
    }
}
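
// Illustrative sketch (not part of the actual source): `HashStable` impls
// typically use the save/restore helpers above to temporarily change how
// hashing behaves; the previous state is restored when the closure returns.
// `MyHirWrapper` and its fields are hypothetical.
//
//     impl<'a> HashStable<StableHashingContext<'a>> for MyHirWrapper {
//         fn hash_stable<W: StableHasherResult>(&self,
//                                               hcx: &mut StableHashingContext<'a>,
//                                               hasher: &mut StableHasher<W>) {
//             // Hash the item-like part without NodeIds contributing...
//             hcx.hash_hir_item_like(|hcx| self.item_part.hash_stable(hcx, hasher));
//             // ...and hash the signature while skipping nested bodies.
//             hcx.while_hashing_hir_bodies(false, |hcx| {
//                 self.signature.hash_stable(hcx, hasher)
//             });
//         }
//     }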

/// Something that can provide a stable hashing context.
pub trait StableHashingContextProvider<'a> {
    fn get_stable_hashing_context(&self) -> StableHashingContext<'a>;
}

impl<'a, 'b, T: StableHashingContextProvider<'a>> StableHashingContextProvider<'a>
for &'b T {
    fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
        (**self).get_stable_hashing_context()
    }
}

impl<'a, 'b, T: StableHashingContextProvider<'a>> StableHashingContextProvider<'a>
for &'b mut T {
    fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
        (**self).get_stable_hashing_context()
    }
}

impl<'a, 'gcx, 'lcx> StableHashingContextProvider<'a> for TyCtxt<'a, 'gcx, 'lcx> {
    fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
        (*self).create_stable_hashing_context()
    }
}

impl<'a> StableHashingContextProvider<'a> for StableHashingContext<'a> {
    fn get_stable_hashing_context(&self) -> StableHashingContext<'a> {
        self.clone()
    }
}

impl<'a> ::dep_graph::DepGraphSafe for StableHashingContext<'a> {
}


impl<'a> HashStable<StableHashingContext<'a>> for hir::BodyId {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a>,
                                          hasher: &mut StableHasher<W>) {
        if hcx.hash_bodies() {
            hcx.body_resolver.body(*self).hash_stable(hcx, hasher);
        }
    }
}

impl<'a> HashStable<StableHashingContext<'a>> for hir::HirId {
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a>,
                                          hasher: &mut StableHasher<W>) {
        match hcx.node_id_hashing_mode {
            NodeIdHashingMode::Ignore => {
                // Don't do anything.
            }
            NodeIdHashingMode::HashDefPath => {
                let hir::HirId {
                    owner,
                    local_id,
                } = *self;

                hcx.local_def_path_hash(owner).hash_stable(hcx, hasher);
                local_id.hash_stable(hcx, hasher);
            }
        }
    }
}

impl<'a> ToStableHashKey<StableHashingContext<'a>> for hir::HirId {
    type KeyType = (DefPathHash, hir::ItemLocalId);

    #[inline]
    fn to_stable_hash_key(&self,
                          hcx: &StableHashingContext<'a>)
                          -> (DefPathHash, hir::ItemLocalId) {
        let def_path_hash = hcx.local_def_path_hash(self.owner);
        (def_path_hash, self.local_id)
    }
}

impl<'a> HashStable<StableHashingContext<'a>> for ast::NodeId {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a>,
                                          hasher: &mut StableHasher<W>) {
        match hcx.node_id_hashing_mode {
            NodeIdHashingMode::Ignore => {
                // Don't do anything.
            }
            NodeIdHashingMode::HashDefPath => {
                hcx.definitions.node_to_hir_id(*self).hash_stable(hcx, hasher);
            }
        }
    }
}

impl<'a> ToStableHashKey<StableHashingContext<'a>> for ast::NodeId {
    type KeyType = (DefPathHash, hir::ItemLocalId);

    #[inline]
    fn to_stable_hash_key(&self,
                          hcx: &StableHashingContext<'a>)
                          -> (DefPathHash, hir::ItemLocalId) {
        hcx.definitions.node_to_hir_id(*self).to_stable_hash_key(hcx)
    }
}

impl<'a> HashStable<StableHashingContext<'a>> for Span {

    // Hash a span in a stable way. We can't directly hash the span's BytePos
    // fields (that would be similar to hashing pointers, since those are just
    // offsets into the CodeMap). Instead, we hash the (file name, line, column)
    // triple, which stays the same even if the containing FileMap has moved
    // within the CodeMap.
    // Also note that we are hashing byte offsets for the column, not unicode
    // codepoint offsets. For the purpose of the hash that's sufficient.
    // Also, hashing filenames is expensive so we avoid doing it twice when the
    // span starts and ends in the same file, which is almost always the case.
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a>,
                                          hasher: &mut StableHasher<W>) {
        const TAG_VALID_SPAN: u8 = 0;
        const TAG_INVALID_SPAN: u8 = 1;
        const TAG_EXPANSION: u8 = 0;
        const TAG_NO_EXPANSION: u8 = 1;

        if !hcx.hash_spans {
            return
        }

        if *self == DUMMY_SP {
            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
        }

        // If this is not an empty or invalid span, we want to hash the last
        // position that belongs to it, as opposed to hashing the first
        // position past it.
        let span = self.data();

        if span.hi < span.lo {
            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
        }

        let (file_lo, line_lo, col_lo) = match hcx.codemap()
                                                  .byte_pos_to_line_and_col(span.lo) {
            Some(pos) => pos,
            None => {
                return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
            }
        };

        if !file_lo.contains(span.hi) {
            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
        }

        std_hash::Hash::hash(&TAG_VALID_SPAN, hasher);
        // We truncate the stable_id hash and line and col numbers. The chances
        // of causing a collision this way should be minimal.
        std_hash::Hash::hash(&(file_lo.name_hash as u64), hasher);

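        // Pack the column, line, and span length into a single u64:
        // bits 0..8 hold the truncated column, bits 8..32 the truncated line
        // number, and bits 32..64 the low 32 bits of the span's byte length.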
        let col = (col_lo.0 as u64) & 0xFF;
        let line = ((line_lo as u64) & 0xFF_FF_FF) << 8;
        let len = ((span.hi - span.lo).0 as u64) << 32;
        let line_col_len = col | line | len;
        std_hash::Hash::hash(&line_col_len, hasher);

        if span.ctxt == SyntaxContext::empty() {
            TAG_NO_EXPANSION.hash_stable(hcx, hasher);
        } else {
            TAG_EXPANSION.hash_stable(hcx, hasher);

            // Since the same expansion context is usually referenced many
            // times, we cache a stable hash of it and hash that instead of
            // recursing every time.
            thread_local! {
                static CACHE: RefCell<FxHashMap<hygiene::Mark, u64>> =
                    RefCell::new(FxHashMap());
            }

            let sub_hash: u64 = CACHE.with(|cache| {
                let mark = span.ctxt.outer();

                if let Some(&sub_hash) = cache.borrow().get(&mark) {
                    return sub_hash;
                }

                let mut hasher = StableHasher::new();
                mark.expn_info().hash_stable(hcx, &mut hasher);
                let sub_hash: Fingerprint = hasher.finish();
                let sub_hash = sub_hash.to_smaller_hash();
                cache.borrow_mut().insert(mark, sub_hash);
                sub_hash
            });

            sub_hash.hash_stable(hcx, hasher);
        }
    }
}

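/// Hashes a set of trait impls in a stable way: the `DefId`s are first mapped
/// to their `DefPathHash`es and the results are sorted, so the resulting hash
/// does not depend on the iteration order of the input containers.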
pub fn hash_stable_trait_impls<'a, 'gcx, W, R>(
    hcx: &mut StableHashingContext<'a>,
    hasher: &mut StableHasher<W>,
    blanket_impls: &Vec<DefId>,
    non_blanket_impls: &HashMap<fast_reject::SimplifiedType, Vec<DefId>, R>)
    where W: StableHasherResult,
          R: std_hash::BuildHasher,
{
    {
        let mut blanket_impls: AccumulateVec<[_; 8]> = blanket_impls
            .iter()
            .map(|&def_id| hcx.def_path_hash(def_id))
            .collect();

        if blanket_impls.len() > 1 {
            blanket_impls.sort_unstable();
        }

        blanket_impls.hash_stable(hcx, hasher);
    }

    {
        let mut keys: AccumulateVec<[_; 8]> =
            non_blanket_impls.keys()
                             .map(|k| (k, k.map_def(|d| hcx.def_path_hash(d))))
                             .collect();
        keys.sort_unstable_by(|&(_, ref k1), &(_, ref k2)| k1.cmp(k2));
        keys.len().hash_stable(hcx, hasher);
        for (key, ref stable_key) in keys {
            stable_key.hash_stable(hcx, hasher);
            let mut impls: AccumulateVec<[_; 8]> = non_blanket_impls[key]
                .iter()
                .map(|&impl_id| hcx.def_path_hash(impl_id))
                .collect();

            if impls.len() > 1 {
                impls.sort_unstable();
            }

            impls.hash_stable(hcx, hasher);
        }
    }
}