// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use hir;
use hir::def_id::{DefId, DefIndex};
use hir::map::DefPathHash;
use hir::map::definitions::Definitions;
use ich::{self, CachingCodemapView, Fingerprint};
use middle::cstore::CrateStore;
use ty::{TyCtxt, fast_reject};
use session::Session;

use std::cmp::Ord;
use std::hash as std_hash;
use std::collections::HashMap;
use std::cell::RefCell;

use syntax::ast;

use syntax::codemap::CodeMap;
use syntax::ext::hygiene::SyntaxContext;
use syntax::symbol::Symbol;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::hygiene;

use rustc_data_structures::stable_hasher::{HashStable, StableHashingContextProvider,
                                           StableHasher, StableHasherResult,
                                           ToStableHashKey};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};

pub fn compute_ignored_attr_names() -> FxHashSet<Symbol> {
    debug_assert!(ich::IGNORED_ATTRIBUTES.len() > 0);
    ich::IGNORED_ATTRIBUTES.iter().map(|&s| Symbol::intern(s)).collect()
}

/// This is the context state available during incr. comp. hashing. It contains
/// enough information to transform DefIds and HirIds into stable DefPathHashes
/// (i.e. references to the Definitions and the CrateStore), and it holds a few
/// caches for speeding up various things (e.g. each DefId/DefPath is only
/// hashed once).
#[derive(Clone)]
pub struct StableHashingContext<'gcx> {
    sess: &'gcx Session,
    definitions: &'gcx Definitions,
    cstore: &'gcx CrateStore,
    body_resolver: BodyResolver<'gcx>,
    hash_spans: bool,
    hash_bodies: bool,
    node_id_hashing_mode: NodeIdHashingMode,

    // Very often, we are hashing something that does not need the
    // CachingCodemapView, so we initialize it lazily.
    raw_codemap: &'gcx CodeMap,
    caching_codemap: Option<CachingCodemapView<'gcx>>,
}

#[derive(PartialEq, Eq, Clone, Copy)]
pub enum NodeIdHashingMode {
    Ignore,
    HashDefPath,
}
/// The BodyResolver allows mapping a BodyId to the corresponding hir::Body.
/// We could also just store a plain reference to the hir::Crate, but we want
/// to prevent the crate from being used to get untracked access to all of the
/// HIR.
#[derive(Clone, Copy)]
struct BodyResolver<'gcx>(&'gcx hir::Crate);

impl<'gcx> BodyResolver<'gcx> {
    // Return a reference to the hir::Body with the given BodyId.
    // DOES NOT DO ANY TRACKING, use carefully.
    fn body(self, id: hir::BodyId) -> &'gcx hir::Body {
        self.0.body(id)
    }
}
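
// A minimal, self-contained sketch (test-only and illustrative; the names
// below are hypothetical and not part of this module) of the pattern
// BodyResolver uses: wrap a reference in a newtype that exposes a single,
// narrow accessor, so holders of the wrapper cannot reach the rest of the
// wrapped data through it.
#[cfg(test)]
mod body_resolver_pattern_sketch {
    struct Wide {
        wanted: u32,
        everything_else: u32,
    }

    // Only `wanted` is reachable through the wrapper.
    struct Narrow<'a>(&'a Wide);

    impl<'a> Narrow<'a> {
        fn wanted(&self) -> u32 {
            self.0.wanted
        }
    }

    #[test]
    fn only_the_narrow_accessor_is_exposed() {
        let wide = Wide { wanted: 1, everything_else: 2 };
        assert_eq!(Narrow(&wide).wanted(), 1);
        assert_eq!(wide.everything_else, 2);
    }
}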

impl<'gcx> StableHashingContext<'gcx> {
    // The `krate` here is only used for mapping BodyIds to Bodies.
    // Don't use it for anything else or you'll run the risk of
    // leaking data out of the tracking system.
    pub fn new(sess: &'gcx Session,
               krate: &'gcx hir::Crate,
               definitions: &'gcx Definitions,
               cstore: &'gcx CrateStore)
               -> Self {
        let hash_spans_initial = !sess.opts.debugging_opts.incremental_ignore_spans;

        StableHashingContext {
            sess,
            body_resolver: BodyResolver(krate),
            definitions,
            cstore,
            caching_codemap: None,
            raw_codemap: sess.codemap(),
            hash_spans: hash_spans_initial,
            hash_bodies: true,
            node_id_hashing_mode: NodeIdHashingMode::HashDefPath,
        }
    }

    #[inline]
    pub fn sess(&self) -> &'gcx Session {
        self.sess
    }

    #[inline]
    pub fn while_hashing_hir_bodies<F: FnOnce(&mut Self)>(&mut self,
                                                          hash_bodies: bool,
                                                          f: F) {
        let prev_hash_bodies = self.hash_bodies;
        self.hash_bodies = hash_bodies;
        f(self);
        self.hash_bodies = prev_hash_bodies;
    }

    #[inline]
    pub fn while_hashing_spans<F: FnOnce(&mut Self)>(&mut self,
                                                     hash_spans: bool,
                                                     f: F) {
        let prev_hash_spans = self.hash_spans;
        self.hash_spans = hash_spans;
        f(self);
        self.hash_spans = prev_hash_spans;
    }

    #[inline]
    pub fn with_node_id_hashing_mode<F: FnOnce(&mut Self)>(&mut self,
                                                           mode: NodeIdHashingMode,
                                                           f: F) {
        let prev = self.node_id_hashing_mode;
        self.node_id_hashing_mode = mode;
        f(self);
        self.node_id_hashing_mode = prev;
    }

    #[inline]
    pub fn def_path_hash(&self, def_id: DefId) -> DefPathHash {
        if def_id.is_local() {
            self.definitions.def_path_hash(def_id.index)
        } else {
            self.cstore.def_path_hash(def_id)
        }
    }

    #[inline]
    pub fn local_def_path_hash(&self, def_index: DefIndex) -> DefPathHash {
        self.definitions.def_path_hash(def_index)
    }

    #[inline]
    pub fn node_to_hir_id(&self, node_id: ast::NodeId) -> hir::HirId {
        self.definitions.node_to_hir_id(node_id)
    }

    #[inline]
    pub fn hash_bodies(&self) -> bool {
        self.hash_bodies
    }

    #[inline]
    pub fn codemap(&mut self) -> &mut CachingCodemapView<'gcx> {
        match self.caching_codemap {
            Some(ref mut cm) => {
                cm
            }
            ref mut none => {
                *none = Some(CachingCodemapView::new(self.raw_codemap));
                none.as_mut().unwrap()
            }
        }
    }

    #[inline]
    pub fn is_ignored_attr(&self, name: Symbol) -> bool {
        self.sess.ignored_attr_names.contains(&name)
    }

    pub fn hash_hir_item_like<F: FnOnce(&mut Self)>(&mut self, f: F) {
        let prev_hash_node_ids = self.node_id_hashing_mode;
        self.node_id_hashing_mode = NodeIdHashingMode::Ignore;

        f(self);

        self.node_id_hashing_mode = prev_hash_node_ids;
    }
}
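
// A minimal usage sketch of the closure-based switches above (the function
// below is hypothetical and not part of this module's API): each switch
// installs a value for the duration of the closure and restores the previous
// one afterwards, so the switches can be nested freely.
#[allow(dead_code)]
fn hash_ignoring_spans_and_node_ids<'gcx, W: StableHasherResult>(
    hcx: &mut StableHashingContext<'gcx>,
    hasher: &mut StableHasher<W>,
    span: Span,
    node_id: ast::NodeId,
) {
    hcx.while_hashing_spans(false, |hcx| {
        // With span hashing disabled, the Span impl below contributes nothing.
        span.hash_stable(hcx, hasher);

        hcx.with_node_id_hashing_mode(NodeIdHashingMode::Ignore, |hcx| {
            // Likewise, NodeIds are skipped entirely in `Ignore` mode.
            node_id.hash_stable(hcx, hasher);
        });
    });
}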

impl<'a, 'gcx, 'lcx> StableHashingContextProvider for TyCtxt<'a, 'gcx, 'lcx> {
    type ContextType = StableHashingContext<'gcx>;
    fn create_stable_hashing_context(&self) -> Self::ContextType {
        (*self).create_stable_hashing_context()
    }
}


impl<'gcx> StableHashingContextProvider for StableHashingContext<'gcx> {
    type ContextType = StableHashingContext<'gcx>;
    fn create_stable_hashing_context(&self) -> Self::ContextType {
        self.clone()
    }
}

impl<'gcx> ::dep_graph::DepGraphSafe for StableHashingContext<'gcx> {
}


impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::BodyId {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        if hcx.hash_bodies() {
            hcx.body_resolver.body(*self).hash_stable(hcx, hasher);
        }
    }
}

impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::HirId {
    #[inline]
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        match hcx.node_id_hashing_mode {
            NodeIdHashingMode::Ignore => {
                // Don't do anything.
            }
            NodeIdHashingMode::HashDefPath => {
                let hir::HirId {
                    owner,
                    local_id,
                } = *self;

                hcx.local_def_path_hash(owner).hash_stable(hcx, hasher);
                local_id.hash_stable(hcx, hasher);
            }
        }
    }
}

impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for hir::HirId {
    type KeyType = (DefPathHash, hir::ItemLocalId);

    #[inline]
    fn to_stable_hash_key(&self,
                          hcx: &StableHashingContext<'gcx>)
                          -> (DefPathHash, hir::ItemLocalId) {
        let def_path_hash = hcx.local_def_path_hash(self.owner);
        (def_path_hash, self.local_id)
    }
}

impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::NodeId {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        match hcx.node_id_hashing_mode {
            NodeIdHashingMode::Ignore => {
                // Don't do anything.
            }
            NodeIdHashingMode::HashDefPath => {
                hcx.definitions.node_to_hir_id(*self).hash_stable(hcx, hasher);
            }
        }
    }
}

impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for ast::NodeId {
    type KeyType = (DefPathHash, hir::ItemLocalId);

    #[inline]
    fn to_stable_hash_key(&self,
                          hcx: &StableHashingContext<'gcx>)
                          -> (DefPathHash, hir::ItemLocalId) {
        hcx.definitions.node_to_hir_id(*self).to_stable_hash_key(hcx)
    }
}

impl<'gcx> HashStable<StableHashingContext<'gcx>> for Span {

    // Hash a span in a stable way. We can't directly hash the span's BytePos
    // fields (that would be similar to hashing pointers, since those are just
    // offsets into the CodeMap). Instead, we hash the (file name, line, column)
    // triple, which stays the same even if the containing FileMap has moved
    // within the CodeMap.
    // Also note that we are hashing byte offsets for the column, not Unicode
    // codepoint offsets. For the purpose of the hash that's sufficient.
    // Also, hashing filenames is expensive, so we avoid doing it twice when the
    // span starts and ends in the same file, which is almost always the case.
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'gcx>,
                                          hasher: &mut StableHasher<W>) {
        const TAG_VALID_SPAN: u8 = 0;
        const TAG_INVALID_SPAN: u8 = 1;
        const TAG_EXPANSION: u8 = 0;
        const TAG_NO_EXPANSION: u8 = 1;

        if !hcx.hash_spans {
            return
        }

        if *self == DUMMY_SP {
            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
        }

        // If this is not an empty or invalid span, we want to hash the last
        // position that belongs to it, as opposed to hashing the first
        // position past it.
        let span = self.data();

        if span.hi < span.lo {
            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
        }

        let (file_lo, line_lo, col_lo) = match hcx.codemap()
                                                  .byte_pos_to_line_and_col(span.lo) {
            Some(pos) => pos,
            None => {
                return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
            }
        };

        if !file_lo.contains(span.hi) {
            return std_hash::Hash::hash(&TAG_INVALID_SPAN, hasher);
        }

        std_hash::Hash::hash(&TAG_VALID_SPAN, hasher);
        // We truncate the file's name hash and the line and column numbers.
        // The chances of causing a collision this way should be minimal.
        std_hash::Hash::hash(&(file_lo.name_hash as u64), hasher);

        let col = (col_lo.0 as u64) & 0xFF;
        let line = ((line_lo as u64) & 0xFF_FF_FF) << 8;
        let len = ((span.hi - span.lo).0 as u64) << 32;
        let line_col_len = col | line | len;
        std_hash::Hash::hash(&line_col_len, hasher);

        if span.ctxt == SyntaxContext::empty() {
            TAG_NO_EXPANSION.hash_stable(hcx, hasher);
        } else {
            TAG_EXPANSION.hash_stable(hcx, hasher);

            // Since the same expansion context is usually referenced many
            // times, we cache a stable hash of it and hash that instead of
            // recursing every time.
            thread_local! {
                static CACHE: RefCell<FxHashMap<hygiene::Mark, u64>> =
                    RefCell::new(FxHashMap());
            }

            let sub_hash: u64 = CACHE.with(|cache| {
                let mark = span.ctxt.outer();

                if let Some(&sub_hash) = cache.borrow().get(&mark) {
                    return sub_hash;
                }

                let mut hasher = StableHasher::new();
                mark.expn_info().hash_stable(hcx, &mut hasher);
                let sub_hash: Fingerprint = hasher.finish();
                let sub_hash = sub_hash.to_smaller_hash();
                cache.borrow_mut().insert(mark, sub_hash);
                sub_hash
            });

            sub_hash.hash_stable(hcx, hasher);
        }
    }
}
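
// A self-contained sketch (test-only, not used by the compiler) of the bit
// layout produced above, assuming the same truncation: bits 0..8 hold the
// column, bits 8..32 the line, and bits 32..64 the span length in bytes.
#[cfg(test)]
mod span_hash_packing_sketch {
    // Hypothetical helper mirroring the packing of `line_col_len` above.
    fn pack_line_col_len(col: u64, line: u64, len: u64) -> u64 {
        (col & 0xFF) | ((line & 0xFF_FF_FF) << 8) | (len << 32)
    }

    #[test]
    fn fields_land_in_disjoint_bit_ranges() {
        let packed = pack_line_col_len(0x12, 0x3456, 0x789A);
        assert_eq!(packed & 0xFF, 0x12);
        assert_eq!((packed >> 8) & 0xFF_FF_FF, 0x3456);
        assert_eq!(packed >> 32, 0x789A);
    }
}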

pub fn hash_stable_trait_impls<'gcx, W, R>(
    hcx: &mut StableHashingContext<'gcx>,
    hasher: &mut StableHasher<W>,
    blanket_impls: &Vec<DefId>,
    non_blanket_impls: &HashMap<fast_reject::SimplifiedType, Vec<DefId>, R>)
    where W: StableHasherResult,
          R: std_hash::BuildHasher,
{
    {
        let mut blanket_impls: AccumulateVec<[_; 8]> = blanket_impls
            .iter()
            .map(|&def_id| hcx.def_path_hash(def_id))
            .collect();

        if blanket_impls.len() > 1 {
            blanket_impls.sort_unstable();
        }

        blanket_impls.hash_stable(hcx, hasher);
    }

    {
        let mut keys: AccumulateVec<[_; 8]> =
            non_blanket_impls.keys()
                             .map(|k| (k, k.map_def(|d| hcx.def_path_hash(d))))
                             .collect();
        keys.sort_unstable_by(|&(_, ref k1), &(_, ref k2)| k1.cmp(k2));
        keys.len().hash_stable(hcx, hasher);
        for (key, ref stable_key) in keys {
            stable_key.hash_stable(hcx, hasher);
            let mut impls: AccumulateVec<[_; 8]> = non_blanket_impls[key]
                .iter()
                .map(|&impl_id| hcx.def_path_hash(impl_id))
                .collect();

            if impls.len() > 1 {
                impls.sort_unstable();
            }

            impls.hash_stable(hcx, hasher);
        }
    }
}
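
// A small, test-only sketch (hypothetical, for illustration) of why the impls
// above are mapped to DefPathHashes and sorted before hashing: the resulting
// fingerprint depends only on the set of impls, not on the order in which
// they happen to be stored in the untracked containers.
#[cfg(test)]
mod trait_impl_hash_order_sketch {
    use rustc_data_structures::stable_hasher::StableHasher;
    use ich::Fingerprint;
    use std::hash::Hash;

    // Stand-in for a list of (already stable) impl hashes.
    fn fingerprint_of(mut impl_hashes: Vec<u64>) -> Fingerprint {
        impl_hashes.sort_unstable();
        let mut hasher = StableHasher::new();
        impl_hashes.hash(&mut hasher);
        hasher.finish()
    }

    #[test]
    fn storage_order_does_not_change_the_fingerprint() {
        assert_eq!(fingerprint_of(vec![3, 1, 2]),
                   fingerprint_of(vec![2, 3, 1]));
    }
}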