1 // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
12 use hir::def_id::DefId;
13 use hir::map::DefPathHash;
14 use ich::{self, CachingCodemapView};
15 use session::config::DebugInfoLevel::NoDebugInfo;
17 use util::nodemap::{NodeMap, ItemLocalMap};
19 use std::hash as std_hash;
20 use std::collections::{HashMap, HashSet, BTreeMap};
24 use syntax::ext::hygiene::SyntaxContext;
25 use syntax::symbol::Symbol;
28 use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
30 use rustc_data_structures::accumulate_vec::AccumulateVec;
32 /// This is the context state available during incr. comp. hashing. It contains
33 /// enough information to transform DefIds and HirIds into stable DefPaths (i.e.
34 /// a reference to the TyCtxt) and it holds a few caches for speeding up various
35 /// things (e.g. each DefId/DefPath is only hashed once).
36 pub struct StableHashingContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Type context: gives access to def-path hashes, session options, and the HIR map.
37 tcx: ty::TyCtxt<'a, 'gcx, 'tcx>,
// Caching view of the codemap, used by the `HashStable` impl for `Span`
// below to translate byte positions into (file, line, col) triples.
38 codemap: CachingCodemapView<'gcx>,
// NOTE(review): source lines appear to be elided between here and the next
// field — the `hash_spans` and `hash_bodies` flags used by the methods below
// are not visible in this chunk.
// Mirrors the session's overflow-check setting; temporarily forced on by
// `hash_hir_item_like` for `#[rustc_inherit_overflow_checks]` items.
41 overflow_checks_enabled: bool,
// Current strategy for hashing NodeIds; see `NodeIdHashingMode`.
42 node_id_hashing_mode: NodeIdHashingMode,
43 // A sorted array of symbol keys for fast lookup.
44 ignored_attr_names: Vec<Symbol>,
47 #[derive(PartialEq, Eq, Clone, Copy)]
// How NodeIds are folded into the stable hash. The variant declarations are
// elided in this chunk, but the rest of the file references `Ignore`,
// `HashDefPath`, and `HashTraitsInScope` (see the NodeId `HashStable` impl).
48 pub enum NodeIdHashingMode {
54 impl<'a, 'gcx, 'tcx> StableHashingContext<'a, 'gcx, 'tcx> {
// Creates a fresh hashing context. Span hashing defaults to "on" unless
// debuginfo is disabled; overflow-check hashing mirrors the session setting.
56 pub fn new(tcx: ty::TyCtxt<'a, 'gcx, 'tcx>) -> Self {
57 let hash_spans_initial = tcx.sess.opts.debuginfo != NoDebugInfo;
58 let check_overflow_initial = tcx.sess.overflow_checks();
// Intern the ignored-attribute names once up front...
60 let mut ignored_attr_names: Vec<_> = ich::IGNORED_ATTRIBUTES
62 .map(|&s| Symbol::intern(s))
// ...and sort them so `is_ignored_attr` can use binary search.
65 ignored_attr_names.sort();
67 StableHashingContext {
69 codemap: CachingCodemapView::new(tcx),
70 hash_spans: hash_spans_initial,
72 overflow_checks_enabled: check_overflow_initial,
73 node_id_hashing_mode: NodeIdHashingMode::HashDefPath,
// Builder-style switch: forces span hashing on regardless of the
// debuginfo-derived default chosen in `new()`.
78 pub fn force_span_hashing(mut self) -> Self {
79 self.hash_spans = true;
// Runs the given closure with `hash_bodies` temporarily set to the supplied
// value, restoring the previous value afterwards.
84 pub fn while_hashing_hir_bodies<F: FnOnce(&mut Self)>(&mut self,
87 let prev_hash_bodies = self.hash_bodies;
88 self.hash_bodies = hash_bodies;
90 self.hash_bodies = prev_hash_bodies;
// Runs the given closure with `hash_spans` temporarily set to the supplied
// value, restoring the previous value afterwards.
94 pub fn while_hashing_spans<F: FnOnce(&mut Self)>(&mut self,
97 let prev_hash_spans = self.hash_spans;
98 self.hash_spans = hash_spans;
100 self.hash_spans = prev_hash_spans;
// Runs the given closure with the NodeId hashing mode temporarily replaced,
// restoring the previous mode afterwards.
104 pub fn with_node_id_hashing_mode<F: FnOnce(&mut Self)>(&mut self,
105 mode: NodeIdHashingMode,
107 let prev = self.node_id_hashing_mode;
108 self.node_id_hashing_mode = mode;
110 self.node_id_hashing_mode = prev;
// Accessor for the underlying type context.
114 pub fn tcx(&self) -> ty::TyCtxt<'a, 'gcx, 'tcx> {
// Maps a DefId to its position-independent DefPathHash via the tcx.
119 pub fn def_path_hash(&mut self, def_id: DefId) -> DefPathHash {
120 self.tcx.def_path_hash(def_id)
// Whether spans should currently be hashed.
124 pub fn hash_spans(&self) -> bool {
// Whether HIR bodies should currently be hashed.
129 pub fn hash_bodies(&self) -> bool {
// Mutable access to the cached codemap view (mutable because lookups
// populate its cache).
134 pub fn codemap(&mut self) -> &mut CachingCodemapView<'gcx> {
// Binary search over the pre-sorted `ignored_attr_names` built in `new()`.
139 pub fn is_ignored_attr(&self, name: Symbol) -> bool {
140 self.ignored_attr_names.binary_search(&name).is_ok()
// Sets up hashing state for a HIR item: NodeIds are ignored while hashing,
// and overflow checks are force-enabled when the item carries
// `#[rustc_inherit_overflow_checks]`. Previous state is restored afterwards.
143 pub fn hash_hir_item_like<F: FnOnce(&mut Self)>(&mut self,
144 item_attrs: &[ast::Attribute],
146 let prev_overflow_checks = self.overflow_checks_enabled;
147 if attr::contains_name(item_attrs, "rustc_inherit_overflow_checks") {
148 self.overflow_checks_enabled = true;
150 let prev_hash_node_ids = self.node_id_hashing_mode;
151 self.node_id_hashing_mode = NodeIdHashingMode::Ignore;
155 self.node_id_hashing_mode = prev_hash_node_ids;
156 self.overflow_checks_enabled = prev_overflow_checks;
// Whether the given binary operator can panic at runtime. Only one match arm
// is visible here (multiplication panics on overflow iff overflow checks are
// enabled); the remaining arms are elided in this chunk.
160 pub fn binop_can_panic_at_runtime(&self, binop: hir::BinOp_) -> bool
165 hir::BiMul => self.overflow_checks_enabled,
// Same question for unary operators: negation can overflow when overflow
// checks are enabled. Remaining arms are elided in this chunk.
187 pub fn unop_can_panic_at_runtime(&self, unop: hir::UnOp) -> bool
192 hir::UnNeg => self.overflow_checks_enabled,
// Stable hashing for AST NodeIds. Raw NodeIds are not stable across
// compilation sessions, so the id is first translated to a HirId and then
// handled according to the context's current `node_id_hashing_mode`.
198 impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::NodeId {
199 fn hash_stable<W: StableHasherResult>(&self,
200 hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
201 hasher: &mut StableHasher<W>) {
202 let hir_id = hcx.tcx.hir.node_to_hir_id(*self);
203 match hcx.node_id_hashing_mode {
204 NodeIdHashingMode::Ignore => {
205 // Most NodeIds in the HIR can be ignored, but if there is a
206 // corresponding entry in the `trait_map` we need to hash that.
207 // Make sure we don't ignore too much by checking that there is
208 // no entry in a debug_assert!().
209 debug_assert!(hcx.tcx.in_scope_traits(hir_id).is_none());
// Hash the HirId itself, which delegates to a stable representation.
211 NodeIdHashingMode::HashDefPath => {
212 hir_id.hash_stable(hcx, hasher);
213 NodeIdHashingMode::HashTraitsInScope => {
215 if let Some(traits) = hcx.tcx.in_scope_traits(hir_id) {
216 // The ordering of the candidates is not fixed. So we hash
217 // the def-ids and then sort them and hash the collection.
// (Import ids are deliberately not hashed; only the trait's
// DefPathHash matters here.)
218 let mut candidates: AccumulateVec<[_; 8]> =
220 .map(|&hir::TraitCandidate { def_id, import_id: _ }| {
221 hcx.def_path_hash(def_id)
// NOTE(review): the sort of `candidates` for the >1 case appears
// to be on an elided line between here and the final hash call.
224 if traits.len() > 1 {
227 candidates.hash_stable(hcx, hasher);
234 impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Span {
236 // Hash a span in a stable way. We can't directly hash the span's BytePos
237 // fields (that would be similar to hashing pointers, since those are just
238 // offsets into the CodeMap). Instead, we hash the (file name, line, column)
239 // triple, which stays the same even if the containing FileMap has moved
240 // within the CodeMap.
241 // Also note that we are hashing byte offsets for the column, not unicode
242 // codepoint offsets. For the purpose of the hash that's sufficient.
243 // Also, hashing filenames is expensive so we avoid doing it twice when the
244 // span starts and ends in the same file, which is almost always the case.
245 fn hash_stable<W: StableHasherResult>(&self,
246 hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
247 hasher: &mut StableHasher<W>) {
// NOTE(review): the early-out for when `hash_spans` is disabled appears to
// be on lines elided from this chunk.
254 // If this is not an empty or invalid span, we want to hash the last
255 // position that belongs to it, as opposed to hashing the first
// position past it (the `else` branch, elided here, presumably falls back
// to `self.hi()` for empty/invalid spans — TODO confirm).
257 let span_hi = if self.hi() > self.lo() {
258 // We might end up in the middle of a multibyte character here,
259 // but that's OK, since we are not trying to decode anything at
// this position.
261 self.hi() - ::syntax_pos::BytePos(1)
// Resolve both endpoints to (file name, line, byte column); unresolvable
// positions degrade to a ("???", 0, 0) placeholder rather than panicking.
267 let loc1 = hcx.codemap().byte_pos_to_line_and_col(self.lo());
268 let loc1 = loc1.as_ref()
269 .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize()))
270 .unwrap_or(("???", 0, 0));
272 let loc2 = hcx.codemap().byte_pos_to_line_and_col(span_hi);
273 let loc2 = loc2.as_ref()
274 .map(|&(ref fm, line, col)| (&fm.name[..], line, col.to_usize()))
275 .unwrap_or(("???", 0, 0));
// Same file: tag with 0u8 and hash the file name only once.
277 if loc1.0 == loc2.0 {
278 std_hash::Hash::hash(&0u8, hasher);
280 std_hash::Hash::hash(loc1.0, hasher);
281 std_hash::Hash::hash(&loc1.1, hasher);
282 std_hash::Hash::hash(&loc1.2, hasher);
284 // Do not hash the file name twice
285 std_hash::Hash::hash(&loc2.1, hasher);
286 std_hash::Hash::hash(&loc2.2, hasher);
// Different files: tag with 1u8 and hash both full triples. The tag keeps
// the two encodings from ever colliding.
288 std_hash::Hash::hash(&1u8, hasher);
290 std_hash::Hash::hash(loc1.0, hasher);
291 std_hash::Hash::hash(&loc1.1, hasher);
292 std_hash::Hash::hash(&loc1.2, hasher);
294 std_hash::Hash::hash(loc2.0, hasher);
295 std_hash::Hash::hash(&loc2.1, hasher);
296 std_hash::Hash::hash(&loc2.2, hasher);
// Hygiene: a span with non-empty syntax context (i.e. from a macro
// expansion) additionally hashes its source call site, again tagged so the
// two cases cannot collide.
300 if self.ctxt() == SyntaxContext::empty() {
301 0u8.hash_stable(hcx, hasher);
303 1u8.hash_stable(hcx, hasher);
304 self.source_callsite().hash_stable(hcx, hasher);
/// Hashes a `HashMap` in a stable way even though its iteration order is
/// nondeterministic: each key is mapped to a stable sort key via
/// `extract_stable_key`, entries are sorted by that key, and then the map's
/// length followed by each (stable_key, value) pair is hashed in order.
309 pub fn hash_stable_hashmap<'a, 'gcx, 'tcx, K, V, R, SK, F, W>(
310 hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
311 hasher: &mut StableHasher<W>,
312 map: &HashMap<K, V, R>,
313 extract_stable_key: F)
314 where K: Eq + std_hash::Hash,
315 V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
316 R: std_hash::BuildHasher,
317 SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
318 F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
319 W: StableHasherResult,
// Pair each original key with its stable key so the value lookup below can
// still use the original key.
321 let mut keys: Vec<_> = map.keys()
322 .map(|k| (extract_stable_key(hcx, k), k))
// Sorting by the stable key makes the hash independent of HashMap order.
324 keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
325 keys.len().hash_stable(hcx, hasher);
326 for (stable_key, key) in keys {
327 stable_key.hash_stable(hcx, hasher);
328 map[key].hash_stable(hcx, hasher);
/// Hashes a `HashSet` in a stable way: every element is mapped to a stable
/// key via `extract_stable_key`, the keys are sorted, and the resulting Vec
/// (including its length) is hashed.
332 pub fn hash_stable_hashset<'a, 'tcx, 'gcx, K, R, SK, F, W>(
333 hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
334 hasher: &mut StableHasher<W>,
// NOTE(review): the `set` parameter's declaration line is elided from this
// chunk; the body below iterates `set`.
336 extract_stable_key: F)
337 where K: Eq + std_hash::Hash,
338 R: std_hash::BuildHasher,
339 SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
340 F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
341 W: StableHasherResult,
343 let mut keys: Vec<_> = set.iter()
344 .map(|k| extract_stable_key(hcx, k))
// Sorting removes the dependence on HashSet iteration order.
346 keys.sort_unstable();
347 keys.hash_stable(hcx, hasher);
/// Hashes a `NodeMap` stably by delegating to `hash_stable_hashmap`, using a
/// stable key derived from each NodeId's corresponding HirId (the visible
/// closure body reads the HirId's `local_id`; the rest of the key expression
/// is elided from this chunk — presumably it also involves the owner's
/// def-path hash, TODO confirm).
350 pub fn hash_stable_nodemap<'a, 'tcx, 'gcx, V, W>(
351 hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
352 hasher: &mut StableHasher<W>,
354 where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
355 W: StableHasherResult,
357 hash_stable_hashmap(hcx, hasher, map, |hcx, node_id| {
358 hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
/// Hashes an `ItemLocalMap` stably by delegating to `hash_stable_hashmap`.
/// The key-extraction closure ignores the context; its body is elided from
/// this chunk (presumably the ItemLocalId itself serves as the stable key,
/// since it is already item-relative — TODO confirm).
362 pub fn hash_stable_itemlocalmap<'a, 'tcx, 'gcx, V, W>(
363 hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
364 hasher: &mut StableHasher<W>,
365 map: &ItemLocalMap<V>)
366 where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
367 W: StableHasherResult,
369 hash_stable_hashmap(hcx, hasher, map, |_, local_id| {
/// Hashes a `BTreeMap` stably using the same scheme as `hash_stable_hashmap`:
/// sort by an extracted stable key, then hash the length and each
/// (stable_key, value) pair. The re-sort is needed because the BTreeMap's own
/// `Ord`-based order on `K` is not necessarily stable across sessions.
375 pub fn hash_stable_btreemap<'a, 'tcx, 'gcx, K, V, SK, F, W>(
376 hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
377 hasher: &mut StableHasher<W>,
378 map: &BTreeMap<K, V>,
379 extract_stable_key: F)
381 V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
382 SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
383 F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
384 W: StableHasherResult,
// Pair each original key with its stable key so the value lookup below can
// still use the original key.
386 let mut keys: Vec<_> = map.keys()
387 .map(|k| (extract_stable_key(hcx, k), k))
389 keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
390 keys.len().hash_stable(hcx, hasher);
391 for (stable_key, key) in keys {
392 stable_key.hash_stable(hcx, hasher);
393 map[key].hash_stable(hcx, hasher);