use crate::dep_graph::DepNodeIndex;

use rustc_arena::TypedArena;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sharded;
#[cfg(parallel_compiler)]
use rustc_data_structures::sharded::Sharded;
#[cfg(not(parallel_compiler))]
use rustc_data_structures::sync::Lock;
use rustc_data_structures::sync::WorkerLocal;
use rustc_index::vec::{Idx, IndexVec};

use std::fmt::Debug;
use std::hash::Hash;
use std::marker::PhantomData;
/// Selects the concrete cache implementation for a query, given the value
/// type `V`. `Cache` stores `V` by value (hence the `V: Copy` bound on the
/// associated type); `ArenaCache` arena-allocates and hands out `&'tcx V`.
pub trait CacheSelector<'tcx, V> {
    type Cache
    where
        V: Copy;
    type ArenaCache;
}
/// Describes how a query's result is stored: `Value` is the computed result,
/// `Stored` is what callers receive (either `Value` itself, or a cheap
/// `Copy` handle such as an arena reference).
pub trait QueryStorage {
    type Value: Debug;
    type Stored: Copy + Debug;
}
28 pub trait QueryCache: QueryStorage + Sized {
29 type Key: Hash + Eq + Clone + Debug;
31 /// Checks if the query is already computed and in the cache.
32 /// It returns the shard index and a lock guard to the shard,
33 /// which will be used if the query is not in the cache and we need
35 fn lookup(&self, key: &Self::Key) -> Option<(Self::Stored, DepNodeIndex)>;
37 fn complete(&self, key: Self::Key, value: Self::Value, index: DepNodeIndex) -> Self::Stored;
39 fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex));
/// Selector for the hash-map based caches (`DefaultCache` / `ArenaCache`);
/// zero-sized, the key type only lives in the `PhantomData`.
pub struct DefaultCacheSelector<K>(PhantomData<K>);
44 impl<'tcx, K: Eq + Hash, V: 'tcx> CacheSelector<'tcx, V> for DefaultCacheSelector<K> {
45 type Cache = DefaultCache<K, V>
48 type ArenaCache = ArenaCache<'tcx, K, V>;
51 pub struct DefaultCache<K, V> {
52 #[cfg(parallel_compiler)]
53 cache: Sharded<FxHashMap<K, (V, DepNodeIndex)>>,
54 #[cfg(not(parallel_compiler))]
55 cache: Lock<FxHashMap<K, (V, DepNodeIndex)>>,
58 impl<K, V> Default for DefaultCache<K, V> {
59 fn default() -> Self {
60 DefaultCache { cache: Default::default() }
64 impl<K: Eq + Hash, V: Copy + Debug> QueryStorage for DefaultCache<K, V> {
69 impl<K, V> QueryCache for DefaultCache<K, V>
71 K: Eq + Hash + Clone + Debug,
77 fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> {
78 let key_hash = sharded::make_hash(key);
79 #[cfg(parallel_compiler)]
80 let lock = self.cache.get_shard_by_hash(key_hash).lock();
81 #[cfg(not(parallel_compiler))]
82 let lock = self.cache.lock();
83 let result = lock.raw_entry().from_key_hashed_nocheck(key_hash, key);
85 if let Some((_, value)) = result { Some(*value) } else { None }
89 fn complete(&self, key: K, value: V, index: DepNodeIndex) -> Self::Stored {
90 #[cfg(parallel_compiler)]
91 let mut lock = self.cache.get_shard_by_value(&key).lock();
92 #[cfg(not(parallel_compiler))]
93 let mut lock = self.cache.lock();
94 // We may be overwriting another value. This is all right, since the dep-graph
95 // will check that the fingerprint matches.
96 lock.insert(key, (value.clone(), index));
100 fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
101 #[cfg(parallel_compiler)]
103 let shards = self.cache.lock_shards();
104 for shard in shards.iter() {
105 for (k, v) in shard.iter() {
110 #[cfg(not(parallel_compiler))]
112 let map = self.cache.lock();
113 for (k, v) in map.iter() {
120 pub struct ArenaCache<'tcx, K, V> {
121 arena: WorkerLocal<TypedArena<(V, DepNodeIndex)>>,
122 #[cfg(parallel_compiler)]
123 cache: Sharded<FxHashMap<K, &'tcx (V, DepNodeIndex)>>,
124 #[cfg(not(parallel_compiler))]
125 cache: Lock<FxHashMap<K, &'tcx (V, DepNodeIndex)>>,
128 impl<'tcx, K, V> Default for ArenaCache<'tcx, K, V> {
129 fn default() -> Self {
130 ArenaCache { arena: WorkerLocal::new(|_| TypedArena::default()), cache: Default::default() }
134 impl<'tcx, K: Eq + Hash, V: Debug + 'tcx> QueryStorage for ArenaCache<'tcx, K, V> {
136 type Stored = &'tcx V;
139 impl<'tcx, K, V: 'tcx> QueryCache for ArenaCache<'tcx, K, V>
141 K: Eq + Hash + Clone + Debug,
147 fn lookup(&self, key: &K) -> Option<(&'tcx V, DepNodeIndex)> {
148 let key_hash = sharded::make_hash(key);
149 #[cfg(parallel_compiler)]
150 let lock = self.cache.get_shard_by_hash(key_hash).lock();
151 #[cfg(not(parallel_compiler))]
152 let lock = self.cache.lock();
153 let result = lock.raw_entry().from_key_hashed_nocheck(key_hash, key);
155 if let Some((_, value)) = result { Some((&value.0, value.1)) } else { None }
159 fn complete(&self, key: K, value: V, index: DepNodeIndex) -> Self::Stored {
160 let value = self.arena.alloc((value, index));
161 let value = unsafe { &*(value as *const _) };
162 #[cfg(parallel_compiler)]
163 let mut lock = self.cache.get_shard_by_value(&key).lock();
164 #[cfg(not(parallel_compiler))]
165 let mut lock = self.cache.lock();
166 // We may be overwriting another value. This is all right, since the dep-graph
167 // will check that the fingerprint matches.
168 lock.insert(key, value);
172 fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
173 #[cfg(parallel_compiler)]
175 let shards = self.cache.lock_shards();
176 for shard in shards.iter() {
177 for (k, v) in shard.iter() {
182 #[cfg(not(parallel_compiler))]
184 let map = self.cache.lock();
185 for (k, v) in map.iter() {
/// Selector for the dense, index-keyed caches (`VecCache` / `VecArenaCache`);
/// zero-sized, the key type only lives in the `PhantomData`.
pub struct VecCacheSelector<K>(PhantomData<K>);
194 impl<'tcx, K: Idx, V: 'tcx> CacheSelector<'tcx, V> for VecCacheSelector<K> {
195 type Cache = VecCache<K, V>
198 type ArenaCache = VecArenaCache<'tcx, K, V>;
201 pub struct VecCache<K: Idx, V> {
202 #[cfg(parallel_compiler)]
203 cache: Sharded<IndexVec<K, Option<(V, DepNodeIndex)>>>,
204 #[cfg(not(parallel_compiler))]
205 cache: Lock<IndexVec<K, Option<(V, DepNodeIndex)>>>,
208 impl<K: Idx, V> Default for VecCache<K, V> {
209 fn default() -> Self {
210 VecCache { cache: Default::default() }
214 impl<K: Eq + Idx, V: Copy + Debug> QueryStorage for VecCache<K, V> {
219 impl<K, V> QueryCache for VecCache<K, V>
221 K: Eq + Idx + Clone + Debug,
227 fn lookup(&self, key: &K) -> Option<(V, DepNodeIndex)> {
228 #[cfg(parallel_compiler)]
229 let lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
230 #[cfg(not(parallel_compiler))]
231 let lock = self.cache.lock();
232 if let Some(Some(value)) = lock.get(*key) { Some(*value) } else { None }
236 fn complete(&self, key: K, value: V, index: DepNodeIndex) -> Self::Stored {
237 #[cfg(parallel_compiler)]
238 let mut lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
239 #[cfg(not(parallel_compiler))]
240 let mut lock = self.cache.lock();
241 lock.insert(key, (value.clone(), index));
245 fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
246 #[cfg(parallel_compiler)]
248 let shards = self.cache.lock_shards();
249 for shard in shards.iter() {
250 for (k, v) in shard.iter_enumerated() {
257 #[cfg(not(parallel_compiler))]
259 let map = self.cache.lock();
260 for (k, v) in map.iter_enumerated() {
269 pub struct VecArenaCache<'tcx, K: Idx, V> {
270 arena: WorkerLocal<TypedArena<(V, DepNodeIndex)>>,
271 #[cfg(parallel_compiler)]
272 cache: Sharded<IndexVec<K, Option<&'tcx (V, DepNodeIndex)>>>,
273 #[cfg(not(parallel_compiler))]
274 cache: Lock<IndexVec<K, Option<&'tcx (V, DepNodeIndex)>>>,
277 impl<'tcx, K: Idx, V> Default for VecArenaCache<'tcx, K, V> {
278 fn default() -> Self {
280 arena: WorkerLocal::new(|_| TypedArena::default()),
281 cache: Default::default(),
286 impl<'tcx, K: Eq + Idx, V: Debug + 'tcx> QueryStorage for VecArenaCache<'tcx, K, V> {
288 type Stored = &'tcx V;
291 impl<'tcx, K, V: 'tcx> QueryCache for VecArenaCache<'tcx, K, V>
293 K: Eq + Idx + Clone + Debug,
299 fn lookup(&self, key: &K) -> Option<(&'tcx V, DepNodeIndex)> {
300 #[cfg(parallel_compiler)]
301 let lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
302 #[cfg(not(parallel_compiler))]
303 let lock = self.cache.lock();
304 if let Some(Some(value)) = lock.get(*key) { Some((&value.0, value.1)) } else { None }
308 fn complete(&self, key: K, value: V, index: DepNodeIndex) -> Self::Stored {
309 let value = self.arena.alloc((value, index));
310 let value = unsafe { &*(value as *const _) };
311 #[cfg(parallel_compiler)]
312 let mut lock = self.cache.get_shard_by_hash(key.index() as u64).lock();
313 #[cfg(not(parallel_compiler))]
314 let mut lock = self.cache.lock();
315 lock.insert(key, value);
319 fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
320 #[cfg(parallel_compiler)]
322 let shards = self.cache.lock_shards();
323 for shard in shards.iter() {
324 for (k, v) in shard.iter_enumerated() {
331 #[cfg(not(parallel_compiler))]
333 let map = self.cache.lock();
334 for (k, v) in map.iter_enumerated() {