1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
14 use dep_graph::{DepNode, DepConstructor};
15 use errors::DiagnosticBuilder;
17 use session::config::OutputFilenames;
19 use hir::{TraitCandidate, HirId, ItemLocalId};
20 use hir::def::{Def, Export};
21 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
22 use hir::map as hir_map;
23 use hir::map::DefPathHash;
24 use lint::{self, Lint};
25 use ich::{StableHashingContext, NodeIdHashingMode};
26 use middle::const_val::ConstVal;
27 use middle::cstore::{CrateStore, LinkMeta, EncodedMetadataHashes};
28 use middle::cstore::EncodedMetadata;
29 use middle::free_region::FreeRegionMap;
30 use middle::lang_items;
31 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
32 use middle::stability;
34 use ty::subst::{Kind, Substs};
37 use ty::{self, Ty, TypeAndMut};
38 use ty::{TyS, TypeVariants, Slice};
39 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorInterior, Region, Const};
40 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
42 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
43 use ty::TypeVariants::*;
44 use ty::layout::{LayoutDetails, TargetDataLayout};
48 use util::nodemap::{NodeMap, NodeSet, DefIdSet, ItemLocalMap};
49 use util::nodemap::{FxHashMap, FxHashSet};
50 use rustc_data_structures::accumulate_vec::AccumulateVec;
51 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
52 StableHasher, StableHasherResult,
54 use arena::{TypedArena, DroplessArena};
55 use rustc_const_math::{ConstInt, ConstUsize};
56 use rustc_data_structures::indexed_vec::IndexVec;
58 use std::borrow::Borrow;
59 use std::cell::{Cell, RefCell};
60 use std::cmp::Ordering;
61 use std::collections::hash_map::{self, Entry};
62 use std::hash::{Hash, Hasher};
70 use syntax::ast::{self, Name, NodeId};
72 use syntax::codemap::MultiSpan;
73 use syntax::symbol::{Symbol, keywords};
// Arenas that own crate-global, long-lived data for the type context
// (layouts, generics, trait/ADT definitions, MIR bodies, typeck tables).
// NOTE(review): this dump elides some original lines (see gaps in the
// inline numbering), so the struct body shown here may be incomplete.
79 pub struct GlobalArenas<'tcx> {
81 layout: TypedArena<LayoutDetails>,
84 generics: TypedArena<ty::Generics>,
85 trait_def: TypedArena<ty::TraitDef>,
86 adt_def: TypedArena<ty::AdtDef>,
// MIR that may later be stolen (taken by value) by a consumer.
87 steal_mir: TypedArena<Steal<Mir<'tcx>>>,
88 mir: TypedArena<Mir<'tcx>>,
89 tables: TypedArena<ty::TypeckTables<'tcx>>,
92 impl<'tcx> GlobalArenas<'tcx> {
// Construct a fresh set of (empty) arenas.
93 pub fn new() -> GlobalArenas<'tcx> {
95 layout: TypedArena::new(),
96 generics: TypedArena::new(),
97 trait_def: TypedArena::new(),
98 adt_def: TypedArena::new(),
99 steal_mir: TypedArena::new(),
100 mir: TypedArena::new(),
101 tables: TypedArena::new(),
// Interning tables for types, regions, substs, predicates and consts.
// Each table maps a value to a reference into `arena`, so equal values
// share one allocation and can be compared by pointer.
106 pub struct CtxtInterners<'tcx> {
107 /// The arena that types, regions, etc are allocated from
108 arena: &'tcx DroplessArena,
110 /// Specifically use a speedy hash algorithm for these hash sets,
111 /// they're accessed quite often.
112 type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
113 type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
114 substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
115 region: RefCell<FxHashSet<Interned<'tcx, RegionKind>>>,
116 existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
117 predicates: RefCell<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
118 const_: RefCell<FxHashSet<Interned<'tcx, Const<'tcx>>>>,
121 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
// Create a set of empty interners backed by the given arena.
122 fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
125 type_: RefCell::new(FxHashSet()),
126 type_list: RefCell::new(FxHashSet()),
127 substs: RefCell::new(FxHashSet()),
128 region: RefCell::new(FxHashSet()),
129 existential_predicates: RefCell::new(FxHashSet()),
130 predicates: RefCell::new(FxHashSet()),
131 const_: RefCell::new(FxHashSet()),
135 /// Intern a type. global_interners is Some only if this is
136 /// a local interner and global_interners is its counterpart.
137 fn intern_ty(&self, st: TypeVariants<'tcx>,
138 global_interners: Option<&CtxtInterners<'gcx>>)
// Fast path: look the variant up in the local interner first,
// then (if this is a local interner) in the global one.
141 let mut interner = self.type_.borrow_mut();
142 let global_interner = global_interners.map(|interners| {
143 interners.type_.borrow_mut()
145 if let Some(&Interned(ty)) = interner.get(&st) {
148 if let Some(ref interner) = global_interner {
149 if let Some(&Interned(ty)) = interner.get(&st) {
// Miss: compute flags for the new type and build the TyS value.
154 let flags = super::flags::FlagComputation::for_sty(&st);
155 let ty_struct = TyS {
158 region_depth: flags.depth,
161 // HACK(eddyb) Depend on flags being accurate to
162 // determine that all contents are in the global tcx.
163 // See comments on Lift for why we can't use that.
164 if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
165 if let Some(interner) = global_interners {
// SAFETY(review): transmutes TyS<'tcx> to TyS<'gcx>; sound only because
// the flag check above established there are no local inference
// types/regions inside — confirm against the full original source.
166 let ty_struct: TyS<'gcx> = unsafe {
167 mem::transmute(ty_struct)
169 let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
170 global_interner.unwrap().insert(Interned(ty));
174 // Make sure we don't end up with inference
175 // types/regions in the global tcx.
176 if global_interners.is_none() {
178 bug!("Attempted to intern `{:?}` which contains \
179 inference types/regions in the global type context",
184 // Don't be &mut TyS.
185 let ty: Ty<'tcx> = self.arena.alloc(ty_struct);
186 interner.insert(Interned(ty));
190 debug!("Interned type: {:?} Pointer: {:?}",
191 ty, ty as *const TyS);
// Pre-interned types and regions shared across the compiler.
// NOTE(review): most fields (isize/i8/.../f64 etc.) are elided in this
// dump; only the region fields are visible here.
197 pub struct CommonTypes<'tcx> {
217 pub re_empty: Region<'tcx>,
218 pub re_static: Region<'tcx>,
219 pub re_erased: Region<'tcx>,
// Read-only view of an `ItemLocalMap` paired with the `local_id_root`
// that all keys in the map are relative to; used by TypeckTables accessors.
222 pub struct LocalTableInContext<'a, V: 'a> {
223 local_id_root: Option<DefId>,
224 data: &'a ItemLocalMap<V>
227 /// Validate that the given HirId (respectively its `local_id` part) can be
228 /// safely used as a key in the tables of a TypeckTable. For that to be
229 /// the case, the HirId must have the same `owner` as all the other IDs in
230 /// this table (signified by `local_id_root`). Otherwise the HirId
231 /// would be in a different frame of reference and using its `local_id`
232 /// would result in lookup errors, or worse, in silently wrong data being
233 /// stored or returned.
// Only performs checks under `cfg!(debug_assertions)`; a no-op otherwise.
234 fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
237 if cfg!(debug_assertions) {
238 if let Some(local_id_root) = local_id_root {
// Owner mismatch: this HirId belongs to a different item than the table.
239 if hir_id.owner != local_id_root.index {
240 ty::tls::with(|tcx| {
241 let node_id = tcx.hir
243 .find_node_for_hir_id(hir_id);
245 bug!("node {} with HirId::owner {:?} cannot be placed in \
246 TypeckTables with local_id_root {:?}",
247 tcx.hir.node_to_string(node_id),
248 DefId::local(hir_id.owner),
253 // We use "Null Object" TypeckTables in some of the analysis passes.
254 // These are just expected to be empty and their `local_id_root` is
255 // `None`. Therefore we cannot verify whether a given `HirId` would
256 // be a valid key for the given table. Instead we make sure that
257 // nobody tries to write to such a Null Object table.
259 bug!("access to invalid TypeckTables")
265 impl<'a, V> LocalTableInContext<'a, V> {
// All accessors validate the HirId's owner (debug builds only) and then
// key the underlying map by the id's `local_id` part.
266 pub fn contains_key(&self, id: hir::HirId) -> bool {
267 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
268 self.data.contains_key(&id.local_id)
271 pub fn get(&self, id: hir::HirId) -> Option<&V> {
272 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
273 self.data.get(&id.local_id)
276 pub fn iter(&self) -> hash_map::Iter<hir::ItemLocalId, V> {
// `table[hir_id]` sugar; panics if the key is absent.
281 impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
284 fn index(&self, key: hir::HirId) -> &V {
285 self.get(key).expect("LocalTableInContext: key not found")
// Mutable counterpart of `LocalTableInContext`.
289 pub struct LocalTableInContextMut<'a, V: 'a> {
290 local_id_root: Option<DefId>,
291 data: &'a mut ItemLocalMap<V>
294 impl<'a, V> LocalTableInContextMut<'a, V> {
// Mutating accessors validate with `mut_access = true`, so writes to a
// "Null Object" table (local_id_root == None) trigger a bug!().
295 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
296 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
297 self.data.get_mut(&id.local_id)
300 pub fn entry(&mut self, id: hir::HirId) -> Entry<hir::ItemLocalId, V> {
301 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
302 self.data.entry(id.local_id)
305 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
306 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
307 self.data.insert(id.local_id, val)
310 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
311 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
312 self.data.remove(&id.local_id)
// Results of type checking for a single body, keyed by ItemLocalId
// relative to `local_id_root`.
316 #[derive(RustcEncodable, RustcDecodable, Debug)]
317 pub struct TypeckTables<'tcx> {
318 /// The HirId::owner all ItemLocalIds in this table are relative to.
319 pub local_id_root: Option<DefId>,
321 /// Resolved definitions for `<T>::X` associated paths and
322 /// method calls, including those of overloaded operators.
323 type_dependent_defs: ItemLocalMap<Def>,
325 /// Stores the types for various nodes in the AST. Note that this table
326 /// is not guaranteed to be populated until after typeck. See
327 /// typeck::check::fn_ctxt for details.
328 node_types: ItemLocalMap<Ty<'tcx>>,
330 /// Stores the type parameters which were substituted to obtain the type
331 /// of this node. This only applies to nodes that refer to entities
332 /// parameterized by type parameters, such as generic fns, types, or
333 /// other items.
334 node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,
// Adjustments (e.g. autoderef/autoref) applied to expressions.
// NOTE(review): the original field doc was elided in this dump.
336 adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
338 /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
339 pat_binding_modes: ItemLocalMap<BindingMode>,
341 /// Stores the types which were implicitly dereferenced in pattern binding modes
342 /// for later usage in HAIR lowering. For example,
345 /// match &&Some(5i32) {
350 /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
353 /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
354 pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
// How each upvar is captured by its closure (by value or by reference).
357 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
359 /// Records the reasons that we picked the kind of each closure;
360 /// not all closures are present in the map.
361 closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,
363 /// For each fn, records the "liberated" types of its arguments
364 /// and return type. Liberated means that all bound regions
365 /// (including late-bound regions) are replaced with free
366 /// equivalents. This table is not used in trans (since regions
367 /// are erased there) and hence is not serialized to metadata.
368 liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,
370 /// For each FRU expression, record the normalized types of the fields
371 /// of the struct - this is needed because it is non-trivial to
372 /// normalize while preserving regions. This table is used only in
373 /// MIR construction and hence is not serialized to metadata.
374 fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,
376 /// Maps a cast expression to its kind. This is keyed on the
377 /// *from* expression of the cast, not the cast itself.
378 cast_kinds: ItemLocalMap<ty::cast::CastKind>,
380 /// Set of trait imports actually used in the method resolution.
381 /// This is used for warning unused imports. During type
382 /// checking, this `Rc` should not be cloned: it must have a ref-count
383 /// of 1 so that we can insert things into the set mutably.
384 pub used_trait_imports: Rc<DefIdSet>,
386 /// If any errors occurred while type-checking this body,
387 /// this field will be set to `true`.
388 pub tainted_by_errors: bool,
390 /// Stores the free-region relationships that were deduced from
391 /// its where clauses and parameter types. These are then
392 /// read-again by borrowck.
393 pub free_region_map: FreeRegionMap<'tcx>,
396 impl<'tcx> TypeckTables<'tcx> {
// Construct an empty table; `local_id_root == None` yields a "Null Object"
// table that must never be written to (see validate_hir_id_for_typeck_tables).
397 pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
400 type_dependent_defs: ItemLocalMap(),
401 node_types: ItemLocalMap(),
402 node_substs: ItemLocalMap(),
403 adjustments: ItemLocalMap(),
404 pat_binding_modes: ItemLocalMap(),
405 pat_adjustments: ItemLocalMap(),
406 upvar_capture_map: FxHashMap(),
407 closure_kind_origins: ItemLocalMap(),
408 liberated_fn_sigs: ItemLocalMap(),
409 fru_field_types: ItemLocalMap(),
410 cast_kinds: ItemLocalMap(),
411 used_trait_imports: Rc::new(DefIdSet()),
412 tainted_by_errors: false,
413 free_region_map: FreeRegionMap::new(),
417 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
// Resolved paths carry their Def directly; type-relative paths are looked
// up in `type_dependent_defs`, falling back to `Def::Err` when absent.
418 pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
420 hir::QPath::Resolved(_, ref path) => path.def,
421 hir::QPath::TypeRelative(..) => {
422 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
423 self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
// The following accessors wrap each private map in a (mutable) view that
// validates HirId owners on every access.
428 pub fn type_dependent_defs(&self) -> LocalTableInContext<Def> {
429 LocalTableInContext {
430 local_id_root: self.local_id_root,
431 data: &self.type_dependent_defs
435 pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<Def> {
436 LocalTableInContextMut {
437 local_id_root: self.local_id_root,
438 data: &mut self.type_dependent_defs
442 pub fn node_types(&self) -> LocalTableInContext<Ty<'tcx>> {
443 LocalTableInContext {
444 local_id_root: self.local_id_root,
445 data: &self.node_types
449 pub fn node_types_mut(&mut self) -> LocalTableInContextMut<Ty<'tcx>> {
450 LocalTableInContextMut {
451 local_id_root: self.local_id_root,
452 data: &mut self.node_types
// Panicking lookup: `bug!`s out when no type was recorded for the node.
456 pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
457 match self.node_id_to_type_opt(id) {
460 bug!("node_id_to_type: no type for node `{}`",
462 let id = tcx.hir.definitions().find_node_for_hir_id(id);
463 tcx.hir.node_to_string(id)
469 pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
470 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
471 self.node_types.get(&id.local_id).cloned()
474 pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<&'tcx Substs<'tcx>> {
475 LocalTableInContextMut {
476 local_id_root: self.local_id_root,
477 data: &mut self.node_substs
// Defaults to the empty substs when the node has none recorded.
481 pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
482 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
483 self.node_substs.get(&id.local_id).cloned().unwrap_or(Substs::empty())
486 pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
487 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
488 self.node_substs.get(&id.local_id).cloned()
491 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
492 // doesn't provide type parameter substitutions.
493 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
494 self.node_id_to_type(pat.hir_id)
497 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
498 self.node_id_to_type_opt(pat.hir_id)
501 // Returns the type of an expression as a monotype.
503 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
504 // some cases, we insert `Adjustment` annotations such as auto-deref or
505 // auto-ref. The type returned by this function does not consider such
506 // adjustments. See `expr_ty_adjusted()` instead.
508 // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
509 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
510 // instead of "fn(ty) -> T with T = isize".
511 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
512 self.node_id_to_type(expr.hir_id)
515 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
516 self.node_id_to_type_opt(expr.hir_id)
519 pub fn adjustments(&self) -> LocalTableInContext<Vec<ty::adjustment::Adjustment<'tcx>>> {
520 LocalTableInContext {
521 local_id_root: self.local_id_root,
522 data: &self.adjustments
526 pub fn adjustments_mut(&mut self)
527 -> LocalTableInContextMut<Vec<ty::adjustment::Adjustment<'tcx>>> {
528 LocalTableInContextMut {
529 local_id_root: self.local_id_root,
530 data: &mut self.adjustments
// Returns the (possibly empty) slice of adjustments for an expression.
534 pub fn expr_adjustments(&self, expr: &hir::Expr)
535 -> &[ty::adjustment::Adjustment<'tcx>] {
536 validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
537 self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
540 /// Returns the type of `expr`, considering any `Adjustment`
541 /// entry recorded for that expression.
542 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
543 self.expr_adjustments(expr)
545 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
548 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
549 self.expr_adjustments(expr)
551 .map(|adj| adj.target)
552 .or_else(|| self.expr_ty_opt(expr))
555 pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
556 // Only paths and method calls/overloaded operators have
557 // entries in type_dependent_defs, ignore the former here.
558 if let hir::ExprPath(_) = expr.node {
562 match self.type_dependent_defs().get(expr.hir_id) {
563 Some(&Def::Method(_)) => true,
// Panics if the upvar has no recorded capture mode.
568 pub fn pat_binding_modes(&self) -> LocalTableInContext<BindingMode> {
569 LocalTableInContext {
570 local_id_root: self.local_id_root,
571 data: &self.pat_binding_modes
575 pub fn pat_binding_modes_mut(&mut self)
576 -> LocalTableInContextMut<BindingMode> {
577 LocalTableInContextMut {
578 local_id_root: self.local_id_root,
579 data: &mut self.pat_binding_modes
583 pub fn pat_adjustments(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
584 LocalTableInContext {
585 local_id_root: self.local_id_root,
586 data: &self.pat_adjustments,
590 pub fn pat_adjustments_mut(&mut self)
591 -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
592 LocalTableInContextMut {
593 local_id_root: self.local_id_root,
594 data: &mut self.pat_adjustments,
598 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
599 self.upvar_capture_map[&upvar_id]
602 pub fn closure_kind_origins(&self) -> LocalTableInContext<(Span, ast::Name)> {
603 LocalTableInContext {
604 local_id_root: self.local_id_root,
605 data: &self.closure_kind_origins
609 pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<(Span, ast::Name)> {
610 LocalTableInContextMut {
611 local_id_root: self.local_id_root,
612 data: &mut self.closure_kind_origins
616 pub fn liberated_fn_sigs(&self) -> LocalTableInContext<ty::FnSig<'tcx>> {
617 LocalTableInContext {
618 local_id_root: self.local_id_root,
619 data: &self.liberated_fn_sigs
623 pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<ty::FnSig<'tcx>> {
624 LocalTableInContextMut {
625 local_id_root: self.local_id_root,
626 data: &mut self.liberated_fn_sigs
630 pub fn fru_field_types(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
631 LocalTableInContext {
632 local_id_root: self.local_id_root,
633 data: &self.fru_field_types
637 pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
638 LocalTableInContextMut {
639 local_id_root: self.local_id_root,
640 data: &mut self.fru_field_types
644 pub fn cast_kinds(&self) -> LocalTableInContext<ty::cast::CastKind> {
645 LocalTableInContext {
646 local_id_root: self.local_id_root,
647 data: &self.cast_kinds
651 pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<ty::cast::CastKind> {
652 LocalTableInContextMut {
653 local_id_root: self.local_id_root,
654 data: &mut self.cast_kinds
// Stable (incremental-compilation) hashing for TypeckTables.
659 impl<'gcx> HashStable<StableHashingContext<'gcx>> for TypeckTables<'gcx> {
660 fn hash_stable<W: StableHasherResult>(&self,
661 hcx: &mut StableHashingContext<'gcx>,
662 hasher: &mut StableHasher<W>) {
// Destructure exhaustively — presumably so that adding a field to
// TypeckTables without hashing it is a compile error; confirm against
// the full original source.
663 let ty::TypeckTables {
665 ref type_dependent_defs,
669 ref pat_binding_modes,
671 ref upvar_capture_map,
672 ref closure_kind_origins,
673 ref liberated_fn_sigs,
678 ref used_trait_imports,
683 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
684 type_dependent_defs.hash_stable(hcx, hasher);
685 node_types.hash_stable(hcx, hasher);
686 node_substs.hash_stable(hcx, hasher);
687 adjustments.hash_stable(hcx, hasher);
688 pat_binding_modes.hash_stable(hcx, hasher);
689 pat_adjustments.hash_stable(hcx, hasher);
// Upvar keys are hashed via stable DefPath hashes rather than raw ids.
690 hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
697 local_id_root.expect("trying to hash invalid TypeckTables");
699 let var_owner_def_id = DefId {
700 krate: local_id_root.krate,
703 let closure_def_id = DefId {
704 krate: local_id_root.krate,
705 index: closure_expr_id.to_def_id().index,
707 (hcx.def_path_hash(var_owner_def_id),
709 hcx.def_path_hash(closure_def_id))
712 closure_kind_origins.hash_stable(hcx, hasher);
713 liberated_fn_sigs.hash_stable(hcx, hasher);
714 fru_field_types.hash_stable(hcx, hasher);
715 cast_kinds.hash_stable(hcx, hasher);
716 used_trait_imports.hash_stable(hcx, hasher);
717 tainted_by_errors.hash_stable(hcx, hasher);
718 free_region_map.hash_stable(hcx, hasher);
723 impl<'tcx> CommonTypes<'tcx> {
// Pre-intern the primitive types and the always-available regions so
// later lookups are just pointer copies.
724 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
725 let mk = |sty| interners.intern_ty(sty, None);
// Intern a region directly: reuse an existing entry or allocate one.
726 let mk_region = |r| {
727 if let Some(r) = interners.region.borrow().get(&r) {
730 let r = interners.arena.alloc(r);
731 interners.region.borrow_mut().insert(Interned(r));
739 isize: mk(TyInt(ast::IntTy::Is)),
740 i8: mk(TyInt(ast::IntTy::I8)),
741 i16: mk(TyInt(ast::IntTy::I16)),
742 i32: mk(TyInt(ast::IntTy::I32)),
743 i64: mk(TyInt(ast::IntTy::I64)),
744 i128: mk(TyInt(ast::IntTy::I128)),
745 usize: mk(TyUint(ast::UintTy::Us)),
746 u8: mk(TyUint(ast::UintTy::U8)),
747 u16: mk(TyUint(ast::UintTy::U16)),
748 u32: mk(TyUint(ast::UintTy::U32)),
749 u64: mk(TyUint(ast::UintTy::U64)),
750 u128: mk(TyUint(ast::UintTy::U128)),
751 f32: mk(TyFloat(ast::FloatTy::F32)),
752 f64: mk(TyFloat(ast::FloatTy::F64)),
754 re_empty: mk_region(RegionKind::ReEmpty),
755 re_static: mk_region(RegionKind::ReStatic),
756 re_erased: mk_region(RegionKind::ReErased),
761 /// The central data structure of the compiler. It stores references
762 /// to the various **arenas** and also houses the results of the
763 /// various **compiler queries** that have been performed. See [the
764 /// README](README.md) for more details.
765 #[derive(Copy, Clone)]
766 pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
767 gcx: &'a GlobalCtxt<'gcx>,
// Interners for the (possibly local, inference-scoped) 'tcx lifetime.
768 interners: &'a CtxtInterners<'tcx>
// Deref so that `tcx.field` transparently reaches the GlobalCtxt fields.
771 impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
772 type Target = &'a GlobalCtxt<'gcx>;
773 fn deref(&self) -> &Self::Target {
// Crate-global compilation context shared by every TyCtxt handle.
778 pub struct GlobalCtxt<'tcx> {
779 global_arenas: &'tcx GlobalArenas<'tcx>,
780 global_interners: CtxtInterners<'tcx>,
782 cstore: &'tcx CrateStore,
784 pub sess: &'tcx Session,
786 pub dep_graph: DepGraph,
788 /// This provides access to the incr. comp. on-disk cache for query results.
789 /// Do not access this directly. It is only meant to be used by
790 /// `DepGraph::try_mark_green()` and the query infrastructure in `ty::maps`.
791 pub(crate) on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
793 /// Common types, pre-interned for your convenience.
794 pub types: CommonTypes<'tcx>,
796 /// Map indicating what traits are in scope for places where this
797 /// is relevant; generated by resolve.
798 trait_map: FxHashMap<DefIndex,
799 Rc<FxHashMap<ItemLocalId,
800 Rc<StableVec<TraitCandidate>>>>>,
802 /// Export map produced by name resolution.
803 export_map: FxHashMap<DefId, Rc<Vec<Export>>>,
805 named_region_map: NamedRegionMap,
807 pub hir: hir_map::Map<'tcx>,
809 /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
810 /// as well as all upstream crates. Only populated in incremental mode.
811 pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
813 pub maps: maps::Maps<'tcx>,
815 // Records the free variables referenced by every closure
816 // expression. Do not track deps for this, just recompute it from
817 // scratch every time.
818 freevars: FxHashMap<DefId, Rc<Vec<hir::Freevar>>>,
820 maybe_unused_trait_imports: FxHashSet<DefId>,
822 maybe_unused_extern_crates: Vec<(DefId, Span)>,
824 // Internal cache for metadata decoding. No need to track deps on this.
825 pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
827 /// Caches the results of trait selection. This cache is used
828 /// for things that do not have to do with the parameters in scope.
829 pub selection_cache: traits::SelectionCache<'tcx>,
831 /// Caches the results of trait evaluation. This cache is used
832 /// for things that do not have to do with the parameters in scope.
833 /// Merge this with `selection_cache`?
834 pub evaluation_cache: traits::EvaluationCache<'tcx>,
836 /// The definite name of the current crate after taking into account
837 /// attributes, commandline parameters, etc.
838 pub crate_name: Symbol,
840 /// Data layout specification for the current target.
841 pub data_layout: TargetDataLayout,
843 /// Used to prevent layout from recursing too deeply.
844 pub layout_depth: Cell<usize>,
846 /// Map from function to the `#[derive]` mode that it's defining. Only used
847 /// by `proc-macro` crates.
848 pub derive_macros: RefCell<NodeMap<Symbol>>,
850 stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,
852 layout_interner: RefCell<FxHashSet<&'tcx LayoutDetails>>,
854 /// A vector of every trait accessible in the whole crate
855 /// (i.e. including those from subcrates). This is used only for
856 /// error reporting, and so is lazily initialized and generally
857 /// shouldn't taint the common path (hence the RefCell).
858 pub all_traits: RefCell<Option<Vec<DefId>>>,
860 /// A general purpose channel to throw data out the back towards LLVM worker
863 /// This is intended to only get used during the trans phase of the compiler
864 /// when satisfying the query for a particular codegen unit. Internally in
865 /// the query it'll send data along this channel to get processed later.
866 pub tx_to_llvm_workers: mpsc::Sender<Box<Any + Send>>,
868 output_filenames: Arc<OutputFilenames>,
871 impl<'tcx> GlobalCtxt<'tcx> {
872 /// Get the global TyCtxt.
// Both lifetimes are 'tcx: the handle points at the global interners.
873 pub fn global_tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
876 interners: &self.global_interners
881 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
882 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
883 self.global_arenas.generics.alloc(generics)
886 pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
887 self.global_arenas.steal_mir.alloc(Steal::new(mir))
890 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
891 self.global_arenas.mir.alloc(mir)
894 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
895 self.global_arenas.tables.alloc(tables)
898 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
899 self.global_arenas.trait_def.alloc(def)
902 pub fn alloc_adt_def(self,
905 variants: Vec<ty::VariantDef>,
907 -> &'gcx ty::AdtDef {
908 let def = ty::AdtDef::new(self, did, kind, variants, repr);
909 self.global_arenas.adt_def.alloc(def)
912 pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
913 if bytes.is_empty() {
916 self.global_interners.arena.alloc_slice(bytes)
920 pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
921 -> &'tcx [&'tcx ty::Const<'tcx>] {
922 if values.is_empty() {
925 self.interners.arena.alloc_slice(values)
929 pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
930 -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
931 if values.is_empty() {
934 self.interners.arena.alloc_slice(values)
938 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
939 if let Some(st) = self.stability_interner.borrow().get(&stab) {
943 let interned = self.global_interners.arena.alloc(stab);
944 if let Some(prev) = self.stability_interner.borrow_mut().replace(interned) {
945 bug!("Tried to overwrite interned Stability: {:?}", prev)
950 pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
951 if let Some(layout) = self.layout_interner.borrow().get(&layout) {
955 let interned = self.global_arenas.layout.alloc(layout);
956 if let Some(prev) = self.layout_interner.borrow_mut().replace(interned) {
957 bug!("Tried to overwrite interned Layout: {:?}", prev)
962 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
963 value.lift_to_tcx(self)
966 /// Like lift, but only tries in the global tcx.
967 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
968 value.lift_to_tcx(self.global_tcx())
971 /// Returns true if self is the same as self.global_tcx().
972 fn is_global(self) -> bool {
973 let local = self.interners as *const _;
974 let global = &self.global_interners as *const _;
975 local as usize == global as usize
978 /// Create a type context and call the closure with a `TyCtxt` reference
979 /// to the context. The closure enforces that the type context and any interned
980 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
981 /// reference to the context, to allow formatting values that need it.
982 pub fn create_and_enter<F, R>(s: &'tcx Session,
983 cstore: &'tcx CrateStore,
984 local_providers: ty::maps::Providers<'tcx>,
985 extern_providers: ty::maps::Providers<'tcx>,
986 arenas: &'tcx GlobalArenas<'tcx>,
987 arena: &'tcx DroplessArena,
988 resolutions: ty::Resolutions,
989 named_region_map: resolve_lifetime::NamedRegionMap,
990 hir: hir_map::Map<'tcx>,
991 on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
993 tx: mpsc::Sender<Box<Any + Send>>,
994 output_filenames: &OutputFilenames,
996 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
998 let data_layout = TargetDataLayout::parse(s);
999 let interners = CtxtInterners::new(arena);
1000 let common_types = CommonTypes::new(&interners);
1001 let dep_graph = hir.dep_graph.clone();
1002 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1003 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1004 providers[LOCAL_CRATE] = local_providers;
1006 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1007 let upstream_def_path_tables: Vec<(CrateNum, Rc<_>)> = cstore
1010 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
1013 let def_path_tables = || {
1014 upstream_def_path_tables
1016 .map(|&(cnum, ref rc)| (cnum, &**rc))
1017 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1020 // Precompute the capacity of the hashmap so we don't have to
1021 // re-allocate when populating it.
1022 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();
1024 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1026 ::std::default::Default::default()
1029 for (cnum, def_path_table) in def_path_tables() {
1030 def_path_table.add_def_path_hashes_to(cnum, &mut map);
1038 let mut trait_map = FxHashMap();
1039 for (k, v) in resolutions.trait_map {
1040 let hir_id = hir.node_to_hir_id(k);
1041 let map = trait_map.entry(hir_id.owner)
1042 .or_insert_with(|| Rc::new(FxHashMap()));
1043 Rc::get_mut(map).unwrap()
1044 .insert(hir_id.local_id,
1045 Rc::new(StableVec::new(v)));
1047 let mut defs = FxHashMap();
1048 for (k, v) in named_region_map.defs {
1049 let hir_id = hir.node_to_hir_id(k);
1050 let map = defs.entry(hir_id.owner)
1051 .or_insert_with(|| Rc::new(FxHashMap()));
1052 Rc::get_mut(map).unwrap().insert(hir_id.local_id, v);
1054 let mut late_bound = FxHashMap();
1055 for k in named_region_map.late_bound {
1056 let hir_id = hir.node_to_hir_id(k);
1057 let map = late_bound.entry(hir_id.owner)
1058 .or_insert_with(|| Rc::new(FxHashSet()));
1059 Rc::get_mut(map).unwrap().insert(hir_id.local_id);
1061 let mut object_lifetime_defaults = FxHashMap();
1062 for (k, v) in named_region_map.object_lifetime_defaults {
1063 let hir_id = hir.node_to_hir_id(k);
1064 let map = object_lifetime_defaults.entry(hir_id.owner)
1065 .or_insert_with(|| Rc::new(FxHashMap()));
1066 Rc::get_mut(map).unwrap().insert(hir_id.local_id, Rc::new(v));
1069 tls::enter_global(GlobalCtxt {
1072 global_arenas: arenas,
1073 global_interners: interners,
1074 dep_graph: dep_graph.clone(),
1075 on_disk_query_result_cache,
1076 types: common_types,
1077 named_region_map: NamedRegionMap {
1080 object_lifetime_defaults,
1083 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1086 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1087 (hir.local_def_id(k), Rc::new(v))
1089 maybe_unused_trait_imports:
1090 resolutions.maybe_unused_trait_imports
1092 .map(|id| hir.local_def_id(id))
1094 maybe_unused_extern_crates:
1095 resolutions.maybe_unused_extern_crates
1097 .map(|(id, sp)| (hir.local_def_id(id), sp))
1100 def_path_hash_to_def_id,
1101 maps: maps::Maps::new(providers),
1102 rcache: RefCell::new(FxHashMap()),
1103 selection_cache: traits::SelectionCache::new(),
1104 evaluation_cache: traits::EvaluationCache::new(),
1105 crate_name: Symbol::intern(crate_name),
1107 layout_interner: RefCell::new(FxHashSet()),
1108 layout_depth: Cell::new(0),
1109 derive_macros: RefCell::new(NodeMap()),
1110 stability_interner: RefCell::new(FxHashSet()),
1111 all_traits: RefCell::new(None),
1112 tx_to_llvm_workers: tx,
1113 output_filenames: Arc::new(output_filenames.clone()),
// Ask the session whether an (expensive/fuel-limited) optimization should run.
// `msg` is a closure so the description string is only built when needed.
1117 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1118 let cname = self.crate_name(LOCAL_CRATE).as_str();
1119 self.sess.consider_optimizing(&cname, msg)
// Convenience accessor: language items of the local crate (via query).
1122 pub fn lang_items(self) -> Rc<middle::lang_items::LanguageItems> {
1123 self.get_lang_items(LOCAL_CRATE)
// Stability index of the local crate; deliberately untracked (see FIXME).
1126 pub fn stability(self) -> Rc<stability::Index<'tcx>> {
1127 // FIXME(#42293) we should actually track this, but fails too many tests
1129 self.dep_graph.with_ignore(|| {
1130 self.stability_index(LOCAL_CRATE)
// All crate numbers in the crate graph (via the `all_crate_nums` query).
1134 pub fn crates(self) -> Rc<Vec<CrateNum>> {
1135 self.all_crate_nums(LOCAL_CRATE)
// Look up the `DefKey` for a definition. Local ids go through the HIR map,
// non-local ones through the crate store (the dispatch condition between
// lines 1138/1140/1142 is elided in this listing; presumably `id.is_local()`
// as in `def_path_hash` below — confirm against the full source).
1138 pub fn def_key(self, id: DefId) -> hir_map::DefKey {
1140 self.hir.def_key(id)
1142 self.cstore.def_key(id)
1146 /// Convert a `DefId` into its fully expanded `DefPath` (every
1147 /// `DefId` is really just an interned def-path).
1149 /// Note that if `id` is not local to this crate, the result will
1150 /// be a non-local `DefPath`.
1151 pub fn def_path(self, id: DefId) -> hir_map::DefPath {
1153 self.hir.def_path(id)
1155 self.cstore.def_path(id)
// Stable hash of a def-path: local hashes come from `Definitions`,
// upstream ones from the crate store.
1160 pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
1161 if def_id.is_local() {
1162 self.hir.definitions().def_path_hash(def_id.index)
1164 self.cstore.def_path_hash(def_id)
// Render a def-path for debug output, including crate name and a truncated
// disambiguator. Uses untracked cstore accessors on purpose (see comment).
1168 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1169 // We are explicitly not going through queries here in order to get
1170 // crate name and disambiguator since this code is called from debug!()
1171 // statements within the query system and we'd run into endless
1172 // recursion otherwise.
1173 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1174 (self.crate_name.clone(),
1175 self.sess.local_crate_disambiguator())
1177 (self.cstore.crate_name_untracked(def_id.krate),
1178 self.cstore.crate_disambiguator_untracked(def_id.krate))
1183 // Don't print the whole crate disambiguator. That's just
1184 // annoying in debug output.
1185 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1186 self.def_path(def_id).to_string_no_crate())
// Version bytes the metadata encoder stamps into crate metadata.
1189 pub fn metadata_encoding_version(self) -> Vec<u8> {
1190 self.cstore.metadata_encoding_version().to_vec()
1193 // Note that this is *untracked* and should only be used within the query
1194 // system if the result is otherwise tracked through queries
1195 pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Rc<Any> {
1196 self.cstore.crate_data_as_rc_any(cnum)
// Build a fresh `StableHashingContext`; reads the krate under `with_ignore`
// so the access is not recorded as a dep-graph edge.
1199 pub fn create_stable_hashing_context(self) -> StableHashingContext<'gcx> {
1200 let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());
1202 StableHashingContext::new(self.sess,
1204 self.hir.definitions(),
1208 // This method makes sure that we have a DepNode and a Fingerprint for
1209 // every upstream crate. It needs to be called once right after the tcx is
1211 // With full-fledged red/green, the method will probably become unnecessary
1212 // as this will be done on-demand.
1213 pub fn allocate_metadata_dep_nodes(self) {
1214 // We cannot use the query versions of crates() and crate_hash(), since
1215 // those would need the DepNodes that we are allocating here.
1216 for cnum in self.cstore.crates_untracked() {
1217 let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
1218 let crate_hash = self.cstore.crate_hash_untracked(cnum);
1219 self.dep_graph.with_task(dep_node,
1222 |_, x| x // No transformation needed
1227 // This method exercises the `in_scope_traits_map` query for all possible
1228 // values so that we have their fingerprints available in the DepGraph.
1229 // This is only required as long as we still use the old dependency tracking
1230 // which needs to have the fingerprints of all input nodes beforehand.
1231 pub fn precompute_in_scope_traits_hashes(self) {
1232 for &def_index in self.trait_map.keys() {
1233 self.in_scope_traits_map(def_index);
// Serialize the on-disk query result cache for incremental compilation;
// generic over any `TyEncoder` so callers pick the output format.
1237 pub fn serialize_query_result_cache<E>(self,
1239 -> Result<(), E::Error>
1240 where E: ty::codec::TyEncoder
1242 self.on_disk_query_result_cache.serialize(self.global_tcx(), self.cstore, encoder)
// Metadata encoding entry point, only available on the global ('tcx = 'gcx)
// context; delegates to the crate store.
1247 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
1248 pub fn encode_metadata(self, link_meta: &LinkMeta, reachable: &NodeSet)
1249 -> (EncodedMetadata, EncodedMetadataHashes)
1251 self.cstore.encode_metadata(self, link_meta, reachable)
1255 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
1256 /// Call the closure with a local `TyCtxt` using the given arena.
1257 pub fn enter_local<F, R>(&self, arena: &'tcx DroplessArena, f: F) -> R
1258 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
// A fresh set of interners backed by the caller's arena gives the closure
// a shorter-lived ("local") type context layered over the global one.
1260 let interners = CtxtInterners::new(arena);
1261 tls::enter(self, &interners, f)
1265 /// A trait implemented for all X<'a> types which can be safely and
1266 /// efficiently converted to X<'tcx> as long as they are part of the
1267 /// provided TyCtxt<'tcx>.
1268 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1269 /// by looking them up in their respective interners.
1271 /// However, this is still not the best implementation as it does
1272 /// need to compare the components, even for interned values.
1273 /// It would be more efficient if TypedArena provided a way to
1274 /// determine whether the address is in the allocated range.
1276 /// None is returned if the value or one of the components is not part
1277 /// of the provided context.
1278 /// For Ty, None can be returned if either the type interner doesn't
1279 /// contain the TypeVariants key or if the address of the interned
1280 /// pointer differs. The latter case is possible if a primitive type,
1281 /// e.g. `()` or `u8`, was interned in a different context.
1282 pub trait Lift<'tcx> {
1284 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
// Lift impls for the interned types. Each follows the same shape:
// 1. if the value lives in this context's arena, transmute the lifetime
//    (sound because the arena outlives 'tcx);
// 2. otherwise retry against the global tcx;
// (the final `None` fallthrough arms are elided in this listing).
// Slice impls additionally short-circuit empty slices to `Slice::empty()`,
// since the canonical empty slice is context-independent.
1287 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1288 type Lifted = Ty<'tcx>;
1289 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1290 if tcx.interners.arena.in_arena(*self as *const _) {
1291 return Some(unsafe { mem::transmute(*self) });
1293 // Also try in the global tcx if we're not that.
1294 if !tcx.is_global() {
1295 self.lift_to_tcx(tcx.global_tcx())
1302 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1303 type Lifted = Region<'tcx>;
1304 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1305 if tcx.interners.arena.in_arena(*self as *const _) {
1306 return Some(unsafe { mem::transmute(*self) });
1308 // Also try in the global tcx if we're not that.
1309 if !tcx.is_global() {
1310 self.lift_to_tcx(tcx.global_tcx())
1317 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1318 type Lifted = &'tcx Const<'tcx>;
1319 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1320 if tcx.interners.arena.in_arena(*self as *const _) {
1321 return Some(unsafe { mem::transmute(*self) });
1323 // Also try in the global tcx if we're not that.
1324 if !tcx.is_global() {
1325 self.lift_to_tcx(tcx.global_tcx())
1332 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1333 type Lifted = &'tcx Substs<'tcx>;
1334 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1335 if self.len() == 0 {
1336 return Some(Slice::empty());
1338 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1339 return Some(unsafe { mem::transmute(*self) });
1341 // Also try in the global tcx if we're not that.
1342 if !tcx.is_global() {
1343 self.lift_to_tcx(tcx.global_tcx())
1350 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
1351 type Lifted = &'tcx Slice<Ty<'tcx>>;
1352 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1353 -> Option<&'tcx Slice<Ty<'tcx>>> {
1354 if self.len() == 0 {
1355 return Some(Slice::empty());
1357 if tcx.interners.arena.in_arena(*self as *const _) {
1358 return Some(unsafe { mem::transmute(*self) });
1360 // Also try in the global tcx if we're not that.
1361 if !tcx.is_global() {
1362 self.lift_to_tcx(tcx.global_tcx())
1369 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<ExistentialPredicate<'a>> {
1370 type Lifted = &'tcx Slice<ExistentialPredicate<'tcx>>;
1371 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1372 -> Option<&'tcx Slice<ExistentialPredicate<'tcx>>> {
1373 if self.is_empty() {
1374 return Some(Slice::empty());
1376 if tcx.interners.arena.in_arena(*self as *const _) {
1377 return Some(unsafe { mem::transmute(*self) });
1379 // Also try in the global tcx if we're not that.
1380 if !tcx.is_global() {
1381 self.lift_to_tcx(tcx.global_tcx())
1388 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Predicate<'a>> {
1389 type Lifted = &'tcx Slice<Predicate<'tcx>>;
1390 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1391 -> Option<&'tcx Slice<Predicate<'tcx>>> {
1392 if self.is_empty() {
1393 return Some(Slice::empty());
1395 if tcx.interners.arena.in_arena(*self as *const _) {
1396 return Some(unsafe { mem::transmute(*self) });
1398 // Also try in the global tcx if we're not that.
1399 if !tcx.is_global() {
1400 self.lift_to_tcx(tcx.global_tcx())
// Thread-local-storage plumbing for the type context. The tcx is stashed in
// a scoped TLS slot as a pair of raw pointers (gcx, interners) because the
// TLS slot cannot hold lifetime-parameterized types directly.
1408 use super::{CtxtInterners, GlobalCtxt, TyCtxt};
1410 use std::cell::Cell;
1414 /// Marker types used for the scoped TLS slot.
1415 /// The type context cannot be used directly because the scoped TLS
1416 /// in libstd doesn't allow types generic over lifetimes.
1417 enum ThreadLocalGlobalCtxt {}
1418 enum ThreadLocalInterners {}
1421 static TLS_TCX: Cell<Option<(*const ThreadLocalGlobalCtxt,
1422 *const ThreadLocalInterners)>> = Cell::new(None)
// Span `Debug` hook: pretty-print spans through the codemap while a tcx
// is available.
1425 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
1427 write!(f, "{}", tcx.sess.codemap().span_to_string(span))
// Install the span-debug hook, run `f` with the global tcx, then restore
// the previous hook.
1431 pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R
1432 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
1434 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
1435 let original_span_debug = span_dbg.get();
1436 span_dbg.set(span_debug);
1437 let result = enter(&gcx, &gcx.global_interners, f);
1438 span_dbg.set(original_span_debug);
// Push (gcx, interners) into the TLS slot for the duration of `f`,
// saving and restoring any previously-stored pair.
1443 pub fn enter<'a, 'gcx: 'tcx, 'tcx, F, R>(gcx: &'a GlobalCtxt<'gcx>,
1444 interners: &'a CtxtInterners<'tcx>,
1446 where F: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1448 let gcx_ptr = gcx as *const _ as *const ThreadLocalGlobalCtxt;
1449 let interners_ptr = interners as *const _ as *const ThreadLocalInterners;
1450 TLS_TCX.with(|tls| {
1451 let prev = tls.get();
1452 tls.set(Some((gcx_ptr, interners_ptr)));
1453 let ret = f(TyCtxt {
// Run `f` with the tcx currently stored in TLS; panics (unwrap) if no
// tcx has been entered on this thread.
1462 pub fn with<F, R>(f: F) -> R
1463 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1465 TLS_TCX.with(|tcx| {
1466 let (gcx, interners) = tcx.get().unwrap();
1467 let gcx = unsafe { &*(gcx as *const GlobalCtxt) };
1468 let interners = unsafe { &*(interners as *const CtxtInterners) };
// Like `with`, but passes `None` instead of panicking when no tcx is set.
1476 pub fn with_opt<F, R>(f: F) -> R
1477 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
1479 if TLS_TCX.with(|tcx| tcx.get().is_some()) {
1480 with(|v| f(Some(v)))
// Debug-statistics macro: for each listed TypeVariants variant, count how
// many interned types of that variant exist and what fraction contain
// region/type inference variables, then print a table.
1487 macro_rules! sty_debug_print {
1488 ($ctxt: expr, $($variant: ident),*) => {{
1489 // curious inner module to allow variant names to be used as
1491 #[allow(non_snake_case)]
1493 use ty::{self, TyCtxt};
1494 use ty::context::Interned;
1496 #[derive(Copy, Clone)]
1499 region_infer: usize,
1504 pub fn go(tcx: TyCtxt) {
1505 let mut total = DebugStat {
1507 region_infer: 0, ty_infer: 0, both_infer: 0,
1509 $(let mut $variant = total;)*
// Walk every type in the interner, bucketing by variant; primitive
// and error types are skipped (uninteresting for these stats).
1512 for &Interned(t) in tcx.interners.type_.borrow().iter() {
1513 let variant = match t.sty {
1514 ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
1515 ty::TyFloat(..) | ty::TyStr | ty::TyNever => continue,
1516 ty::TyError => /* unimportant */ continue,
1517 $(ty::$variant(..) => &mut $variant,)*
1519 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
1520 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
1524 if region { total.region_infer += 1; variant.region_infer += 1 }
1525 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
1526 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
1528 println!("Ty interner total ty region both");
1529 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
1530 {ty:4.1}% {region:5.1}% {both:4.1}%",
1531 stringify!($variant),
1532 uses = $variant.total,
1533 usespc = $variant.total as f64 * 100.0 / total.total as f64,
1534 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
1535 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
1536 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
1538 println!(" total {uses:6} \
1539 {ty:4.1}% {region:5.1}% {both:4.1}%",
1541 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
1542 region = total.region_infer as f64 * 100.0 / total.total as f64,
1543 both = total.both_infer as f64 * 100.0 / total.total as f64)
// Dump interner statistics (type variants plus the sizes of the other
// interner tables) to stdout; driven by the sty_debug_print! macro above.
1551 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
1552 pub fn print_debug_stats(self) {
1555 TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr, TyGenerator, TyForeign,
1556 TyDynamic, TyClosure, TyTuple, TyParam, TyInfer, TyProjection, TyAnon);
1558 println!("Substs interner: #{}", self.interners.substs.borrow().len());
1559 println!("Region interner: #{}", self.interners.region.borrow().len());
1560 println!("Stability interner: #{}", self.stability_interner.borrow().len());
1561 println!("Layout interner: #{}", self.layout_interner.borrow().len());
1566 /// An entry in an interner.
1567 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
1569 // NB: An Interned<Ty> compares and hashes as a sty.
1570 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
1571 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
1572 self.0.sty == other.0.sty
1576 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
1578 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
1579 fn hash<H: Hasher>(&self, s: &mut H) {
// The Borrow impls below let interner HashSets be probed with a key type
// (e.g. TypeVariants, a plain slice) without constructing an Interned.
1584 impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>> {
1585 fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> {
1590 // NB: An Interned<Slice<T>> compares and hashes as its elements.
1591 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
1592 fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
1593 self.0[..] == other.0[..]
1597 impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}
1599 impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
1600 fn hash<H: Hasher>(&self, s: &mut H) {
1605 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
1606 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
1611 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
1612 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
1617 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
1618 fn borrow<'a>(&'a self) -> &'a RegionKind {
1623 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
1624 for Interned<'tcx, Slice<ExistentialPredicate<'tcx>>> {
1625 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
1630 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
1631 for Interned<'tcx, Slice<Predicate<'tcx>>> {
1632 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
1637 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
1638 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
// Generates an interning method on TyCtxt for one interner table:
// look up the value in the local interner, then the global one; if absent,
// allocate in the appropriate arena (global when the value contains no
// inference bits, per $needs_infer) and record it.
1643 macro_rules! intern_method {
1644 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
1645 $alloc_method:ident,
1648 $needs_infer:expr) -> $ty:ty) => {
1649 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
1650 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
1652 let key = ($alloc_to_key)(&v);
1653 if let Some(i) = self.interners.$name.borrow().get(key) {
1656 if !self.is_global() {
1657 if let Some(i) = self.global_interners.$name.borrow().get(key) {
1663 // HACK(eddyb) Depend on flags being accurate to
1664 // determine that all contents are in the global tcx.
1665 // See comments on Lift for why we can't use that.
1666 if !($needs_infer)(&v) {
1667 if !self.is_global() {
1671 let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
1672 self.global_interners.$name.borrow_mut().insert(Interned(i));
1676 // Make sure we don't end up with inference
1677 // types/regions in the global tcx.
1678 if self.is_global() {
1679 bug!("Attempted to intern `{:?}` which contains \
1680 inference types/regions in the global type context",
1685 let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
1686 self.interners.$name.borrow_mut().insert(Interned(i));
// For values interned by direct allocation (no slice wrapper): derives the
// PartialEq/Eq/Hash impls for Interned<_, $ty> and then delegates to
// intern_method! with identity key/return conversions.
1693 macro_rules! direct_interners {
1694 ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
1695 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
1696 fn eq(&self, other: &Self) -> bool {
1701 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
1703 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
1704 fn hash<H: Hasher>(&self, s: &mut H) {
1709 intern_method!($lt_tcx, $name: $method($ty, alloc, |x| x, |x| x, $needs_infer) -> $ty);)+
// True if `x` contains anything (inference vars, skolemized regions) that
// must stay in the local tcx rather than being interned globally.
1713 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
1714 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Instantiate direct interners for regions and constants. Regions with
// inference/skolemized variants stay local; constants are local when their
// type or value carries local-only flags.
1717 direct_interners!('tcx,
1718 region: mk_region(|r| {
1720 &ty::ReVar(_) | &ty::ReSkolemized(..) => true,
1724 const_: mk_const(|c: &Const| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>
// Slice interners: intern &[T<'tcx>] as &'tcx Slice<T<'tcx>>. The
// &[T] -> &Slice<T> transmute relies on Slice being a transparent wrapper
// over its element storage; a slice stays local if any element does.
1727 macro_rules! slice_interners {
1728 ($($field:ident: $method:ident($ty:ident)),+) => (
1729 $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
1730 |xs: &[$ty]| -> &Slice<$ty> {
1731 unsafe { mem::transmute(xs) }
1732 }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
// The four slice-backed interner tables.
1737 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
1738 predicates: _intern_predicates(Predicate),
1739 type_list: _intern_type_list(Ty),
1740 substs: _intern_substs(Kind)
// Type-constructor ("mk_*") methods on TyCtxt.
1743 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
1744 /// Create an unsafe fn ty based on a safe fn ty.
1745 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
1746 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
1747 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
1748 unsafety: hir::Unsafety::Unsafe,
1753 // Interns a type/name combination, stores the resulting box in cx.interners,
1754 // and returns the box as cast to an unsafe ptr (see comments for Ty above).
1755 pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
// Pass the global interners along only when we are a local (inference)
// context, so fully-global types can be interned globally.
1756 let global_interners = if !self.is_global() {
1757 Some(&self.global_interners)
1761 self.interners.intern_ty(st, global_interners)
// Map AST-level machine integer/float type tokens onto the pre-interned
// common types; no interner lookup needed.
1764 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
1766 ast::IntTy::Is => self.types.isize,
1767 ast::IntTy::I8 => self.types.i8,
1768 ast::IntTy::I16 => self.types.i16,
1769 ast::IntTy::I32 => self.types.i32,
1770 ast::IntTy::I64 => self.types.i64,
1771 ast::IntTy::I128 => self.types.i128,
1775 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
1777 ast::UintTy::Us => self.types.usize,
1778 ast::UintTy::U8 => self.types.u8,
1779 ast::UintTy::U16 => self.types.u16,
1780 ast::UintTy::U32 => self.types.u32,
1781 ast::UintTy::U64 => self.types.u64,
1782 ast::UintTy::U128 => self.types.u128,
1786 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
1788 ast::FloatTy::F32 => self.types.f32,
1789 ast::FloatTy::F64 => self.types.f64,
1793 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str`, built from the pre-interned 'static region.
1797 pub fn mk_static_str(self) -> Ty<'tcx> {
1798 self.mk_imm_ref(self.types.re_static, self.mk_str())
1801 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1802 // take a copy of substs so that we own the vectors inside
1803 self.mk_ty(TyAdt(def, substs))
1806 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
1807 self.mk_ty(TyForeign(def_id))
// `Box<ty>`: resolves the OwnedBox lang item and substitutes `ty` in.
1810 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1811 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
1812 let adt_def = self.adt_def(def_id);
1813 let substs = self.mk_substs(iter::once(Kind::from(ty)));
1814 self.mk_ty(TyAdt(adt_def, substs))
// Pointer/reference constructors; the mut/imm variants are thin wrappers
// that fill in the mutability.
1817 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1818 self.mk_ty(TyRawPtr(tm))
1821 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1822 self.mk_ty(TyRef(r, tm))
1825 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
1826 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1829 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
1830 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
1833 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1834 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1837 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1838 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
1841 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
1842 self.mk_imm_ptr(self.mk_nil())
// Array/slice/tuple constructors. Array lengths are represented as interned
// `Const` values of type usize.
1845 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
1846 let n = ConstUsize::new(n, self.sess.target.usize_ty).unwrap();
1847 self.mk_array_const_usize(ty, n)
1850 pub fn mk_array_const_usize(self, ty: Ty<'tcx>, n: ConstUsize) -> Ty<'tcx> {
1851 self.mk_ty(TyArray(ty, self.mk_const(ty::Const {
1852 val: ConstVal::Integral(ConstInt::Usize(n)),
1853 ty: self.types.usize
1857 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1858 self.mk_ty(TySlice(ty))
// `defaulted` marks tuples created by defaulting of diverging type vars.
1861 pub fn intern_tup(self, ts: &[Ty<'tcx>], defaulted: bool) -> Ty<'tcx> {
1862 self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted))
1865 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I,
1866 defaulted: bool) -> I::Output {
1867 iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted)))
1870 pub fn mk_nil(self) -> Ty<'tcx> {
1871 self.intern_tup(&[], false)
// With the never_type feature, diverging expressions default to `!`
// (elided branch); otherwise to the defaulted unit tuple.
1874 pub fn mk_diverging_default(self) -> Ty<'tcx> {
1875 if self.sess.features.borrow().never_type {
1878 self.intern_tup(&[], true)
1882 pub fn mk_bool(self) -> Ty<'tcx> {
// Function, trait-object, projection, closure, and generator constructors.
1886 pub fn mk_fn_def(self, def_id: DefId,
1887 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1888 self.mk_ty(TyFnDef(def_id, substs))
1891 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
1892 self.mk_ty(TyFnPtr(fty))
1897 obj: ty::Binder<&'tcx Slice<ExistentialPredicate<'tcx>>>,
1898 reg: ty::Region<'tcx>
1900 self.mk_ty(TyDynamic(obj, reg))
1903 pub fn mk_projection(self,
1905 substs: &'tcx Substs<'tcx>)
1907 self.mk_ty(TyProjection(ProjectionTy {
1913 pub fn mk_closure(self,
1915 substs: ClosureSubsts<'tcx>)
1917 self.mk_closure_from_closure_substs(closure_id, substs)
1920 pub fn mk_closure_from_closure_substs(self,
1922 closure_substs: ClosureSubsts<'tcx>)
1924 self.mk_ty(TyClosure(closure_id, closure_substs))
1927 pub fn mk_generator(self,
1929 closure_substs: ClosureSubsts<'tcx>,
1930 interior: GeneratorInterior<'tcx>)
1932 self.mk_ty(TyGenerator(id, closure_substs, interior))
// Inference-variable and parameter type constructors.
1935 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
1936 self.mk_infer(TyVar(v))
1939 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
1940 self.mk_infer(IntVar(v))
1943 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
1944 self.mk_infer(FloatVar(v))
1947 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
1948 self.mk_ty(TyInfer(it))
1951 pub fn mk_param(self,
1953 name: Name) -> Ty<'tcx> {
1954 self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
// `Self` is represented as type parameter 0 with the `Self` keyword name.
1957 pub fn mk_self_type(self) -> Ty<'tcx> {
1958 self.mk_param(0, keywords::SelfType.name())
1961 pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
1962 self.mk_param(def.index, def.name)
1965 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1966 self.mk_ty(TyAnon(def_id, substs))
// Public wrappers over the macro-generated `_intern_*` slice methods,
// adding invariant checks (non-empty, sorted) and empty-slice fast paths.
1969 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
1970 -> &'tcx Slice<ExistentialPredicate<'tcx>> {
1971 assert!(!eps.is_empty());
// Predicates of a trait object must be in canonical (sorted) order.
1972 assert!(eps.windows(2).all(|w| w[0].cmp(self, &w[1]) != Ordering::Greater));
1973 self._intern_existential_predicates(eps)
1976 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
1977 -> &'tcx Slice<Predicate<'tcx>> {
1978 // FIXME consider asking the input slice to be sorted to avoid
1979 // re-interning permutations, in which case that would be asserted
1981 if preds.len() == 0 {
1982 // The macro-generated method below asserts we don't intern an empty slice.
1985 self._intern_predicates(preds)
1989 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice<Ty<'tcx>> {
1993 self._intern_type_list(ts)
1997 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice<Kind<'tcx>> {
2001 self._intern_substs(ts)
// Build a FnSig from an iterator of inputs plus the output type: the output
// is appended as the final element of the interned inputs_and_output list.
2005 pub fn mk_fn_sig<I>(self,
2009 unsafety: hir::Unsafety,
2011 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2013 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2015 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2016 inputs_and_output: self.intern_type_list(xs),
2017 variadic, unsafety, abi
// Iterator-consuming variants of the slice interners (via InternAs).
2021 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2022 &'tcx Slice<ExistentialPredicate<'tcx>>>>(self, iter: I)
2024 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2027 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2028 &'tcx Slice<Predicate<'tcx>>>>(self, iter: I)
2030 iter.intern_with(|xs| self.intern_predicates(xs))
2033 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2034 &'tcx Slice<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2035 iter.intern_with(|xs| self.intern_type_list(xs))
2038 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2039 &'tcx Slice<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2040 iter.intern_with(|xs| self.intern_substs(xs))
// Trait substs: the self type `s` followed by the remaining params `t`.
2043 pub fn mk_substs_trait(self,
2046 -> &'tcx Substs<'tcx>
2048 self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from))
// Lint emission helpers: build a diagnostic at the lint level computed for
// the given node and emit it (optionally with an attached note).
2051 pub fn lint_node<S: Into<MultiSpan>>(self,
2052 lint: &'static Lint,
2056 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
2059 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2060 lint: &'static Lint,
2065 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
// Walk up the HIR from `id` until a node with an explicit lint level is
// found; panics if the crate root is reached without one.
2070 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2071 -> (lint::Level, lint::LintSource)
2073 // Right now we insert a `with_ignore` node in the dep graph here to
2074 // ignore the fact that `lint_levels` below depends on the entire crate.
2075 // For now this'll prevent false positives of recompiling too much when
2076 // anything changes.
2078 // Once red/green incremental compilation lands we should be able to
2079 // remove this because while the crate changes often the lint level map
2080 // will change rarely.
2081 self.dep_graph.with_ignore(|| {
2082 let sets = self.lint_levels(LOCAL_CRATE);
2084 let hir_id = self.hir.definitions().node_to_hir_id(id);
2085 if let Some(pair) = sets.level_and_source(lint, hir_id) {
2088 let next = self.hir.get_parent_node(id);
2090 bug!("lint traversal reached the root of the crate");
2097 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
2098 lint: &'static Lint,
2102 -> DiagnosticBuilder<'tcx>
2104 let (level, src) = self.lint_level_at_node(lint, id);
2105 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
2108 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
2109 -> DiagnosticBuilder<'tcx>
2111 let (level, src) = self.lint_level_at_node(lint, id);
2112 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// HirId-keyed lookups: each query is keyed by the item owner (DefIndex),
// returning a per-owner map that is then indexed by the item-local id.
2115 pub fn in_scope_traits(self, id: HirId) -> Option<Rc<StableVec<TraitCandidate>>> {
2116 self.in_scope_traits_map(id.owner)
2117 .and_then(|map| map.get(&id.local_id).cloned())
2120 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
2121 self.named_region_map(id.owner)
2122 .and_then(|map| map.get(&id.local_id).cloned())
2125 pub fn is_late_bound(self, id: HirId) -> bool {
2126 self.is_late_bound_map(id.owner)
2127 .map(|set| set.contains(&id.local_id))
2131 pub fn object_lifetime_defaults(self, id: HirId)
2132 -> Option<Rc<Vec<ObjectLifetimeDefault>>>
2134 self.object_lifetime_defaults_map(id.owner)
2135 .and_then(|map| map.get(&id.local_id).cloned())
// Adapter trait: intern the collected contents of an iterator via a
// user-supplied interning closure. Blanket-implemented for any iterator
// whose items implement InternIteratorElement.
2139 pub trait InternAs<T: ?Sized, R> {
2141 fn intern_with<F>(self, f: F) -> Self::Output
2142 where F: FnOnce(&T) -> R;
2145 impl<I, T, R, E> InternAs<[T], R> for I
2146 where E: InternIteratorElement<T, R>,
2147 I: Iterator<Item=E> {
2148 type Output = E::Output;
2149 fn intern_with<F>(self, f: F) -> Self::Output
2150 where F: FnOnce(&[T]) -> R {
2151 E::intern_with(self, f)
// Per-element strategy for InternAs: owned items are collected directly,
// borrowed items are cloned, and Result items short-circuit on the first
// error (Output = Result<R, E>). AccumulateVec keeps small lists (<= 8)
// off the heap.
2155 pub trait InternIteratorElement<T, R>: Sized {
2157 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
2160 impl<T, R> InternIteratorElement<T, R> for T {
2162 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2163 f(&iter.collect::<AccumulateVec<[_; 8]>>())
2167 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
2171 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2172 f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
2176 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
2177 type Output = Result<R, E>;
2178 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2179 Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))
// Lifetime-resolution results, re-keyed from NodeId to (DefIndex,
// ItemLocalId) so each per-owner sub-map can be handed out by query.
2183 struct NamedRegionMap {
2184 defs: FxHashMap<DefIndex, Rc<FxHashMap<ItemLocalId, resolve_lifetime::Region>>>,
2185 late_bound: FxHashMap<DefIndex, Rc<FxHashSet<ItemLocalId>>>,
2186 object_lifetime_defaults:
2189 Rc<FxHashMap<ItemLocalId, Rc<Vec<ObjectLifetimeDefault>>>>,
2193 pub fn provide(providers: &mut ty::maps::Providers) {
2194 // FIXME(#44234) - almost all of these queries have no sub-queries and
2195 // therefore no actual inputs, they're just reading tables calculated in
2196 // resolve! Does this work? Unsure! That's what the issue is about
2197 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
2198 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
2199 providers.named_region_map = |tcx, id| tcx.gcx.named_region_map.defs.get(&id).cloned();
2200 providers.is_late_bound_map = |tcx, id| tcx.gcx.named_region_map.late_bound.get(&id).cloned();
2201 providers.object_lifetime_defaults_map = |tcx, id| {
2202 tcx.gcx.named_region_map.object_lifetime_defaults.get(&id).cloned()
2204 providers.crate_name = |tcx, id| {
2205 assert_eq!(id, LOCAL_CRATE);
2208 providers.get_lang_items = |tcx, id| {
2209 assert_eq!(id, LOCAL_CRATE);
2210 // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
2211 // graph here to ignore the fact that `get_lang_items` below depends on
2212 // the entire crate. For now this'll prevent false positives of
2213 // recompiling too much when anything changes.
2215 // Once red/green incremental compilation lands we should be able to
2216 // remove this because while the crate changes often the lint level map
2217 // will change rarely.
2218 tcx.dep_graph.with_ignore(|| Rc::new(middle::lang_items::collect(tcx)))
2220 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
2221 providers.maybe_unused_trait_import = |tcx, id| {
2222 tcx.maybe_unused_trait_imports.contains(&id)
2224 providers.maybe_unused_extern_crates = |tcx, cnum| {
2225 assert_eq!(cnum, LOCAL_CRATE);
2226 Rc::new(tcx.maybe_unused_extern_crates.clone())
2229 providers.stability_index = |tcx, cnum| {
2230 assert_eq!(cnum, LOCAL_CRATE);
2231 Rc::new(stability::Index::new(tcx))
2233 providers.lookup_stability = |tcx, id| {
2234 assert_eq!(id.krate, LOCAL_CRATE);
2235 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2236 tcx.stability().local_stability(id)
2238 providers.lookup_deprecation_entry = |tcx, id| {
2239 assert_eq!(id.krate, LOCAL_CRATE);
2240 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2241 tcx.stability().local_deprecation_entry(id)
2243 providers.extern_mod_stmt_cnum = |tcx, id| {
2244 let id = tcx.hir.as_local_node_id(id).unwrap();
2245 tcx.cstore.extern_mod_stmt_cnum_untracked(id)
2247 providers.all_crate_nums = |tcx, cnum| {
2248 assert_eq!(cnum, LOCAL_CRATE);
2249 Rc::new(tcx.cstore.crates_untracked())
2251 providers.postorder_cnums = |tcx, cnum| {
2252 assert_eq!(cnum, LOCAL_CRATE);
2253 Rc::new(tcx.cstore.postorder_cnums_untracked())
2255 providers.output_filenames = |tcx, cnum| {
2256 assert_eq!(cnum, LOCAL_CRATE);
2257 tcx.output_filenames.clone()
2259 providers.has_copy_closures = |tcx, cnum| {
2260 assert_eq!(cnum, LOCAL_CRATE);
2261 tcx.sess.features.borrow().copy_closures
2263 providers.has_clone_closures = |tcx, cnum| {
2264 assert_eq!(cnum, LOCAL_CRATE);
2265 tcx.sess.features.borrow().clone_closures
2267 providers.fully_normalize_monormophic_ty = |tcx, ty| {
2268 tcx.fully_normalize_associated_types_in(&ty)