1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
14 use dep_graph::{DepNode, DepConstructor};
15 use errors::DiagnosticBuilder;
17 use session::config::OutputFilenames;
19 use hir::{TraitCandidate, HirId, ItemLocalId};
20 use hir::def::{Def, Export};
21 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
22 use hir::map as hir_map;
23 use hir::map::DefPathHash;
24 use lint::{self, Lint};
25 use ich::{StableHashingContext, NodeIdHashingMode};
26 use infer::outlives::free_region_map::FreeRegionMap;
27 use middle::const_val::ConstVal;
28 use middle::cstore::{CrateStore, LinkMeta};
29 use middle::cstore::EncodedMetadata;
30 use middle::lang_items;
31 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
32 use middle::stability;
33 use mir::{Mir, interpret};
34 use ty::subst::{Kind, Substs};
38 use ty::{self, Ty, TypeAndMut};
39 use ty::{TyS, TypeVariants, Slice};
40 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorInterior, Region, Const};
41 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
43 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
44 use ty::TypeVariants::*;
45 use ty::layout::{LayoutDetails, TargetDataLayout};
49 use util::nodemap::{NodeMap, NodeSet, DefIdSet, ItemLocalMap};
50 use util::nodemap::{FxHashMap, FxHashSet};
51 use rustc_data_structures::accumulate_vec::AccumulateVec;
52 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
53 StableHasher, StableHasherResult,
55 use arena::{TypedArena, DroplessArena};
56 use rustc_const_math::{ConstInt, ConstUsize};
57 use rustc_data_structures::indexed_vec::IndexVec;
58 use rustc_data_structures::sync::Lrc;
60 use std::borrow::Borrow;
61 use std::cell::{Cell, RefCell};
62 use std::cmp::Ordering;
63 use std::collections::hash_map::{self, Entry};
64 use std::hash::{Hash, Hasher};
71 use syntax::ast::{self, Name, NodeId};
73 use syntax::codemap::MultiSpan;
74 use syntax::symbol::{Symbol, keywords};
79 pub struct AllArenas<'tcx> {
80 pub global: GlobalArenas<'tcx>,
81 pub interner: DroplessArena,
84 impl<'tcx> AllArenas<'tcx> {
85 pub fn new() -> Self {
87 global: GlobalArenas::new(),
88 interner: DroplessArena::new(),
94 pub struct GlobalArenas<'tcx> {
96 layout: TypedArena<LayoutDetails>,
99 generics: TypedArena<ty::Generics>,
100 trait_def: TypedArena<ty::TraitDef>,
101 adt_def: TypedArena<ty::AdtDef>,
102 steal_mir: TypedArena<Steal<Mir<'tcx>>>,
103 mir: TypedArena<Mir<'tcx>>,
104 tables: TypedArena<ty::TypeckTables<'tcx>>,
106 const_allocs: TypedArena<interpret::Allocation>,
109 impl<'tcx> GlobalArenas<'tcx> {
110 pub fn new() -> GlobalArenas<'tcx> {
112 layout: TypedArena::new(),
113 generics: TypedArena::new(),
114 trait_def: TypedArena::new(),
115 adt_def: TypedArena::new(),
116 steal_mir: TypedArena::new(),
117 mir: TypedArena::new(),
118 tables: TypedArena::new(),
119 const_allocs: TypedArena::new(),
124 pub struct CtxtInterners<'tcx> {
125 /// The arena that types, regions, etc are allocated from
126 arena: &'tcx DroplessArena,
128 /// Specifically use a speedy hash algorithm for these hash sets,
129 /// they're accessed quite often.
130 type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
131 type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
132 substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
133 region: RefCell<FxHashSet<Interned<'tcx, RegionKind>>>,
134 existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
135 predicates: RefCell<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
136 const_: RefCell<FxHashSet<Interned<'tcx, Const<'tcx>>>>,
139 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
140 fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
143 type_: RefCell::new(FxHashSet()),
144 type_list: RefCell::new(FxHashSet()),
145 substs: RefCell::new(FxHashSet()),
146 region: RefCell::new(FxHashSet()),
147 existential_predicates: RefCell::new(FxHashSet()),
148 predicates: RefCell::new(FxHashSet()),
149 const_: RefCell::new(FxHashSet()),
153 /// Intern a type. global_interners is Some only if this is
154 /// a local interner and global_interners is its counterpart.
155 fn intern_ty(&self, st: TypeVariants<'tcx>,
156 global_interners: Option<&CtxtInterners<'gcx>>)
159 let mut interner = self.type_.borrow_mut();
160 let global_interner = global_interners.map(|interners| {
161 interners.type_.borrow_mut()
163 if let Some(&Interned(ty)) = interner.get(&st) {
166 if let Some(ref interner) = global_interner {
167 if let Some(&Interned(ty)) = interner.get(&st) {
172 let flags = super::flags::FlagComputation::for_sty(&st);
173 let ty_struct = TyS {
176 region_depth: flags.depth,
179 // HACK(eddyb) Depend on flags being accurate to
180 // determine that all contents are in the global tcx.
181 // See comments on Lift for why we can't use that.
182 if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
183 if let Some(interner) = global_interners {
184 let ty_struct: TyS<'gcx> = unsafe {
185 mem::transmute(ty_struct)
187 let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
188 global_interner.unwrap().insert(Interned(ty));
192 // Make sure we don't end up with inference
193 // types/regions in the global tcx.
194 if global_interners.is_none() {
196 bug!("Attempted to intern `{:?}` which contains \
197 inference types/regions in the global type context",
202 // Don't be &mut TyS.
203 let ty: Ty<'tcx> = self.arena.alloc(ty_struct);
204 interner.insert(Interned(ty));
208 debug!("Interned type: {:?} Pointer: {:?}",
209 ty, ty as *const TyS);
215 pub struct CommonTypes<'tcx> {
235 pub re_empty: Region<'tcx>,
236 pub re_static: Region<'tcx>,
237 pub re_erased: Region<'tcx>,
240 pub struct LocalTableInContext<'a, V: 'a> {
241 local_id_root: Option<DefId>,
242 data: &'a ItemLocalMap<V>
245 /// Validate that the given HirId (respectively its `local_id` part) can be
246 /// safely used as a key in the tables of a TypeckTable. For that to be
247 /// the case, the HirId must have the same `owner` as all the other IDs in
248 /// this table (signified by `local_id_root`). Otherwise the HirId
249 /// would be in a different frame of reference and using its `local_id`
250 /// would result in lookup errors, or worse, in silently wrong data being
252 fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
255 if cfg!(debug_assertions) {
256 if let Some(local_id_root) = local_id_root {
257 if hir_id.owner != local_id_root.index {
258 ty::tls::with(|tcx| {
259 let node_id = tcx.hir
261 .find_node_for_hir_id(hir_id);
263 bug!("node {} with HirId::owner {:?} cannot be placed in \
264 TypeckTables with local_id_root {:?}",
265 tcx.hir.node_to_string(node_id),
266 DefId::local(hir_id.owner),
271 // We use "Null Object" TypeckTables in some of the analysis passes.
272 // These are just expected to be empty and their `local_id_root` is
273 // `None`. Therefore we cannot verify whether a given `HirId` would
274 // be a valid key for the given table. Instead we make sure that
275 // nobody tries to write to such a Null Object table.
277 bug!("access to invalid TypeckTables")
283 impl<'a, V> LocalTableInContext<'a, V> {
284 pub fn contains_key(&self, id: hir::HirId) -> bool {
285 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
286 self.data.contains_key(&id.local_id)
289 pub fn get(&self, id: hir::HirId) -> Option<&V> {
290 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
291 self.data.get(&id.local_id)
294 pub fn iter(&self) -> hash_map::Iter<hir::ItemLocalId, V> {
299 impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
302 fn index(&self, key: hir::HirId) -> &V {
303 self.get(key).expect("LocalTableInContext: key not found")
307 pub struct LocalTableInContextMut<'a, V: 'a> {
308 local_id_root: Option<DefId>,
309 data: &'a mut ItemLocalMap<V>
312 impl<'a, V> LocalTableInContextMut<'a, V> {
313 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
314 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
315 self.data.get_mut(&id.local_id)
318 pub fn entry(&mut self, id: hir::HirId) -> Entry<hir::ItemLocalId, V> {
319 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
320 self.data.entry(id.local_id)
323 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
324 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
325 self.data.insert(id.local_id, val)
328 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
329 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
330 self.data.remove(&id.local_id)
334 #[derive(RustcEncodable, RustcDecodable, Debug)]
335 pub struct TypeckTables<'tcx> {
336 /// The HirId::owner all ItemLocalIds in this table are relative to.
337 pub local_id_root: Option<DefId>,
339 /// Resolved definitions for `<T>::X` associated paths and
340 /// method calls, including those of overloaded operators.
341 type_dependent_defs: ItemLocalMap<Def>,
343 /// Stores the types for various nodes in the AST. Note that this table
344 /// is not guaranteed to be populated until after typeck. See
345 /// typeck::check::fn_ctxt for details.
346 node_types: ItemLocalMap<Ty<'tcx>>,
348 /// Stores the type parameters which were substituted to obtain the type
349 /// of this node. This only applies to nodes that refer to entities
350 /// parameterized by type parameters, such as generic fns, types, or
352 node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,
354 adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
356 /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
357 pat_binding_modes: ItemLocalMap<BindingMode>,
359 /// Stores the types which were implicitly dereferenced in pattern binding modes
360 /// for later usage in HAIR lowering. For example,
363 /// match &&Some(5i32) {
368 /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
371 /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
372 pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
375 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
377 /// Records the reasons that we picked the kind of each closure;
378 /// not all closures are present in the map.
379 closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,
381 /// For each fn, records the "liberated" types of its arguments
382 /// and return type. Liberated means that all bound regions
383 /// (including late-bound regions) are replaced with free
384 /// equivalents. This table is not used in trans (since regions
385 /// are erased there) and hence is not serialized to metadata.
386 liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,
388 /// For each FRU expression, record the normalized types of the fields
389 /// of the struct - this is needed because it is non-trivial to
390 /// normalize while preserving regions. This table is used only in
391 /// MIR construction and hence is not serialized to metadata.
392 fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,
394 /// Maps a cast expression to its kind. This is keyed on the
395 /// *from* expression of the cast, not the cast itself.
396 cast_kinds: ItemLocalMap<ty::cast::CastKind>,
398 /// Set of trait imports actually used in the method resolution.
399 /// This is used for warning unused imports. During type
400 /// checking, this `Lrc` should not be cloned: it must have a ref-count
401 /// of 1 so that we can insert things into the set mutably.
402 pub used_trait_imports: Lrc<DefIdSet>,
404 /// If any errors occurred while type-checking this body,
405 /// this field will be set to `true`.
406 pub tainted_by_errors: bool,
408 /// Stores the free-region relationships that were deduced from
409 /// its where clauses and parameter types. These are then
410 /// read-again by borrowck.
411 pub free_region_map: FreeRegionMap<'tcx>,
414 impl<'tcx> TypeckTables<'tcx> {
415 pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
418 type_dependent_defs: ItemLocalMap(),
419 node_types: ItemLocalMap(),
420 node_substs: ItemLocalMap(),
421 adjustments: ItemLocalMap(),
422 pat_binding_modes: ItemLocalMap(),
423 pat_adjustments: ItemLocalMap(),
424 upvar_capture_map: FxHashMap(),
425 closure_kind_origins: ItemLocalMap(),
426 liberated_fn_sigs: ItemLocalMap(),
427 fru_field_types: ItemLocalMap(),
428 cast_kinds: ItemLocalMap(),
429 used_trait_imports: Lrc::new(DefIdSet()),
430 tainted_by_errors: false,
431 free_region_map: FreeRegionMap::new(),
435 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
436 pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
438 hir::QPath::Resolved(_, ref path) => path.def,
439 hir::QPath::TypeRelative(..) => {
440 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
441 self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
446 pub fn type_dependent_defs(&self) -> LocalTableInContext<Def> {
447 LocalTableInContext {
448 local_id_root: self.local_id_root,
449 data: &self.type_dependent_defs
453 pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<Def> {
454 LocalTableInContextMut {
455 local_id_root: self.local_id_root,
456 data: &mut self.type_dependent_defs
460 pub fn node_types(&self) -> LocalTableInContext<Ty<'tcx>> {
461 LocalTableInContext {
462 local_id_root: self.local_id_root,
463 data: &self.node_types
467 pub fn node_types_mut(&mut self) -> LocalTableInContextMut<Ty<'tcx>> {
468 LocalTableInContextMut {
469 local_id_root: self.local_id_root,
470 data: &mut self.node_types
474 pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
475 match self.node_id_to_type_opt(id) {
478 bug!("node_id_to_type: no type for node `{}`",
480 let id = tcx.hir.definitions().find_node_for_hir_id(id);
481 tcx.hir.node_to_string(id)
487 pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
488 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
489 self.node_types.get(&id.local_id).cloned()
492 pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<&'tcx Substs<'tcx>> {
493 LocalTableInContextMut {
494 local_id_root: self.local_id_root,
495 data: &mut self.node_substs
499 pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
500 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
501 self.node_substs.get(&id.local_id).cloned().unwrap_or(Substs::empty())
504 pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
505 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
506 self.node_substs.get(&id.local_id).cloned()
509 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
510 // doesn't provide type parameter substitutions.
511 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
512 self.node_id_to_type(pat.hir_id)
515 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
516 self.node_id_to_type_opt(pat.hir_id)
519 // Returns the type of an expression as a monotype.
521 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
522 // some cases, we insert `Adjustment` annotations such as auto-deref or
523 // auto-ref. The type returned by this function does not consider such
524 // adjustments. See `expr_ty_adjusted()` instead.
526 // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
527 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
528 // instead of "fn(ty) -> T with T = isize".
529 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
530 self.node_id_to_type(expr.hir_id)
533 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
534 self.node_id_to_type_opt(expr.hir_id)
537 pub fn adjustments(&self) -> LocalTableInContext<Vec<ty::adjustment::Adjustment<'tcx>>> {
538 LocalTableInContext {
539 local_id_root: self.local_id_root,
540 data: &self.adjustments
544 pub fn adjustments_mut(&mut self)
545 -> LocalTableInContextMut<Vec<ty::adjustment::Adjustment<'tcx>>> {
546 LocalTableInContextMut {
547 local_id_root: self.local_id_root,
548 data: &mut self.adjustments
552 pub fn expr_adjustments(&self, expr: &hir::Expr)
553 -> &[ty::adjustment::Adjustment<'tcx>] {
554 validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
555 self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
558 /// Returns the type of `expr`, considering any `Adjustment`
559 /// entry recorded for that expression.
560 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
561 self.expr_adjustments(expr)
563 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
566 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
567 self.expr_adjustments(expr)
569 .map(|adj| adj.target)
570 .or_else(|| self.expr_ty_opt(expr))
573 pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
574 // Only paths and method calls/overloaded operators have
575 // entries in type_dependent_defs, ignore the former here.
576 if let hir::ExprPath(_) = expr.node {
580 match self.type_dependent_defs().get(expr.hir_id) {
581 Some(&Def::Method(_)) => true,
586 pub fn pat_binding_modes(&self) -> LocalTableInContext<BindingMode> {
587 LocalTableInContext {
588 local_id_root: self.local_id_root,
589 data: &self.pat_binding_modes
593 pub fn pat_binding_modes_mut(&mut self)
594 -> LocalTableInContextMut<BindingMode> {
595 LocalTableInContextMut {
596 local_id_root: self.local_id_root,
597 data: &mut self.pat_binding_modes
601 pub fn pat_adjustments(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
602 LocalTableInContext {
603 local_id_root: self.local_id_root,
604 data: &self.pat_adjustments,
608 pub fn pat_adjustments_mut(&mut self)
609 -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
610 LocalTableInContextMut {
611 local_id_root: self.local_id_root,
612 data: &mut self.pat_adjustments,
616 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
617 self.upvar_capture_map[&upvar_id]
620 pub fn closure_kind_origins(&self) -> LocalTableInContext<(Span, ast::Name)> {
621 LocalTableInContext {
622 local_id_root: self.local_id_root,
623 data: &self.closure_kind_origins
627 pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<(Span, ast::Name)> {
628 LocalTableInContextMut {
629 local_id_root: self.local_id_root,
630 data: &mut self.closure_kind_origins
634 pub fn liberated_fn_sigs(&self) -> LocalTableInContext<ty::FnSig<'tcx>> {
635 LocalTableInContext {
636 local_id_root: self.local_id_root,
637 data: &self.liberated_fn_sigs
641 pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<ty::FnSig<'tcx>> {
642 LocalTableInContextMut {
643 local_id_root: self.local_id_root,
644 data: &mut self.liberated_fn_sigs
648 pub fn fru_field_types(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
649 LocalTableInContext {
650 local_id_root: self.local_id_root,
651 data: &self.fru_field_types
655 pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
656 LocalTableInContextMut {
657 local_id_root: self.local_id_root,
658 data: &mut self.fru_field_types
662 pub fn cast_kinds(&self) -> LocalTableInContext<ty::cast::CastKind> {
663 LocalTableInContext {
664 local_id_root: self.local_id_root,
665 data: &self.cast_kinds
669 pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<ty::cast::CastKind> {
670 LocalTableInContextMut {
671 local_id_root: self.local_id_root,
672 data: &mut self.cast_kinds
677 impl<'gcx> HashStable<StableHashingContext<'gcx>> for TypeckTables<'gcx> {
678 fn hash_stable<W: StableHasherResult>(&self,
679 hcx: &mut StableHashingContext<'gcx>,
680 hasher: &mut StableHasher<W>) {
681 let ty::TypeckTables {
683 ref type_dependent_defs,
687 ref pat_binding_modes,
689 ref upvar_capture_map,
690 ref closure_kind_origins,
691 ref liberated_fn_sigs,
696 ref used_trait_imports,
701 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
702 type_dependent_defs.hash_stable(hcx, hasher);
703 node_types.hash_stable(hcx, hasher);
704 node_substs.hash_stable(hcx, hasher);
705 adjustments.hash_stable(hcx, hasher);
706 pat_binding_modes.hash_stable(hcx, hasher);
707 pat_adjustments.hash_stable(hcx, hasher);
708 hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
715 local_id_root.expect("trying to hash invalid TypeckTables");
717 let var_owner_def_id = DefId {
718 krate: local_id_root.krate,
721 let closure_def_id = DefId {
722 krate: local_id_root.krate,
723 index: closure_expr_id.to_def_id().index,
725 (hcx.def_path_hash(var_owner_def_id),
727 hcx.def_path_hash(closure_def_id))
730 closure_kind_origins.hash_stable(hcx, hasher);
731 liberated_fn_sigs.hash_stable(hcx, hasher);
732 fru_field_types.hash_stable(hcx, hasher);
733 cast_kinds.hash_stable(hcx, hasher);
734 used_trait_imports.hash_stable(hcx, hasher);
735 tainted_by_errors.hash_stable(hcx, hasher);
736 free_region_map.hash_stable(hcx, hasher);
741 impl<'tcx> CommonTypes<'tcx> {
742 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
743 let mk = |sty| interners.intern_ty(sty, None);
744 let mk_region = |r| {
745 if let Some(r) = interners.region.borrow().get(&r) {
748 let r = interners.arena.alloc(r);
749 interners.region.borrow_mut().insert(Interned(r));
757 isize: mk(TyInt(ast::IntTy::Isize)),
758 i8: mk(TyInt(ast::IntTy::I8)),
759 i16: mk(TyInt(ast::IntTy::I16)),
760 i32: mk(TyInt(ast::IntTy::I32)),
761 i64: mk(TyInt(ast::IntTy::I64)),
762 i128: mk(TyInt(ast::IntTy::I128)),
763 usize: mk(TyUint(ast::UintTy::Usize)),
764 u8: mk(TyUint(ast::UintTy::U8)),
765 u16: mk(TyUint(ast::UintTy::U16)),
766 u32: mk(TyUint(ast::UintTy::U32)),
767 u64: mk(TyUint(ast::UintTy::U64)),
768 u128: mk(TyUint(ast::UintTy::U128)),
769 f32: mk(TyFloat(ast::FloatTy::F32)),
770 f64: mk(TyFloat(ast::FloatTy::F64)),
772 re_empty: mk_region(RegionKind::ReEmpty),
773 re_static: mk_region(RegionKind::ReStatic),
774 re_erased: mk_region(RegionKind::ReErased),
779 /// The central data structure of the compiler. It stores references
780 /// to the various **arenas** and also houses the results of the
781 /// various **compiler queries** that have been performed. See the
782 /// [rustc guide] for more details.
784 /// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/ty.html
785 #[derive(Copy, Clone)]
786 pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
787 gcx: &'a GlobalCtxt<'gcx>,
788 interners: &'a CtxtInterners<'tcx>
791 impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
792 type Target = &'a GlobalCtxt<'gcx>;
793 fn deref(&self) -> &Self::Target {
798 pub struct GlobalCtxt<'tcx> {
799 global_arenas: &'tcx GlobalArenas<'tcx>,
800 global_interners: CtxtInterners<'tcx>,
802 cstore: &'tcx CrateStore,
804 pub sess: &'tcx Session,
806 pub dep_graph: DepGraph,
808 /// This provides access to the incr. comp. on-disk cache for query results.
809 /// Do not access this directly. It is only meant to be used by
810 /// `DepGraph::try_mark_green()` and the query infrastructure in `ty::maps`.
811 pub(crate) on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
813 /// Common types, pre-interned for your convenience.
814 pub types: CommonTypes<'tcx>,
816 /// Map indicating what traits are in scope for places where this
817 /// is relevant; generated by resolve.
818 trait_map: FxHashMap<DefIndex,
819 Lrc<FxHashMap<ItemLocalId,
820 Lrc<StableVec<TraitCandidate>>>>>,
822 /// Export map produced by name resolution.
823 export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,
825 pub hir: hir_map::Map<'tcx>,
827 /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
828 /// as well as all upstream crates. Only populated in incremental mode.
829 pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
831 pub maps: maps::Maps<'tcx>,
833 // Records the free variables refrenced by every closure
834 // expression. Do not track deps for this, just recompute it from
835 // scratch every time.
836 freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,
838 maybe_unused_trait_imports: FxHashSet<DefId>,
840 maybe_unused_extern_crates: Vec<(DefId, Span)>,
842 // Internal cache for metadata decoding. No need to track deps on this.
843 pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
845 /// Caches the results of trait selection. This cache is used
846 /// for things that do not have to do with the parameters in scope.
847 pub selection_cache: traits::SelectionCache<'tcx>,
849 /// Caches the results of trait evaluation. This cache is used
850 /// for things that do not have to do with the parameters in scope.
851 /// Merge this with `selection_cache`?
852 pub evaluation_cache: traits::EvaluationCache<'tcx>,
854 /// The definite name of the current crate after taking into account
855 /// attributes, commandline parameters, etc.
856 pub crate_name: Symbol,
858 /// Data layout specification for the current target.
859 pub data_layout: TargetDataLayout,
861 /// Used to prevent layout from recursing too deeply.
862 pub layout_depth: Cell<usize>,
864 /// Map from function to the `#[derive]` mode that it's defining. Only used
865 /// by `proc-macro` crates.
866 pub derive_macros: RefCell<NodeMap<Symbol>>,
868 stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,
870 pub interpret_interner: RefCell<InterpretInterner<'tcx>>,
872 layout_interner: RefCell<FxHashSet<&'tcx LayoutDetails>>,
874 /// A vector of every trait accessible in the whole crate
875 /// (i.e. including those from subcrates). This is used only for
876 /// error reporting, and so is lazily initialized and generally
877 /// shouldn't taint the common path (hence the RefCell).
878 pub all_traits: RefCell<Option<Vec<DefId>>>,
880 /// A general purpose channel to throw data out the back towards LLVM worker
883 /// This is intended to only get used during the trans phase of the compiler
884 /// when satisfying the query for a particular codegen unit. Internally in
885 /// the query it'll send data along this channel to get processed later.
886 pub tx_to_llvm_workers: mpsc::Sender<Box<Any + Send>>,
888 output_filenames: Arc<OutputFilenames>,
891 /// Everything needed to efficiently work with interned allocations
892 #[derive(Debug, Default)]
893 pub struct InterpretInterner<'tcx> {
894 /// Stores the value of constants (and deduplicates the actual memory)
895 allocs: FxHashSet<&'tcx interpret::Allocation>,
897 /// Allows obtaining function instance handles via a unique identifier
898 functions: FxHashMap<interpret::AllocId, Instance<'tcx>>,
900 /// Inverse map of `interpret_functions`.
901 /// Used so we don't allocate a new pointer every time we need one
902 function_cache: FxHashMap<Instance<'tcx>, interpret::AllocId>,
904 /// Allows obtaining const allocs via a unique identifier
905 alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,
907 /// The AllocId to assign to the next new regular allocation.
908 /// Always incremented, never gets smaller.
909 next_id: interpret::AllocId,
911 /// Allows checking whether a constant already has an allocation
912 alloc_cache: FxHashMap<interpret::GlobalId<'tcx>, interpret::AllocId>,
914 /// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
915 /// allocations for string and bytestring literals.
916 literal_alloc_cache: FxHashMap<Vec<u8>, interpret::AllocId>,
919 impl<'tcx> InterpretInterner<'tcx> {
920 pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> interpret::AllocId {
921 if let Some(&alloc_id) = self.function_cache.get(&instance) {
924 let id = self.reserve();
925 debug!("creating fn ptr: {}", id);
926 self.functions.insert(id, instance);
927 self.function_cache.insert(instance, id);
933 id: interpret::AllocId,
934 ) -> Option<Instance<'tcx>> {
935 self.functions.get(&id).cloned()
940 id: interpret::AllocId,
941 ) -> Option<&'tcx interpret::Allocation> {
942 self.alloc_by_id.get(&id).cloned()
947 global_id: interpret::GlobalId<'tcx>,
948 ) -> Option<interpret::AllocId> {
949 self.alloc_cache.get(&global_id).cloned()
954 global_id: interpret::GlobalId<'tcx>,
955 ptr: interpret::AllocId,
957 if let Some(old) = self.alloc_cache.insert(global_id, ptr) {
958 bug!("tried to cache {:?}, but was already existing as {:#?}", global_id, old);
962 pub fn intern_at_reserved(
964 id: interpret::AllocId,
965 alloc: &'tcx interpret::Allocation,
967 if let Some(old) = self.alloc_by_id.insert(id, alloc) {
968 bug!("tried to intern allocation at {}, but was already existing as {:#?}", id, old);
972 /// obtains a new allocation ID that can be referenced but does not
973 /// yet have an allocation backing it.
976 ) -> interpret::AllocId {
977 let next = self.next_id;
978 self.next_id.0 = self.next_id.0
980 .expect("You overflowed a u64 by incrementing by 1... \
981 You've just earned yourself a free drink if we ever meet. \
982 Seriously, how did you do that?!");
987 impl<'tcx> GlobalCtxt<'tcx> {
988 /// Get the global TyCtxt.
989 pub fn global_tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
992 interners: &self.global_interners
997 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
998 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
999 self.global_arenas.generics.alloc(generics)
1002 pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
1003 self.global_arenas.steal_mir.alloc(Steal::new(mir))
1006 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
1007 self.global_arenas.mir.alloc(mir)
1010 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
1011 self.global_arenas.tables.alloc(tables)
1014 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
1015 self.global_arenas.trait_def.alloc(def)
1018 pub fn alloc_adt_def(self,
1021 variants: Vec<ty::VariantDef>,
1023 -> &'gcx ty::AdtDef {
1024 let def = ty::AdtDef::new(self, did, kind, variants, repr);
1025 self.global_arenas.adt_def.alloc(def)
1028 pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
1029 if bytes.is_empty() {
1032 self.global_interners.arena.alloc_slice(bytes)
1036 pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
1037 -> &'tcx [&'tcx ty::Const<'tcx>] {
1038 if values.is_empty() {
1041 self.interners.arena.alloc_slice(values)
1045 pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
1046 -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
1047 if values.is_empty() {
1050 self.interners.arena.alloc_slice(values)
// Interns an `interpret::Allocation`: reuses an existing equal allocation
// when present, otherwise moves the value into the global arena and
// records it. The `bug!` guards the invariant that we never replace an
// already-interned allocation.
1054 pub fn intern_const_alloc(
1056 alloc: interpret::Allocation,
1057 ) -> &'gcx interpret::Allocation {
1058 if let Some(alloc) = self.interpret_interner.borrow().allocs.get(&alloc) {
1062 let interned = self.global_arenas.const_allocs.alloc(alloc);
1063 if let Some(prev) = self.interpret_interner.borrow_mut().allocs.replace(interned) {
1064 bug!("Tried to overwrite interned Allocation: {:#?}", prev)
1069 /// Allocates a byte or string literal for `mir::interpret`
1070 pub fn allocate_cached(self, bytes: &[u8]) -> interpret::AllocId {
1071 // check whether we already allocated this literal or a constant with the same memory
1072 if let Some(&alloc_id) = self.interpret_interner.borrow().literal_alloc_cache.get(bytes) {
1075 // create an allocation that just contains these bytes
1076 let alloc = interpret::Allocation::from_bytes(bytes);
1077 let alloc = self.intern_const_alloc(alloc);
// The immutable borrow taken above must already be released here, or this
// `borrow_mut()` would panic at runtime (RefCell dynamic borrow rules).
1079 let mut int = self.interpret_interner.borrow_mut();
1080 // the next unique id
1081 let id = int.reserve();
1082 // make the allocation identifiable
1083 int.alloc_by_id.insert(id, alloc);
1084 // cache it for the future
1085 int.literal_alloc_cache.insert(bytes.to_owned(), id);
// Interns an `attr::Stability`: returns the canonical copy when one
// already exists, otherwise arena-allocates and registers it. The `bug!`
// asserts we never silently overwrite a previously interned value.
1089 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
1090 if let Some(st) = self.stability_interner.borrow().get(&stab) {
1094 let interned = self.global_interners.arena.alloc(stab);
1095 if let Some(prev) = self.stability_interner.borrow_mut().replace(interned) {
1096 bug!("Tried to overwrite interned Stability: {:?}", prev)
// Identical interning scheme for computed type layouts.
1101 pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
1102 if let Some(layout) = self.layout_interner.borrow().get(&layout) {
1106 let interned = self.global_arenas.layout.alloc(layout);
1107 if let Some(prev) = self.layout_interner.borrow_mut().replace(interned) {
1108 bug!("Tried to overwrite interned Layout: {:?}", prev)
// Re-expresses `value` with this context's lifetime, if it belongs to
// this context (see the `Lift` trait below for the mechanism).
1113 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
1114 value.lift_to_tcx(self)
1117 /// Like lift, but only tries in the global tcx.
1118 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
1119 value.lift_to_tcx(self.global_tcx())
1122 /// Returns true if self is the same as self.global_tcx().
1123 fn is_global(self) -> bool {
// Decided by interner address identity: a local inference context owns a
// distinct `CtxtInterners`, while the global context points at
// `global_interners` itself.
1124 let local = self.interners as *const _;
1125 let global = &self.global_interners as *const _;
1126 local as usize == global as usize
1129 /// Create a type context and call the closure with a `TyCtxt` reference
1130 /// to the context. The closure enforces that the type context and any interned
1131 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
1132 /// reference to the context, to allow formatting values that need it.
1133 pub fn create_and_enter<F, R>(s: &'tcx Session,
1134 cstore: &'tcx CrateStore,
1135 local_providers: ty::maps::Providers<'tcx>,
1136 extern_providers: ty::maps::Providers<'tcx>,
1137 arenas: &'tcx AllArenas<'tcx>,
1138 resolutions: ty::Resolutions,
1139 hir: hir_map::Map<'tcx>,
1140 on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
1142 tx: mpsc::Sender<Box<Any + Send>>,
1143 output_filenames: &OutputFilenames,
1145 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
1147 let data_layout = TargetDataLayout::parse(s);
1148 let interners = CtxtInterners::new(&arenas.interner);
1149 let common_types = CommonTypes::new(&interners);
1150 let dep_graph = hir.dep_graph.clone();
// One query-provider table per crate: every slot starts out as the extern
// providers, then the local crate's slot is overwritten.
1151 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1152 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1153 providers[LOCAL_CRATE] = local_providers;
// The DefPathHash -> DefId reverse map is only built when dependency
// tracking is enabled; it covers the local crate plus all upstream crates.
1155 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1156 let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
1159 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
1162 let def_path_tables = || {
1163 upstream_def_path_tables
1165 .map(|&(cnum, ref rc)| (cnum, &**rc))
1166 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1169 // Precompute the capacity of the hashmap so we don't have to
1170 // re-allocate when populating it.
1171 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();
1173 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1175 ::std::default::Default::default()
1178 for (cnum, def_path_table) in def_path_tables() {
1179 def_path_table.add_def_path_hashes_to(cnum, &mut map);
// Re-key the resolver's NodeId-based trait map into the two-level
// HirId (owner DefIndex -> ItemLocalId) scheme used by queries.
1187 let mut trait_map = FxHashMap();
1188 for (k, v) in resolutions.trait_map {
1189 let hir_id = hir.node_to_hir_id(k);
1190 let map = trait_map.entry(hir_id.owner)
1191 .or_insert_with(|| Lrc::new(FxHashMap()));
// unwrap is fine: while this map is being built the Lrc is still uniquely
// owned, so `get_mut` cannot fail.
1192 Lrc::get_mut(map).unwrap()
1193 .insert(hir_id.local_id,
1194 Lrc::new(StableVec::new(v)));
// Construct the single GlobalCtxt and install it in thread-local storage
// for the duration of the user closure.
1197 tls::enter_global(GlobalCtxt {
1200 global_arenas: &arenas.global,
1201 global_interners: interners,
1202 dep_graph: dep_graph.clone(),
1203 on_disk_query_result_cache,
1204 types: common_types,
1206 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1209 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1210 (hir.local_def_id(k), Lrc::new(v))
1212 maybe_unused_trait_imports:
1213 resolutions.maybe_unused_trait_imports
1215 .map(|id| hir.local_def_id(id))
1217 maybe_unused_extern_crates:
1218 resolutions.maybe_unused_extern_crates
1220 .map(|(id, sp)| (hir.local_def_id(id), sp))
1223 def_path_hash_to_def_id,
1224 maps: maps::Maps::new(providers),
1225 rcache: RefCell::new(FxHashMap()),
1226 selection_cache: traits::SelectionCache::new(),
1227 evaluation_cache: traits::EvaluationCache::new(),
1228 crate_name: Symbol::intern(crate_name),
1230 layout_interner: RefCell::new(FxHashSet()),
1231 layout_depth: Cell::new(0),
1232 derive_macros: RefCell::new(NodeMap()),
1233 stability_interner: RefCell::new(FxHashSet()),
1234 interpret_interner: Default::default(),
1235 all_traits: RefCell::new(None),
1236 tx_to_llvm_workers: tx,
1237 output_filenames: Arc::new(output_filenames.clone()),
// Asks the session whether an optional optimization should run for this
// crate; `msg` lazily produces a description for diagnostics.
1241 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1242 let cname = self.crate_name(LOCAL_CRATE).as_str();
1243 self.sess.consider_optimizing(&cname, msg)
// Convenience wrappers over per-crate queries, applied to LOCAL_CRATE.
1246 pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
1247 self.get_lang_items(LOCAL_CRATE)
1250 pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
1251 self.stability_index(LOCAL_CRATE)
1254 pub fn crates(self) -> Lrc<Vec<CrateNum>> {
1255 self.all_crate_nums(LOCAL_CRATE)
// DefId metadata lookups: local ids resolve through the HIR map,
// non-local ids through the crate store (the local/non-local branch
// structure is partially elided in this excerpt).
1258 pub fn def_key(self, id: DefId) -> hir_map::DefKey {
1260 self.hir.def_key(id)
1262 self.cstore.def_key(id)
1266 /// Convert a `DefId` into its fully expanded `DefPath` (every
1267 /// `DefId` is really just an interned def-path).
1269 /// Note that if `id` is not local to this crate, the result will
1270 /// be a non-local `DefPath`.
1271 pub fn def_path(self, id: DefId) -> hir_map::DefPath {
1273 self.hir.def_path(id)
1275 self.cstore.def_path(id)
1280 pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
1281 if def_id.is_local() {
1282 self.hir.definitions().def_path_hash(def_id.index)
1284 self.cstore.def_path_hash(def_id)
// Human-readable def-path for debug output only; deliberately avoids the
// query system (see comment below).
1288 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1289 // We are explicitly not going through queries here in order to get
1290 // crate name and disambiguator since this code is called from debug!()
1291 // statements within the query system and we'd run into endless
1292 // recursion otherwise.
1293 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1294 (self.crate_name.clone(),
1295 self.sess.local_crate_disambiguator())
1297 (self.cstore.crate_name_untracked(def_id.krate),
1298 self.cstore.crate_disambiguator_untracked(def_id.krate))
1303 // Don't print the whole crate disambiguator. That's just
1304 // annoying in debug output.
1305 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1306 self.def_path(def_id).to_string_no_crate())
// Version tag used to validate crate metadata compatibility.
1309 pub fn metadata_encoding_version(self) -> Vec<u8> {
1310 self.cstore.metadata_encoding_version().to_vec()
1313 // Note that this is *untracked* and should only be used within the query
1314 // system if the result is otherwise tracked through queries
1315 pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<Any> {
1316 self.cstore.crate_data_as_rc_any(cnum)
// Builds a fresh incremental-hashing context over the whole crate.
1319 pub fn create_stable_hashing_context(self) -> StableHashingContext<'gcx> {
// Reading the whole krate is hidden from dependency tracking here on
// purpose; the hashing context handles its own dependencies.
1320 let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());
1322 StableHashingContext::new(self.sess,
1324 self.hir.definitions(),
1328 // This method makes sure that we have a DepNode and a Fingerprint for
1329 // every upstream crate. It needs to be called once right after the tcx is
1331 // With full-fledged red/green, the method will probably become unnecessary
1332 // as this will be done on-demand.
1333 pub fn allocate_metadata_dep_nodes(self) {
1334 // We cannot use the query versions of crates() and crate_hash(), since
1335 // those would need the DepNodes that we are allocating here.
1336 for cnum in self.cstore.crates_untracked() {
1337 let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
1338 let crate_hash = self.cstore.crate_hash_untracked(cnum);
1339 self.dep_graph.with_task(dep_node,
1342 |_, x| x // No transformation needed
1347 // This method exercises the `in_scope_traits_map` query for all possible
1348 // values so that we have their fingerprints available in the DepGraph.
1349 // This is only required as long as we still use the old dependency tracking
1350 // which needs to have the fingerprints of all input nodes beforehand.
1351 pub fn precompute_in_scope_traits_hashes(self) {
1352 for &def_index in self.trait_map.keys() {
// Result is discarded: only the side effect of recording the query in
// the dep-graph matters here.
1353 self.in_scope_traits_map(def_index);
// Persists the on-disk query-result cache (incremental compilation)
// through the caller-supplied encoder.
1357 pub fn serialize_query_result_cache<E>(self,
1359 -> Result<(), E::Error>
1360 where E: ty::codec::TyEncoder
1362 self.on_disk_query_result_cache.serialize(self.global_tcx(), encoder)
1367 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Delegates crate-metadata encoding to the crate store. Only available
// on a fully global context (note 'tcx used for both lifetimes above).
1368 pub fn encode_metadata(self, link_meta: &LinkMeta, reachable: &NodeSet)
1371 self.cstore.encode_metadata(self, link_meta, reachable)
1375 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
1376 /// Call the closure with a local `TyCtxt` using the given arena.
1377 pub fn enter_local<F, R>(&self, arena: &'tcx DroplessArena, f: F) -> R
1378 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
// A fresh interner set backed by the caller's short-lived arena is what
// makes this a "local" (inference) context rather than the global one.
1380 let interners = CtxtInterners::new(arena);
1381 tls::enter(self, &interners, f)
1385 /// A trait implemented for all X<'a> types which can be safely and
1386 /// efficiently converted to X<'tcx> as long as they are part of the
1387 /// provided TyCtxt<'tcx>.
1388 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1389 /// by looking them up in their respective interners.
1391 /// However, this is still not the best implementation as it does
1392 /// need to compare the components, even for interned values.
1393 /// It would be more efficient if TypedArena provided a way to
1394 /// determine whether the address is in the allocated range.
1396 /// None is returned if the value or one of the components is not part
1397 /// of the provided context.
1398 /// For Ty, None can be returned if either the type interner doesn't
1399 /// contain the TypeVariants key or if the address of the interned
1400 /// pointer differs. The latter case is possible if a primitive type,
1401 /// e.g. `()` or `u8`, was interned in a different context.
1402 pub trait Lift<'tcx> {
// The same value, re-expressed with the target lifetime 'tcx.
1404 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
// All `Lift` impls below share one pattern: if the value lives in this
// context's interner arena, transmute it to the target lifetime (sound
// because arena membership proves it lives at least as long as 'tcx);
// empty slices short-circuit to the shared `Slice::empty()`; otherwise
// the lookup is retried against the global tcx. NOTE(review): the final
// `None` fall-through arms are elided in this excerpt.
1407 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1408 type Lifted = Ty<'tcx>;
1409 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1410 if tcx.interners.arena.in_arena(*self as *const _) {
1411 return Some(unsafe { mem::transmute(*self) });
1413 // Also try in the global tcx if we're not that.
1414 if !tcx.is_global() {
1415 self.lift_to_tcx(tcx.global_tcx())
1422 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1423 type Lifted = Region<'tcx>;
1424 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1425 if tcx.interners.arena.in_arena(*self as *const _) {
1426 return Some(unsafe { mem::transmute(*self) });
1428 // Also try in the global tcx if we're not that.
1429 if !tcx.is_global() {
1430 self.lift_to_tcx(tcx.global_tcx())
1437 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1438 type Lifted = &'tcx Const<'tcx>;
1439 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1440 if tcx.interners.arena.in_arena(*self as *const _) {
1441 return Some(unsafe { mem::transmute(*self) });
1443 // Also try in the global tcx if we're not that.
1444 if !tcx.is_global() {
1445 self.lift_to_tcx(tcx.global_tcx())
1452 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1453 type Lifted = &'tcx Substs<'tcx>;
1454 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1455 if self.len() == 0 {
1456 return Some(Slice::empty());
// Note: the arena check here is on the element slice, not the Substs
// reference itself.
1458 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1459 return Some(unsafe { mem::transmute(*self) });
1461 // Also try in the global tcx if we're not that.
1462 if !tcx.is_global() {
1463 self.lift_to_tcx(tcx.global_tcx())
1470 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
1471 type Lifted = &'tcx Slice<Ty<'tcx>>;
1472 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1473 -> Option<&'tcx Slice<Ty<'tcx>>> {
1474 if self.len() == 0 {
1475 return Some(Slice::empty());
1477 if tcx.interners.arena.in_arena(*self as *const _) {
1478 return Some(unsafe { mem::transmute(*self) });
1480 // Also try in the global tcx if we're not that.
1481 if !tcx.is_global() {
1482 self.lift_to_tcx(tcx.global_tcx())
1489 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<ExistentialPredicate<'a>> {
1490 type Lifted = &'tcx Slice<ExistentialPredicate<'tcx>>;
1491 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1492 -> Option<&'tcx Slice<ExistentialPredicate<'tcx>>> {
1493 if self.is_empty() {
1494 return Some(Slice::empty());
1496 if tcx.interners.arena.in_arena(*self as *const _) {
1497 return Some(unsafe { mem::transmute(*self) });
1499 // Also try in the global tcx if we're not that.
1500 if !tcx.is_global() {
1501 self.lift_to_tcx(tcx.global_tcx())
1508 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Predicate<'a>> {
1509 type Lifted = &'tcx Slice<Predicate<'tcx>>;
1510 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1511 -> Option<&'tcx Slice<Predicate<'tcx>>> {
1512 if self.is_empty() {
1513 return Some(Slice::empty());
1515 if tcx.interners.arena.in_arena(*self as *const _) {
1516 return Some(unsafe { mem::transmute(*self) });
1518 // Also try in the global tcx if we're not that.
1519 if !tcx.is_global() {
1520 self.lift_to_tcx(tcx.global_tcx())
1528 use super::{CtxtInterners, GlobalCtxt, TyCtxt};
1530 use std::cell::Cell;
1534 /// Marker types used for the scoped TLS slot.
1535 /// The type context cannot be used directly because the scoped TLS
1536 /// in libstd doesn't allow types generic over lifetimes.
1537 enum ThreadLocalGlobalCtxt {}
1538 enum ThreadLocalInterners {}
// Raw-pointer pair (gcx, interners) stashed per thread; lifetimes are
// erased via the uninhabited marker types above and restored (unsafely)
// in `with` below.
1541 static TLS_TCX: Cell<Option<(*const ThreadLocalGlobalCtxt,
1542 *const ThreadLocalInterners)>> = Cell::new(None)
// Span debug-formatting hook that routes through the active tcx's codemap.
1545 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
1547 write!(f, "{}", tcx.sess.codemap().span_to_string(span))
// Installs the span-debug hook, enters the global context for `f`, then
// restores the previous hook.
1551 pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R
1552 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
1554 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
1555 let original_span_debug = span_dbg.get();
1556 span_dbg.set(span_debug);
1557 let result = enter(&gcx, &gcx.global_interners, f);
1558 span_dbg.set(original_span_debug);
// Saves the current TLS slot, points it at (gcx, interners) while `f`
// runs, then restores it (the restore is elided in this excerpt).
1563 pub fn enter<'a, 'gcx: 'tcx, 'tcx, F, R>(gcx: &'a GlobalCtxt<'gcx>,
1564 interners: &'a CtxtInterners<'tcx>,
1566 where F: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1568 let gcx_ptr = gcx as *const _ as *const ThreadLocalGlobalCtxt;
1569 let interners_ptr = interners as *const _ as *const ThreadLocalInterners;
1570 TLS_TCX.with(|tls| {
1571 let prev = tls.get();
1572 tls.set(Some((gcx_ptr, interners_ptr)));
1573 let ret = f(TyCtxt {
// Reconstructs a TyCtxt from the TLS pointers; the `unwrap` panics when
// no context has been entered on this thread.
1582 pub fn with<F, R>(f: F) -> R
1583 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1585 TLS_TCX.with(|tcx| {
1586 let (gcx, interners) = tcx.get().unwrap();
1587 let gcx = unsafe { &*(gcx as *const GlobalCtxt) };
1588 let interners = unsafe { &*(interners as *const CtxtInterners) };
// Non-panicking variant: passes `None` to `f` when no tcx is in TLS.
1596 pub fn with_opt<F, R>(f: F) -> R
1597 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
1599 if TLS_TCX.with(|tcx| tcx.get().is_some()) {
1600 with(|v| f(Some(v)))
// Debug-statistics macro: walks the type interner, tallies per-variant
// counts plus how many entries contain region/type inference variables,
// and prints an aligned percentage table. Invoked from
// `print_debug_stats` below with the list of interesting TypeVariants.
1607 macro_rules! sty_debug_print {
1608 ($ctxt: expr, $($variant: ident),*) => {{
1609 // curious inner module to allow variant names to be used as
1611 #[allow(non_snake_case)]
1613 use ty::{self, TyCtxt};
1614 use ty::context::Interned;
// Per-variant counters (total and inference-variable breakdowns).
1616 #[derive(Copy, Clone)]
1619 region_infer: usize,
1624 pub fn go(tcx: TyCtxt) {
1625 let mut total = DebugStat {
1627 region_infer: 0, ty_infer: 0, both_infer: 0,
// One zeroed counter per requested variant, shadowing the variant name.
1629 $(let mut $variant = total;)*
1632 for &Interned(t) in tcx.interners.type_.borrow().iter() {
1633 let variant = match t.sty {
// Primitive/nullary types are uninteresting for these stats.
1634 ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
1635 ty::TyFloat(..) | ty::TyStr | ty::TyNever => continue,
1636 ty::TyError => /* unimportant */ continue,
1637 $(ty::$variant(..) => &mut $variant,)*
1639 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
1640 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
1644 if region { total.region_infer += 1; variant.region_infer += 1 }
1645 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
1646 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
1648 println!("Ty interner total ty region both");
1649 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
1650 {ty:4.1}% {region:5.1}% {both:4.1}%",
1651 stringify!($variant),
1652 uses = $variant.total,
1653 usespc = $variant.total as f64 * 100.0 / total.total as f64,
1654 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
1655 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
1656 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
1658 println!(" total {uses:6} \
1659 {ty:4.1}% {region:5.1}% {both:4.1}%",
1661 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
1662 region = total.region_infer as f64 * 100.0 / total.total as f64,
1663 both = total.both_infer as f64 * 100.0 / total.total as f64)
1671 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Dumps interner statistics to stdout (behind `-Z` debug flags): the
// per-variant type table via `sty_debug_print!`, then raw sizes of the
// other interners.
1672 pub fn print_debug_stats(self) {
1675 TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr,
1676 TyGenerator, TyGeneratorWitness, TyDynamic, TyClosure, TyTuple,
1677 TyParam, TyInfer, TyProjection, TyAnon, TyForeign);
1679 println!("Substs interner: #{}", self.interners.substs.borrow().len());
1680 println!("Region interner: #{}", self.interners.region.borrow().len());
1681 println!("Stability interner: #{}", self.stability_interner.borrow().len());
1682 println!("Interpret interner: #{}", self.interpret_interner.borrow().allocs.len());
1683 println!("Layout interner: #{}", self.layout_interner.borrow().len());
1688 /// An entry in an interner.
// Thin wrapper around an arena reference so the interner hash sets can
// compare/hash by *contents* (via the impls below) while storing only a
// pointer.
1689 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
1691 // NB: An Interned<Ty> compares and hashes as a sty.
1692 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
1693 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
1694 self.0.sty == other.0.sty
1698 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
1700 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
1701 fn hash<H: Hasher>(&self, s: &mut H) {
// `Borrow` impls let the hash sets be probed with a bare key (e.g. a
// `TypeVariants` or a slice) without constructing an `Interned` entry.
1706 impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>> {
1707 fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> {
1712 // NB: An Interned<Slice<T>> compares and hashes as its elements.
1713 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
1714 fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
1715 self.0[..] == other.0[..]
1719 impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}
1721 impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
1722 fn hash<H: Hasher>(&self, s: &mut H) {
1727 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
1728 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
1733 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
1734 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
1739 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
1740 fn borrow<'a>(&'a self) -> &'a RegionKind {
1745 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
1746 for Interned<'tcx, Slice<ExistentialPredicate<'tcx>>> {
1747 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
1752 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
1753 for Interned<'tcx, Slice<Predicate<'tcx>>> {
1754 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
1759 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
1760 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
// Generates one interning method on TyCtxt. The generated method checks
// the local interner, then the global one; values free of inference
// types/regions (per `$needs_infer`) are interned globally so they can
// be shared, everything else goes into the local interner.
1765 macro_rules! intern_method {
1766 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
1767 $alloc_method:ident,
1770 $needs_infer:expr) -> $ty:ty) => {
1771 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
1772 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
// Fast path: already interned locally or globally.
1774 let key = ($alloc_to_key)(&v);
1775 if let Some(i) = self.interners.$name.borrow().get(key) {
1778 if !self.is_global() {
1779 if let Some(i) = self.global_interners.$name.borrow().get(key) {
1785 // HACK(eddyb) Depend on flags being accurate to
1786 // determine that all contents are in the global tcx.
1787 // See comments on Lift for why we can't use that.
1788 if !($needs_infer)(&v) {
1789 if !self.is_global() {
1793 let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
1794 self.global_interners.$name.borrow_mut().insert(Interned(i));
1798 // Make sure we don't end up with inference
1799 // types/regions in the global tcx.
1800 if self.is_global() {
1801 bug!("Attempted to intern `{:?}` which contains \
1802 inference types/regions in the global type context",
// Slow path: allocate in the local arena and record locally.
1807 let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
1808 self.interners.$name.borrow_mut().insert(Interned(i));
// Generates interners for types stored directly (not as slices): derives
// contents-based PartialEq/Eq/Hash for `Interned<T>` and delegates the
// method body to `intern_method!` with identity conversions.
1815 macro_rules! direct_interners {
1816 ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
1817 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
1818 fn eq(&self, other: &Self) -> bool {
1823 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
1825 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
1826 fn hash<H: Hasher>(&self, s: &mut H) {
1831 intern_method!($lt_tcx, $name: $method($ty, alloc, |x| x, |x| x, $needs_infer) -> $ty);)+
// True when `x` mentions inference types/regions and therefore must stay
// in the local (inference) tcx rather than the global one.
1835 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
1836 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Concrete interners: regions (inference/skolemized ones stay local) and
// constants (local when either the type or the value mentions inference).
1839 direct_interners!('tcx,
1840 region: mk_region(|r| {
1842 &ty::ReVar(_) | &ty::ReSkolemized(..) => true,
1846 const_: mk_const(|c: &Const| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>
// Generates interners for `Slice<T>` collections: the allocation is a
// plain `&[T]` copied into the arena and transmuted to `&Slice<T>`
// (layout-compatible by construction); a slice stays local when any
// element needs the local tcx.
1849 macro_rules! slice_interners {
1850 ($($field:ident: $method:ident($ty:ident)),+) => (
1851 $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
1852 |xs: &[$ty]| -> &Slice<$ty> {
1853 unsafe { mem::transmute(xs) }
1854 }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
// The four slice-backed interners used throughout the type system.
1859 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
1860 predicates: _intern_predicates(Predicate),
1861 type_list: _intern_type_list(Ty),
1862 substs: _intern_substs(Kind)
1865 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
1866 /// Given a `fn` type, returns an equivalent `unsafe fn` type;
1867 /// that is, a `fn` type that is equivalent in every way for being
1869 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
// Precondition: the input must actually be a safe fn.
1870 assert_eq!(sig.unsafety(), hir::Unsafety::Normal),
1871 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
1872 unsafety: hir::Unsafety::Unsafe,
1877 /// Given a closure signature `sig`, returns an equivalent `fn`
1878 /// type with the same signature. Detuples and so forth -- so
1879 /// e.g. if we have a sig with `Fn<(u32, i32)>` then you would get
1880 /// a `fn(u32, i32)`.
1881 pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
1882 let converted_sig = sig.map_bound(|s| {
// Closures take their arguments as one tuple; flatten it here.
1883 let params_iter = match s.inputs()[0].sty {
1884 ty::TyTuple(params, _) => {
1885 params.into_iter().cloned()
1893 hir::Unsafety::Normal,
1898 self.mk_fn_ptr(converted_sig)
1901 // Interns a type/name combination, stores the resulting box in cx.interners,
1902 // and returns the box as cast to an unsafe ptr (see comments for Ty above).
1903 pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
// A local context passes the global interners along so fully-global
// types can be shared; the global context passes None.
1904 let global_interners = if !self.is_global() {
1905 Some(&self.global_interners)
1909 self.interners.intern_ty(st, global_interners)
// Machine-type constructors: return the pre-interned common types.
1912 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
1914 ast::IntTy::Isize => self.types.isize,
1915 ast::IntTy::I8 => self.types.i8,
1916 ast::IntTy::I16 => self.types.i16,
1917 ast::IntTy::I32 => self.types.i32,
1918 ast::IntTy::I64 => self.types.i64,
1919 ast::IntTy::I128 => self.types.i128,
1923 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
1925 ast::UintTy::Usize => self.types.usize,
1926 ast::UintTy::U8 => self.types.u8,
1927 ast::UintTy::U16 => self.types.u16,
1928 ast::UintTy::U32 => self.types.u32,
1929 ast::UintTy::U64 => self.types.u64,
1930 ast::UintTy::U128 => self.types.u128,
1934 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
1936 ast::FloatTy::F32 => self.types.f32,
1937 ast::FloatTy::F64 => self.types.f64,
1941 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str` — convenience for string literals.
1945 pub fn mk_static_str(self) -> Ty<'tcx> {
1946 self.mk_imm_ref(self.types.re_static, self.mk_str())
// Nominal/structural type constructors; all bottom out in `mk_ty`.
1949 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1950 // take a copy of substs so that we own the vectors inside
1951 self.mk_ty(TyAdt(def, substs))
1954 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
1955 self.mk_ty(TyForeign(def_id))
// `Box<ty>` is just the `owned_box` lang-item ADT applied to `ty`.
1958 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1959 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
1960 let adt_def = self.adt_def(def_id);
1961 let substs = self.mk_substs(iter::once(Kind::from(ty)));
1962 self.mk_ty(TyAdt(adt_def, substs))
1965 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1966 self.mk_ty(TyRawPtr(tm))
1969 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1970 self.mk_ty(TyRef(r, tm))
// Mutability-specialized shorthands over mk_ref / mk_ptr.
1973 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
1974 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1977 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
1978 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
1981 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1982 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1985 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1986 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
1989 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
1990 self.mk_imm_ptr(self.mk_nil())
// Array with length given as a plain u64; converts to a target-sized
// ConstUsize first (unwrap: the host-usize conversion is expected to fit).
1993 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
1994 let n = ConstUsize::new(n, self.sess.target.usize_ty).unwrap();
1995 self.mk_array_const_usize(ty, n)
1998 pub fn mk_array_const_usize(self, ty: Ty<'tcx>, n: ConstUsize) -> Ty<'tcx> {
1999 self.mk_ty(TyArray(ty, self.mk_const(ty::Const {
2000 val: ConstVal::Integral(ConstInt::Usize(n)),
2001 ty: self.types.usize
2005 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2006 self.mk_ty(TySlice(ty))
// Tuple constructors; `defaulted` marks defaulted-unit tuples (see
// mk_diverging_default below).
2009 pub fn intern_tup(self, ts: &[Ty<'tcx>], defaulted: bool) -> Ty<'tcx> {
2010 self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted))
2013 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I,
2014 defaulted: bool) -> I::Output {
2015 iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted)))
2018 pub fn mk_nil(self) -> Ty<'tcx> {
2019 self.intern_tup(&[], false)
// The type a diverging expression defaults to: `!` with the never_type
// feature, otherwise the defaulted unit tuple.
2022 pub fn mk_diverging_default(self) -> Ty<'tcx> {
2023 if self.sess.features.borrow().never_type {
2026 self.intern_tup(&[], true)
2030 pub fn mk_bool(self) -> Ty<'tcx> {
2034 pub fn mk_fn_def(self, def_id: DefId,
2035 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2036 self.mk_ty(TyFnDef(def_id, substs))
2039 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
2040 self.mk_ty(TyFnPtr(fty))
// Trait-object type from its predicate set and lifetime bound.
2045 obj: ty::Binder<&'tcx Slice<ExistentialPredicate<'tcx>>>,
2046 reg: ty::Region<'tcx>
2048 self.mk_ty(TyDynamic(obj, reg))
// Associated-type projection (`<T as Trait>::Item`).
2051 pub fn mk_projection(self,
2053 substs: &'tcx Substs<'tcx>)
2055 self.mk_ty(TyProjection(ProjectionTy {
2061 pub fn mk_closure(self,
2063 substs: ClosureSubsts<'tcx>)
2065 self.mk_closure_from_closure_substs(closure_id, substs)
2068 pub fn mk_closure_from_closure_substs(self,
2070 closure_substs: ClosureSubsts<'tcx>)
2072 self.mk_ty(TyClosure(closure_id, closure_substs))
// Generator type: like a closure but also carries its interior (the
// types live across yield points).
2075 pub fn mk_generator(self,
2077 closure_substs: ClosureSubsts<'tcx>,
2078 interior: GeneratorInterior<'tcx>)
2080 self.mk_ty(TyGenerator(id, closure_substs, interior))
2083 pub fn mk_generator_witness(self, types: ty::Binder<&'tcx Slice<Ty<'tcx>>>) -> Ty<'tcx> {
2084 self.mk_ty(TyGeneratorWitness(types))
// Inference-variable constructors (type, integral, float).
2087 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
2088 self.mk_infer(TyVar(v))
2091 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
2092 self.mk_infer(IntVar(v))
2095 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
2096 self.mk_infer(FloatVar(v))
2099 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
2100 self.mk_ty(TyInfer(it))
// Generic type parameters; `Self` is by convention parameter index 0.
2103 pub fn mk_param(self,
2105 name: Name) -> Ty<'tcx> {
2106 self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
2109 pub fn mk_self_type(self) -> Ty<'tcx> {
2110 self.mk_param(0, keywords::SelfType.name())
2113 pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
2114 self.mk_param(def.index, def.name)
// `impl Trait` (anonymized) type.
2117 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2118 self.mk_ty(TyAnon(def_id, substs))
// Slice-interning entry points: validate invariants, then delegate to the
// macro-generated `_intern_*` methods (which reject empty slices; the
// elided early returns here hand back `Slice::empty()` for those).
2121 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
2122 -> &'tcx Slice<ExistentialPredicate<'tcx>> {
2123 assert!(!eps.is_empty());
// Existential predicates must be sorted (canonical form) so equal sets
// intern to the same pointer.
2124 assert!(eps.windows(2).all(|w| w[0].cmp(self, &w[1]) != Ordering::Greater));
2125 self._intern_existential_predicates(eps)
2128 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
2129 -> &'tcx Slice<Predicate<'tcx>> {
2130 // FIXME consider asking the input slice to be sorted to avoid
2131 // re-interning permutations, in which case that would be asserted
2133 if preds.len() == 0 {
2134 // The macro-generated method below asserts we don't intern an empty slice.
2137 self._intern_predicates(preds)
2141 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice<Ty<'tcx>> {
2145 self._intern_type_list(ts)
2149 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice<Kind<'tcx>> {
2153 self._intern_substs(ts)
// Builds a FnSig from an input iterator plus explicit output; the output
// type is stored as the final element of `inputs_and_output`.
2157 pub fn mk_fn_sig<I>(self,
2161 unsafety: hir::Unsafety,
2163 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2165 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2167 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2168 inputs_and_output: self.intern_type_list(xs),
2169 variadic, unsafety, abi
// Iterator-consuming wrappers over the intern_* methods (see InternAs).
2173 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2174 &'tcx Slice<ExistentialPredicate<'tcx>>>>(self, iter: I)
2176 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2179 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2180 &'tcx Slice<Predicate<'tcx>>>>(self, iter: I)
2182 iter.intern_with(|xs| self.intern_predicates(xs))
2185 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2186 &'tcx Slice<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2187 iter.intern_with(|xs| self.intern_type_list(xs))
2190 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2191 &'tcx Slice<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2192 iter.intern_with(|xs| self.intern_substs(xs))
// Substs for a trait reference: the self type first, then the remaining
// type parameters.
2195 pub fn mk_substs_trait(self,
2198 -> &'tcx Substs<'tcx>
2200 self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from))
2203 pub fn lint_node<S: Into<MultiSpan>>(self,
2204 lint: &'static Lint,
2208 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
2211 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2212 lint: &'static Lint,
2217 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
2222 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2223 -> (lint::Level, lint::LintSource)
2225 // Right now we insert a `with_ignore` node in the dep graph here to
2226 // ignore the fact that `lint_levels` below depends on the entire crate.
2227 // For now this'll prevent false positives of recompiling too much when
2228 // anything changes.
2230 // Once red/green incremental compilation lands we should be able to
2231 // remove this because while the crate changes often the lint level map
2232 // will change rarely.
2233 self.dep_graph.with_ignore(|| {
2234 let sets = self.lint_levels(LOCAL_CRATE);
2236 let hir_id = self.hir.definitions().node_to_hir_id(id);
2237 if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
2240 let next = self.hir.get_parent_node(id);
2242 bug!("lint traversal reached the root of the crate");
// Builds (but does not emit) a lint diagnostic for `lint` at node `id`,
// attached to the given span. Level and source are resolved through
// `lint_level_at_node`.
// NOTE(review): the `id`, `span`, `msg` parameter lines are sampled out of
// this chunk.
2249 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
2250 lint: &'static Lint,
2254 -> DiagnosticBuilder<'tcx>
2256 let (level, src) = self.lint_level_at_node(lint, id);
2257 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
// Span-less variant of `struct_span_lint_node`: same level/source
// resolution, but passes `None` for the span.
2260 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
2261 -> DiagnosticBuilder<'tcx>
2263 let (level, src) = self.lint_level_at_node(lint, id);
2264 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// Looks up the trait candidates in scope at `id`: queries the per-owner
// map, then indexes by the item-local id. Returns `None` when the owner
// has no map entry or the local id is absent.
2267 pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
2268 self.in_scope_traits_map(id.owner)
2269 .and_then(|map| map.get(&id.local_id).cloned())
// Resolves the lifetime associated with HIR id `id` via the per-owner
// `named_region_map` query (same owner/local-id split as
// `in_scope_traits` above).
2272 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
2273 self.named_region_map(id.owner)
2274 .and_then(|map| map.get(&id.local_id).cloned())
// Whether the lifetime at `id` is late-bound: checks membership in the
// owner's late-bound set.
// NOTE(review): the trailing default for a missing owner map (presumably
// `.unwrap_or(false)`) is not visible in this sampled chunk.
2277 pub fn is_late_bound(self, id: HirId) -> bool {
2278 self.is_late_bound_map(id.owner)
2279 .map(|set| set.contains(&id.local_id))
// Looks up the object-lifetime defaults recorded for node `id`, again via
// the per-owner map + item-local id pattern used by the lookups above.
2283 pub fn object_lifetime_defaults(self, id: HirId)
2284 -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
2286 self.object_lifetime_defaults_map(id.owner)
2287 .and_then(|map| map.get(&id.local_id).cloned())
// Abstraction letting the `mk_*` helpers above accept either plain or
// fallible iterators: `intern_with` collects the items, hands the slice to
// the interning closure `f`, and returns `Self::Output`.
// NOTE(review): the `type Output;` associated-type line is sampled out of
// this chunk but is referenced by the impl below.
2291 pub trait InternAs<T: ?Sized, R> {
2293 fn intern_with<F>(self, f: F) -> Self::Output
2294 where F: FnOnce(&T) -> R;
// Blanket impl: any iterator whose element type knows how to be interned
// (`InternIteratorElement`) can be passed to the `mk_*` helpers. The
// element type decides the output shape (e.g. `Result` propagation below).
2297 impl<I, T, R, E> InternAs<[T], R> for I
2298 where E: InternIteratorElement<T, R>,
2299 I: Iterator<Item=E> {
2300 type Output = E::Output;
2301 fn intern_with<F>(self, f: F) -> Self::Output
2302 where F: FnOnce(&[T]) -> R {
2303 E::intern_with(self, f)
// Per-element strategy for collecting an iterator into a slice and interning
// it: implemented for owned `T`, for `&T` (cloning), and for `Result<T, E>`
// (short-circuiting) below.
// NOTE(review): the `type Output;` line is sampled out of this chunk.
2307 pub trait InternIteratorElement<T, R>: Sized {
2309 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
// Owned-element case: collect into a small-size-optimized `AccumulateVec`
// (inline storage for up to 8 elements, avoiding a heap allocation for the
// common short case) and pass the slice to `f`.
// NOTE(review): the `type Output = R;` line is sampled out of this chunk.
2312 impl<T, R> InternIteratorElement<T, R> for T {
2314 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2315 f(&iter.collect::<AccumulateVec<[_; 8]>>())
// Borrowed-element case: clone each `&T` while collecting, then intern.
// NOTE(review): the `where T: Clone` bound and `type Output` lines are
// sampled out of this chunk; `.cloned()` in the body requires `T: Clone`.
2319 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
2323 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2324 f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
// Fallible case: `collect::<Result<_, _>>()` short-circuits on the first
// `Err`, so interning only happens when every element succeeded; the
// interned result is re-wrapped in `Ok`.
2328 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
2329 type Output = Result<R, E>;
2330 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2331 Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))
// Installs the query providers for facts that come straight out of
// resolve/session tables rather than from real sub-queries.
// NOTE(review): sampled listing — several closure bodies and closing braces
// are not visible in this chunk; the function also continues past the end
// of the visible region.
2335 pub fn provide(providers: &mut ty::maps::Providers) {
2336 // FIXME(#44234) - almost all of these queries have no sub-queries and
2337 // therefore no actual inputs, they're just reading tables calculated in
2338 // resolve! Does this work? Unsure! That's what the issue is about
2339 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
2340 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
// Crate-wide providers below assert they are only asked about LOCAL_CRATE:
// these tables exist only for the crate being compiled.
2341 providers.crate_name = |tcx, id| {
2342 assert_eq!(id, LOCAL_CRATE);
2345 providers.get_lang_items = |tcx, id| {
2346 assert_eq!(id, LOCAL_CRATE);
2347 // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
2348 // graph here to ignore the fact that `get_lang_items` below depends on
2349 // the entire crate. For now this'll prevent false positives of
2350 // recompiling too much when anything changes.
2352 // Once red/green incremental compilation lands we should be able to
2353 // remove this because while the crate changes often the lint level map
2354 // will change rarely.
2355 tcx.dep_graph.with_ignore(|| Lrc::new(middle::lang_items::collect(tcx)))
2357 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
2358 providers.maybe_unused_trait_import = |tcx, id| {
2359 tcx.maybe_unused_trait_imports.contains(&id)
2361 providers.maybe_unused_extern_crates = |tcx, cnum| {
2362 assert_eq!(cnum, LOCAL_CRATE);
2363 Lrc::new(tcx.maybe_unused_extern_crates.clone())
2366 providers.stability_index = |tcx, cnum| {
2367 assert_eq!(cnum, LOCAL_CRATE);
2368 Lrc::new(stability::Index::new(tcx))
// Stability/deprecation lookups: translate the DefIndex to a HirId first,
// then read the locally computed stability index.
2370 providers.lookup_stability = |tcx, id| {
2371 assert_eq!(id.krate, LOCAL_CRATE);
2372 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2373 tcx.stability().local_stability(id)
2375 providers.lookup_deprecation_entry = |tcx, id| {
2376 assert_eq!(id.krate, LOCAL_CRATE);
2377 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2378 tcx.stability().local_deprecation_entry(id)
2380 providers.extern_mod_stmt_cnum = |tcx, id| {
2381 let id = tcx.hir.as_local_node_id(id).unwrap();
2382 tcx.cstore.extern_mod_stmt_cnum_untracked(id)
// The `_untracked` cstore calls below bypass dep-graph tracking; these
// providers exist to give the query system a tracked entry point for them.
2384 providers.all_crate_nums = |tcx, cnum| {
2385 assert_eq!(cnum, LOCAL_CRATE);
2386 Lrc::new(tcx.cstore.crates_untracked())
2388 providers.postorder_cnums = |tcx, cnum| {
2389 assert_eq!(cnum, LOCAL_CRATE);
2390 Lrc::new(tcx.cstore.postorder_cnums_untracked())
2392 providers.output_filenames = |tcx, cnum| {
2393 assert_eq!(cnum, LOCAL_CRATE);
2394 tcx.output_filenames.clone()
// Feature-gate flags surfaced as queries:
2396 providers.has_copy_closures = |tcx, cnum| {
2397 assert_eq!(cnum, LOCAL_CRATE);
2398 tcx.sess.features.borrow().copy_closures
2400 providers.has_clone_closures = |tcx, cnum| {
2401 assert_eq!(cnum, LOCAL_CRATE);
2402 tcx.sess.features.borrow().clone_closures
// NOTE(review): "monormophic" is a typo for "monomorphic", but the field
// name is declared in ty::maps — renaming must happen there, not here.
2404 providers.fully_normalize_monormophic_ty = |tcx, ty| {
2405 tcx.fully_normalize_associated_types_in(&ty)