1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
18 use hir::def::{Def, ExportMap};
19 use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
20 use hir::map as hir_map;
21 use hir::map::DisambiguatedDefPathData;
22 use middle::free_region::FreeRegionMap;
23 use middle::lang_items;
24 use middle::region::RegionMaps;
25 use middle::resolve_lifetime;
26 use middle::stability;
28 use ty::subst::{Kind, Substs};
31 use ty::{self, TraitRef, Ty, TypeAndMut};
32 use ty::{TyS, TypeVariants, Slice};
33 use ty::{AdtKind, AdtDef, ClosureSubsts, Region};
35 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate};
36 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
37 use ty::TypeVariants::*;
38 use ty::layout::{Layout, TargetDataLayout};
39 use ty::inhabitedness::DefIdForest;
41 use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet};
42 use util::nodemap::{FxHashMap, FxHashSet};
43 use rustc_data_structures::accumulate_vec::AccumulateVec;
45 use arena::{TypedArena, DroplessArena};
46 use rustc_data_structures::indexed_vec::IndexVec;
47 use std::borrow::Borrow;
48 use std::cell::{Cell, RefCell};
49 use std::hash::{Hash, Hasher};
53 use std::cmp::Ordering;
55 use syntax::ast::{self, Name, NodeId};
57 use syntax::symbol::{Symbol, keywords};
// Arenas holding values that live for the entire compilation ('gcx).
// One TypedArena per kind of value so same-typed allocations are grouped.
// Values are handed out by the `alloc_*` / `intern_layout` methods on TyCtxt.
62 pub struct GlobalArenas<'tcx> {
// Backing storage for interned layouts (see TyCtxt::intern_layout).
64 layout: TypedArena<Layout>,
// Backing storage for TyCtxt::alloc_generics.
67 generics: TypedArena<ty::Generics>,
// Backing storage for TyCtxt::alloc_trait_def.
68 trait_def: TypedArena<ty::TraitDef>,
// Backing storage for TyCtxt::alloc_adt_def.
69 adt_def: TypedArena<ty::AdtDef>,
// MIR bodies are arena-allocated wrapped in RefCell so passes can
// mutate them in place (see TyCtxt::alloc_mir).
70 mir: TypedArena<RefCell<Mir<'tcx>>>,
// Per-body typeck results (see TyCtxt::alloc_tables).
71 tables: TypedArena<ty::TypeckTables<'tcx>>,
74 impl<'tcx> GlobalArenas<'tcx> {
// Construct a GlobalArenas with all arenas empty.
75 pub fn new() -> GlobalArenas<'tcx> {
77 layout: TypedArena::new(),
78 generics: TypedArena::new(),
79 trait_def: TypedArena::new(),
80 adt_def: TypedArena::new(),
81 mir: TypedArena::new(),
82 tables: TypedArena::new(),
// Deduplicating interners for types and related values. There is one
// global instance (inside GlobalCtxt) and possibly a local one per
// inference context (see GlobalCtxt::enter_local).
87 pub struct CtxtInterners<'tcx> {
88 /// The arena that types, regions, etc are allocated from
89 arena: &'tcx DroplessArena,
91 /// Specifically use a speedy hash algorithm for these hash sets,
92 /// they're accessed quite often.
// Each set stores an Interned wrapper around an arena reference;
// equality/hashing is by contents (see the Interned impls below),
// so identical values intern to the same pointer.
93 type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
94 type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
95 substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
96 region: RefCell<FxHashSet<Interned<'tcx, Region>>>,
97 existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
100 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
// Create a fresh interner set backed by `arena`, all sets empty.
101 fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
104 type_: RefCell::new(FxHashSet()),
105 type_list: RefCell::new(FxHashSet()),
106 substs: RefCell::new(FxHashSet()),
107 region: RefCell::new(FxHashSet()),
108 existential_predicates: RefCell::new(FxHashSet()),
112 /// Intern a type. global_interners is Some only if this is
113 /// a local interner and global_interners is its counterpart.
//
// Lookup order: first the local type_ set, then (for local interners)
// the global set; only on a miss is a new TyS allocated. Values whose
// flags show no inference types/regions are promoted into the global
// arena so they stay valid beyond the local context.
114 fn intern_ty(&self, st: TypeVariants<'tcx>,
115 global_interners: Option<&CtxtInterners<'gcx>>)
118 let mut interner = self.type_.borrow_mut();
119 let global_interner = global_interners.map(|interners| {
120 interners.type_.borrow_mut()
// Fast path: already interned locally.
122 if let Some(&Interned(ty)) = interner.get(&st) {
// Fast path: already interned globally.
125 if let Some(ref interner) = global_interner {
126 if let Some(&Interned(ty)) = interner.get(&st) {
// Miss: compute flags for the new type (also yields region depth).
131 let flags = super::flags::FlagComputation::for_sty(&st);
132 let ty_struct = TyS {
134 flags: Cell::new(flags.flags),
135 region_depth: flags.depth,
138 // HACK(eddyb) Depend on flags being accurate to
139 // determine that all contents are in the global tcx.
140 // See comments on Lift for why we can't use that.
141 if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
142 if let Some(interner) = global_interners {
// SAFETY-ish: the transmute only changes the lifetime parameter
// ('tcx -> 'gcx); the flags check above is what justifies that the
// value contains no local-only (inference) components.
143 let ty_struct: TyS<'gcx> = unsafe {
144 mem::transmute(ty_struct)
146 let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
// unwrap is fine: global_interner is Some iff global_interners is.
147 global_interner.unwrap().insert(Interned(ty));
151 // Make sure we don't end up with inference
152 // types/regions in the global tcx.
153 if global_interners.is_none() {
155 bug!("Attempted to intern `{:?}` which contains \
156 inference types/regions in the global type context",
161 // Don't be &mut TyS.
162 let ty: Ty<'tcx> = self.arena.alloc(ty_struct);
163 interner.insert(Interned(ty));
167 debug!("Interned type: {:?} Pointer: {:?}",
168 ty, ty as *const TyS);
174 pub struct CommonTypes<'tcx> {
// Per-body results of type checking, keyed by HIR NodeId.
195 #[derive(RustcEncodable, RustcDecodable)]
196 pub struct TypeckTables<'tcx> {
197 /// Resolved definitions for `<T>::X` associated paths.
198 pub type_relative_path_defs: NodeMap<Def>,
200 /// Stores the types for various nodes in the AST. Note that this table
201 /// is not guaranteed to be populated until after typeck. See
202 /// typeck::check::fn_ctxt for details.
203 pub node_types: NodeMap<Ty<'tcx>>,
205 /// Stores the type parameters which were substituted to obtain the type
206 /// of this node. This only applies to nodes that refer to entities
207 /// parameterized by type parameters, such as generic fns, types, or
209 pub item_substs: NodeMap<ty::ItemSubsts<'tcx>>,
// Adjustments (e.g. auto-deref/auto-ref) applied to an expression;
// `adj.target` is the post-adjustment type (see expr_ty_adjusted).
211 pub adjustments: NodeMap<ty::adjustment::Adjustment<'tcx>>,
// Resolved method callees; queried via ty::MethodCall keys in
// is_method_call / is_overloaded_autoderef below.
213 pub method_map: ty::MethodMap<'tcx>,
// How each closure upvar is captured (by value / by ref); see
// upvar_capture below.
216 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
218 /// Records the type of each closure.
219 pub closure_tys: NodeMap<ty::PolyFnSig<'tcx>>,
221 /// Records the kind of each closure.
222 pub closure_kinds: NodeMap<ty::ClosureKind>,
224 /// For each fn, records the "liberated" types of its arguments
225 /// and return type. Liberated means that all bound regions
226 /// (including late-bound regions) are replaced with free
227 /// equivalents. This table is not used in trans (since regions
228 /// are erased there) and hence is not serialized to metadata.
229 pub liberated_fn_sigs: NodeMap<ty::FnSig<'tcx>>,
231 /// For each FRU expression, record the normalized types of the fields
232 /// of the struct - this is needed because it is non-trivial to
233 /// normalize while preserving regions. This table is used only in
234 /// MIR construction and hence is not serialized to metadata.
235 pub fru_field_types: NodeMap<Vec<Ty<'tcx>>>,
237 /// Maps a cast expression to its kind. This is keyed on the
238 /// *from* expression of the cast, not the cast itself.
239 pub cast_kinds: NodeMap<ty::cast::CastKind>,
241 /// Lints for the body of this fn generated by typeck.
242 pub lints: lint::LintTable,
244 /// Set of trait imports actually used in the method resolution.
245 /// This is used for warning unused imports.
246 pub used_trait_imports: DefIdSet,
248 /// If any errors occurred while type-checking this body,
249 /// this field will be set to `true`.
250 pub tainted_by_errors: bool,
252 /// Stores the free-region relationships that were deduced from
253 /// its where clauses and parameter types. These are then
254 /// read-again by borrowck.
255 pub free_region_map: FreeRegionMap,
258 impl<'tcx> TypeckTables<'tcx> {
// Construct a TypeckTables with every table empty and no errors recorded.
259 pub fn empty() -> TypeckTables<'tcx> {
261 type_relative_path_defs: NodeMap(),
262 node_types: FxHashMap(),
263 item_substs: NodeMap(),
264 adjustments: NodeMap(),
265 method_map: FxHashMap(),
266 upvar_capture_map: FxHashMap(),
267 closure_tys: NodeMap(),
268 closure_kinds: NodeMap(),
269 liberated_fn_sigs: NodeMap(),
270 fru_field_types: NodeMap(),
271 cast_kinds: NodeMap(),
272 lints: lint::LintTable::new(),
273 used_trait_imports: DefIdSet(),
274 tainted_by_errors: false,
275 free_region_map: FreeRegionMap::new(),
279 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
// Type-relative paths fall back to Def::Err if no resolution was recorded.
280 pub fn qpath_def(&self, qpath: &hir::QPath, id: NodeId) -> Def {
282 hir::QPath::Resolved(_, ref path) => path.def,
283 hir::QPath::TypeRelative(..) => {
284 self.type_relative_path_defs.get(&id).cloned().unwrap_or(Def::Err)
// Look up the recorded type for `id`; ICEs (via bug!) if no entry exists.
// Use node_id_to_type_opt for the non-panicking variant.
289 pub fn node_id_to_type(&self, id: NodeId) -> Ty<'tcx> {
290 match self.node_id_to_type_opt(id) {
293 bug!("node_id_to_type: no type for node `{}`",
294 tls::with(|tcx| tcx.hir.node_to_string(id)))
// Non-panicking lookup in node_types.
299 pub fn node_id_to_type_opt(&self, id: NodeId) -> Option<Ty<'tcx>> {
300 self.node_types.get(&id).cloned()
// Substs recorded for `id`, if it refers to a parameterized entity.
303 pub fn node_id_item_substs(&self, id: NodeId) -> Option<&'tcx Substs<'tcx>> {
304 self.item_substs.get(&id).map(|ts| ts.substs)
307 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
308 // doesn't provide type parameter substitutions.
309 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
310 self.node_id_to_type(pat.id)
// Non-panicking variant of pat_ty.
313 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
314 self.node_id_to_type_opt(pat.id)
317 // Returns the type of an expression as a monotype.
319 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
320 // some cases, we insert `Adjustment` annotations such as auto-deref or
321 // auto-ref. The type returned by this function does not consider such
322 // adjustments. See `expr_ty_adjusted()` instead.
324 // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
325 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
326 // instead of "fn(ty) -> T with T = isize".
327 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
328 self.node_id_to_type(expr.id)
// Non-panicking variant of expr_ty.
331 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
332 self.node_id_to_type_opt(expr.id)
335 /// Returns the type of `expr`, considering any `Adjustment`
336 /// entry recorded for that expression.
337 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
338 self.adjustments.get(&expr.id)
339 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
// Non-panicking variant of expr_ty_adjusted: prefers the adjustment's
// target type, falling back to the recorded pre-adjustment type.
342 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
343 self.adjustments.get(&expr.id)
344 .map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr))
// True iff `expr_id` resolved to an (overloaded) method call.
347 pub fn is_method_call(&self, expr_id: NodeId) -> bool {
348 self.method_map.contains_key(&ty::MethodCall::expr(expr_id))
// True iff the autoderef step `autoderefs` of `expr_id` used an
// overloaded Deref impl.
351 pub fn is_overloaded_autoderef(&self, expr_id: NodeId, autoderefs: u32) -> bool {
352 self.method_map.contains_key(&ty::MethodCall::autoderef(expr_id, autoderefs))
// NOTE(review): despite the Option return type, a missing entry panics
// via the inner unwrap(); callers apparently rely on the entry existing.
355 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture<'tcx>> {
356 Some(self.upvar_capture_map.get(&upvar_id).unwrap().clone())
360 impl<'tcx> CommonTypes<'tcx> {
// Pre-intern every primitive type once so the rest of the compiler can
// grab them from CommonTypes without re-interning.
361 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
// None: these are interned directly into the given (global) interner.
362 let mk = |sty| interners.intern_ty(sty, None);
368 isize: mk(TyInt(ast::IntTy::Is)),
369 i8: mk(TyInt(ast::IntTy::I8)),
370 i16: mk(TyInt(ast::IntTy::I16)),
371 i32: mk(TyInt(ast::IntTy::I32)),
372 i64: mk(TyInt(ast::IntTy::I64)),
373 i128: mk(TyInt(ast::IntTy::I128)),
374 usize: mk(TyUint(ast::UintTy::Us)),
375 u8: mk(TyUint(ast::UintTy::U8)),
376 u16: mk(TyUint(ast::UintTy::U16)),
377 u32: mk(TyUint(ast::UintTy::U32)),
378 u64: mk(TyUint(ast::UintTy::U64)),
379 u128: mk(TyUint(ast::UintTy::U128)),
380 f32: mk(TyFloat(ast::FloatTy::F32)),
381 f64: mk(TyFloat(ast::FloatTy::F64)),
386 /// The data structure to keep track of all the information that typechecker
387 /// generates so that it can be reused and doesn't have to be redone
// Copy + Clone: TyCtxt is just a pair of references, cheap to pass by value.
389 #[derive(Copy, Clone)]
390 pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// The global context ('gcx outlives 'tcx).
391 gcx: &'a GlobalCtxt<'gcx>,
// Either the global interners or a local (inference-context) set;
// compared by address in TyCtxt::is_global.
392 interners: &'a CtxtInterners<'tcx>
// Deref to the global context so GlobalCtxt fields (sess, maps, ...)
// can be reached directly through a TyCtxt.
395 impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
396 type Target = &'a GlobalCtxt<'gcx>;
397 fn deref(&self) -> &Self::Target {
// The compilation-wide context: owns the global interners and all the
// shared tables/caches the compiler threads through TyCtxt.
402 pub struct GlobalCtxt<'tcx> {
// Arenas for 'gcx-lifetime allocations (see GlobalArenas).
403 global_arenas: &'tcx GlobalArenas<'tcx>,
// The global interner set; local contexts get their own (see enter_local).
404 global_interners: CtxtInterners<'tcx>,
// The current compiler session (options, diagnostics, cstore, ...).
406 pub sess: &'tcx Session,
// Cache for trait-impl specialization queries.
408 pub specializes_cache: RefCell<traits::SpecializesCache>,
// Incremental-compilation dependency graph.
410 pub dep_graph: DepGraph,
412 /// Common types, pre-interned for your convenience.
413 pub types: CommonTypes<'tcx>,
415 /// Map indicating what traits are in scope for places where this
416 /// is relevant; generated by resolve.
417 pub trait_map: TraitMap,
419 /// Export map produced by name resolution.
420 pub export_map: ExportMap,
// Lifetime-name resolution results.
422 pub named_region_map: resolve_lifetime::NamedRegionMap,
// Region (scope) hierarchy of the crate.
424 pub region_maps: RegionMaps,
// The HIR map and the on-demand query engine.
426 pub hir: hir_map::Map<'tcx>,
427 pub maps: maps::Maps<'tcx>,
429 // Records the free variables referenced by every closure
430 // expression. Do not track deps for this, just recompute it from
431 // scratch every time.
432 pub freevars: RefCell<FreevarMap>,
// Trait imports that may be unused (for lint reporting).
434 pub maybe_unused_trait_imports: NodeSet,
436 // Internal cache for metadata decoding. No need to track deps on this.
437 pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
439 // Cache for the type-contents routine. FIXME -- track deps?
440 pub tc_cache: RefCell<FxHashMap<Ty<'tcx>, ty::contents::TypeContents>>,
442 // FIXME dep tracking -- should be harmless enough
443 pub normalized_cache: RefCell<FxHashMap<Ty<'tcx>, Ty<'tcx>>>,
// Cache for type-inhabitedness computations.
445 pub inhabitedness_cache: RefCell<FxHashMap<Ty<'tcx>, DefIdForest>>,
// Language items (lang_items attribute registry).
447 pub lang_items: middle::lang_items::LanguageItems,
449 /// True if the variance has been computed yet; false otherwise.
450 pub variance_computed: Cell<bool>,
452 /// Set of used unsafe nodes (functions or blocks). Unsafe nodes not
453 /// present in this set can be warned about.
454 pub used_unsafe: RefCell<NodeSet>,
456 /// Set of nodes which mark locals as mutable which end up getting used at
457 /// some point. Local variable definitions not in this set can be warned
459 pub used_mut_nodes: RefCell<NodeSet>,
461 /// The set of external nominal types whose implementations have been read.
462 /// This is used for lazy resolution of methods.
463 pub populated_external_types: RefCell<DefIdSet>,
465 /// The set of external primitive types whose implementations have been read.
466 /// FIXME(arielb1): why is this separate from populated_external_types?
467 pub populated_external_primitive_impls: RefCell<DefIdSet>,
469 /// Maps any item's def-id to its stability index.
470 pub stability: RefCell<stability::Index<'tcx>>,
472 /// Caches the results of trait selection. This cache is used
473 /// for things that do not have to do with the parameters in scope.
474 pub selection_cache: traits::SelectionCache<'tcx>,
476 /// Caches the results of trait evaluation. This cache is used
477 /// for things that do not have to do with the parameters in scope.
478 /// Merge this with `selection_cache`?
479 pub evaluation_cache: traits::EvaluationCache<'tcx>,
481 /// A set of predicates that have been fulfilled *somewhere*.
482 /// This is used to avoid duplicate work. Predicates are only
483 /// added to this set when they mention only "global" names
484 /// (i.e., no type or lifetime parameters).
485 pub fulfilled_predicates: RefCell<traits::GlobalFulfilledPredicates<'tcx>>,
487 /// Maps Expr NodeId's to `true` iff `&expr` can have 'static lifetime.
488 pub rvalue_promotable_to_static: RefCell<NodeMap<bool>>,
490 /// Maps Fn items to a collection of fragment infos.
492 /// The main goal is to identify data (each of which may be moved
493 /// or assigned) whose subparts are not moved nor assigned
494 /// (i.e. their state is *unfragmented*) and corresponding ast
495 /// nodes where the path to that data is moved or assigned.
497 /// In the long term, unfragmented values will have their
498 /// destructor entirely driven by a single stack-local drop-flag,
499 /// and their parents, the collections of the unfragmented values
500 /// (or more simply, "fragmented values"), are mapped to the
501 /// corresponding collections of stack-local drop-flags.
503 /// (However, in the short term that is not the case; e.g. some
504 /// unfragmented paths still need to be zeroed, namely when they
505 /// reference parent data from an outer scope that was not
506 /// entirely moved, and therefore that needs to be zeroed so that
507 /// we do not get double-drop when we hit the end of the parent
510 /// Also: currently the table solely holds keys for node-ids of
511 /// unfragmented values (see `FragmentInfo` enum definition), but
512 /// longer-term we will need to also store mappings from
513 /// fragmented data to the set of unfragmented pieces that
515 pub fragment_infos: RefCell<DefIdMap<Vec<ty::FragmentInfo>>>,
517 /// The definite name of the current crate after taking into account
518 /// attributes, commandline parameters, etc.
519 pub crate_name: Symbol,
521 /// Data layout specification for the current target.
522 pub data_layout: TargetDataLayout,
524 /// Cache for layouts computed from types.
525 pub layout_cache: RefCell<FxHashMap<Ty<'tcx>, &'tcx Layout>>,
527 /// Used to prevent layout from recursing too deeply.
528 pub layout_depth: Cell<usize>,
530 /// Map from function to the `#[derive]` mode that it's defining. Only used
531 /// by `proc-macro` crates.
532 pub derive_macros: RefCell<NodeMap<Symbol>>,
// Interner sets for Stability / Layout (see intern_stability, intern_layout).
534 stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,
536 layout_interner: RefCell<FxHashSet<&'tcx Layout>>,
538 /// A vector of every trait accessible in the whole crate
539 /// (i.e. including those from subcrates). This is used only for
540 /// error reporting, and so is lazily initialised and generally
541 /// shouldn't taint the common path (hence the RefCell).
542 pub all_traits: RefCell<Option<Vec<DefId>>>,
544 /// HIR Ty -> Ty lowering cache.
545 pub ast_ty_to_ty_cache: RefCell<NodeMap<Ty<'tcx>>>,
548 impl<'tcx> GlobalCtxt<'tcx> {
549 /// Get the global TyCtxt.
// Builds a TyCtxt whose interners are the global ones, so 'tcx == 'gcx.
550 pub fn global_tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
553 interners: &self.global_interners
558 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
// Name of crate `cnum`; the local crate answers from self, others via cstore.
559 pub fn crate_name(self, cnum: CrateNum) -> Symbol {
560 if cnum == LOCAL_CRATE {
563 self.sess.cstore.crate_name(cnum)
// Name of crate `cnum` before any renaming (e.g. via flags/attributes).
567 pub fn original_crate_name(self, cnum: CrateNum) -> Symbol {
568 if cnum == LOCAL_CRATE {
569 self.crate_name.clone()
571 self.sess.cstore.original_crate_name(cnum)
// Disambiguator distinguishing same-named crates.
575 pub fn crate_disambiguator(self, cnum: CrateNum) -> Symbol {
576 if cnum == LOCAL_CRATE {
577 self.sess.local_crate_disambiguator()
579 self.sess.cstore.crate_disambiguator(cnum)
// Resolve a def-path back to a DefId in crate `krate`, if it still exists.
583 pub fn retrace_path(self,
585 path_data: &[DisambiguatedDefPathData])
587 debug!("retrace_path(path={:?}, krate={:?})", path_data, self.crate_name(krate));
589 if krate == LOCAL_CRATE {
593 .retrace_path(path_data)
594 .map(|def_index| DefId { krate: krate, index: def_index })
596 self.sess.cstore.retrace_path(krate, path_data)
// Arena-allocate values in the global arenas, returning 'gcx references.
600 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
601 self.global_arenas.generics.alloc(generics)
604 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx RefCell<Mir<'gcx>> {
605 self.global_arenas.mir.alloc(RefCell::new(mir))
608 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
609 self.global_arenas.tables.alloc(tables)
612 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
613 self.global_arenas.trait_def.alloc(def)
616 pub fn alloc_adt_def(self,
619 variants: Vec<ty::VariantDef>,
621 -> &'gcx ty::AdtDef {
622 let def = ty::AdtDef::new(self, did, kind, variants, repr);
623 self.global_arenas.adt_def.alloc(def)
// Deduplicating interner for Stability; allocates on first sight and
// ICEs if the set unexpectedly already held an equal entry.
626 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
627 if let Some(st) = self.stability_interner.borrow().get(&stab) {
631 let interned = self.global_interners.arena.alloc(stab);
632 if let Some(prev) = self.stability_interner.borrow_mut().replace(interned) {
633 bug!("Tried to overwrite interned Stability: {:?}", prev)
// Same scheme as intern_stability, but for Layout (backed by the
// global_arenas.layout arena).
638 pub fn intern_layout(self, layout: Layout) -> &'gcx Layout {
639 if let Some(layout) = self.layout_interner.borrow().get(&layout) {
643 let interned = self.global_arenas.layout.alloc(layout);
644 if let Some(prev) = self.layout_interner.borrow_mut().replace(interned) {
645 bug!("Tried to overwrite interned Layout: {:?}", prev)
// Try to convert a value into this context's lifetime (see Lift trait).
650 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
651 value.lift_to_tcx(self)
654 /// Like lift, but only tries in the global tcx.
655 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
656 value.lift_to_tcx(self.global_tcx())
659 /// Returns true if self is the same as self.global_tcx().
// Compared by interner address: a TyCtxt is global iff its interners
// are the GlobalCtxt's own interners.
660 fn is_global(self) -> bool {
661 let local = self.interners as *const _;
662 let global = &self.global_interners as *const _;
663 local as usize == global as usize
666 /// Create a type context and call the closure with a `TyCtxt` reference
667 /// to the context. The closure enforces that the type context and any interned
668 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
669 /// reference to the context, to allow formatting values that need it.
670 pub fn create_and_enter<F, R>(s: &'tcx Session,
671 local_providers: ty::maps::Providers<'tcx>,
672 extern_providers: ty::maps::Providers<'tcx>,
673 arenas: &'tcx GlobalArenas<'tcx>,
674 arena: &'tcx DroplessArena,
675 resolutions: ty::Resolutions,
676 named_region_map: resolve_lifetime::NamedRegionMap,
677 hir: hir_map::Map<'tcx>,
678 region_maps: RegionMaps,
679 lang_items: middle::lang_items::LanguageItems,
680 stability: stability::Index<'tcx>,
683 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
685 let data_layout = TargetDataLayout::parse(s);
686 let interners = CtxtInterners::new(arena);
687 let common_types = CommonTypes::new(&interners);
688 let dep_graph = hir.dep_graph.clone();
689 let fulfilled_predicates = traits::GlobalFulfilledPredicates::new(dep_graph.clone());
// Query providers: one slot per crate, defaulting to the extern
// providers, with the local crate's slot overridden.
690 let max_cnum = s.cstore.crates().iter().map(|c| c.as_usize()).max().unwrap_or(0);
691 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
692 providers[LOCAL_CRATE] = local_providers;
// Install the context in TLS and run `f` inside it.
693 tls::enter_global(GlobalCtxt {
695 specializes_cache: RefCell::new(traits::SpecializesCache::new()),
696 global_arenas: arenas,
697 global_interners: interners,
698 dep_graph: dep_graph.clone(),
700 named_region_map: named_region_map,
701 region_maps: region_maps,
702 variance_computed: Cell::new(false),
703 trait_map: resolutions.trait_map,
704 export_map: resolutions.export_map,
705 fulfilled_predicates: RefCell::new(fulfilled_predicates),
707 maps: maps::Maps::new(dep_graph, providers),
708 freevars: RefCell::new(resolutions.freevars),
709 maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports,
710 rcache: RefCell::new(FxHashMap()),
711 tc_cache: RefCell::new(FxHashMap()),
712 normalized_cache: RefCell::new(FxHashMap()),
713 inhabitedness_cache: RefCell::new(FxHashMap()),
714 lang_items: lang_items,
715 used_unsafe: RefCell::new(NodeSet()),
716 used_mut_nodes: RefCell::new(NodeSet()),
717 populated_external_types: RefCell::new(DefIdSet()),
718 populated_external_primitive_impls: RefCell::new(DefIdSet()),
719 stability: RefCell::new(stability),
720 selection_cache: traits::SelectionCache::new(),
721 evaluation_cache: traits::EvaluationCache::new(),
722 rvalue_promotable_to_static: RefCell::new(NodeMap()),
723 fragment_infos: RefCell::new(DefIdMap()),
724 crate_name: Symbol::intern(crate_name),
725 data_layout: data_layout,
726 layout_cache: RefCell::new(FxHashMap()),
727 layout_interner: RefCell::new(FxHashSet()),
728 layout_depth: Cell::new(0),
729 derive_macros: RefCell::new(NodeMap()),
730 stability_interner: RefCell::new(FxHashSet()),
731 all_traits: RefCell::new(None),
732 ast_ty_to_ty_cache: RefCell::new(NodeMap()),
// Ask the session whether the named optimization should run (respects
// -C fuel style options; see Session::consider_optimizing).
736 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
737 let cname = self.crate_name(LOCAL_CRATE).as_str();
738 self.sess.consider_optimizing(&cname, msg)
742 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
743 /// Call the closure with a local `TyCtxt` using the given arena.
// Builds a fresh local interner set over `arena` (e.g. for an inference
// context) and enters it via the tls module for the duration of `f`.
744 pub fn enter_local<F, R>(&self, arena: &'tcx DroplessArena, f: F) -> R
745 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
747 let interners = CtxtInterners::new(arena);
748 tls::enter(self, &interners, f)
752 /// A trait implemented for all X<'a> types which can be safely and
753 /// efficiently converted to X<'tcx> as long as they are part of the
754 /// provided TyCtxt<'tcx>.
755 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
756 /// by looking them up in their respective interners.
758 /// However, this is still not the best implementation as it does
759 /// need to compare the components, even for interned values.
760 /// It would be more efficient if TypedArena provided a way to
761 /// determine whether the address is in the allocated range.
763 /// None is returned if the value or one of the components is not part
764 /// of the provided context.
765 /// For Ty, None can be returned if either the type interner doesn't
766 /// contain the TypeVariants key or if the address of the interned
767 /// pointer differs. The latter case is possible if a primitive type,
768 /// e.g. `()` or `u8`, was interned in a different context.
769 pub trait Lift<'tcx> {
// Lifted is Self with its lifetime parameter rebound to 'tcx.
771 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
// Lift impls for interned values. The common pattern: if the pointer
// lives in this context's arena, the transmute is a pure lifetime
// rebinding justified by arena membership; otherwise retry against the
// global tcx (for local contexts), else fail with None.
774 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
775 type Lifted = Ty<'tcx>;
776 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
777 if tcx.interners.arena.in_arena(*self as *const _) {
778 return Some(unsafe { mem::transmute(*self) });
780 // Also try in the global tcx if we're not that.
781 if !tcx.is_global() {
782 self.lift_to_tcx(tcx.global_tcx())
789 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
790 type Lifted = &'tcx Substs<'tcx>;
791 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
// The empty slice is shared by all contexts and lifts trivially.
793 return Some(Slice::empty());
795 if tcx.interners.arena.in_arena(&self[..] as *const _) {
796 return Some(unsafe { mem::transmute(*self) });
798 // Also try in the global tcx if we're not that.
799 if !tcx.is_global() {
800 self.lift_to_tcx(tcx.global_tcx())
807 impl<'a, 'tcx> Lift<'tcx> for &'a Region {
808 type Lifted = &'tcx Region;
809 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Region> {
810 if tcx.interners.arena.in_arena(*self as *const _) {
811 return Some(unsafe { mem::transmute(*self) });
813 // Also try in the global tcx if we're not that.
814 if !tcx.is_global() {
815 self.lift_to_tcx(tcx.global_tcx())
822 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
823 type Lifted = &'tcx Slice<Ty<'tcx>>;
824 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
825 -> Option<&'tcx Slice<Ty<'tcx>>> {
// Empty slices are context-independent.
827 return Some(Slice::empty());
829 if tcx.interners.arena.in_arena(*self as *const _) {
830 return Some(unsafe { mem::transmute(*self) });
832 // Also try in the global tcx if we're not that.
833 if !tcx.is_global() {
834 self.lift_to_tcx(tcx.global_tcx())
841 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<ExistentialPredicate<'a>> {
842 type Lifted = &'tcx Slice<ExistentialPredicate<'tcx>>;
843 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
844 -> Option<&'tcx Slice<ExistentialPredicate<'tcx>>> {
// Empty slices are context-independent.
846 return Some(Slice::empty());
848 if tcx.interners.arena.in_arena(*self as *const _) {
849 return Some(unsafe { mem::transmute(*self) });
851 // Also try in the global tcx if we're not that.
852 if !tcx.is_global() {
853 self.lift_to_tcx(tcx.global_tcx())
861 use super::{CtxtInterners, GlobalCtxt, TyCtxt};
867 /// Marker types used for the scoped TLS slot.
868 /// The type context cannot be used directly because the scoped TLS
869 /// in libstd doesn't allow types generic over lifetimes.
870 enum ThreadLocalGlobalCtxt {}
871 enum ThreadLocalInterners {}
874 static TLS_TCX: Cell<Option<(*const ThreadLocalGlobalCtxt,
875 *const ThreadLocalInterners)>> = Cell::new(None)
878 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
880 write!(f, "{}", tcx.sess.codemap().span_to_string(span))
884 pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R
885 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
887 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
888 let original_span_debug = span_dbg.get();
889 span_dbg.set(span_debug);
890 let result = enter(&gcx, &gcx.global_interners, f);
891 span_dbg.set(original_span_debug);
896 pub fn enter<'a, 'gcx: 'tcx, 'tcx, F, R>(gcx: &'a GlobalCtxt<'gcx>,
897 interners: &'a CtxtInterners<'tcx>,
899 where F: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
901 let gcx_ptr = gcx as *const _ as *const ThreadLocalGlobalCtxt;
902 let interners_ptr = interners as *const _ as *const ThreadLocalInterners;
904 let prev = tls.get();
905 tls.set(Some((gcx_ptr, interners_ptr)));
915 pub fn with<F, R>(f: F) -> R
916 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
919 let (gcx, interners) = tcx.get().unwrap();
920 let gcx = unsafe { &*(gcx as *const GlobalCtxt) };
921 let interners = unsafe { &*(interners as *const CtxtInterners) };
929 pub fn with_opt<F, R>(f: F) -> R
930 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
932 if TLS_TCX.with(|tcx| tcx.get().is_some()) {
940 macro_rules! sty_debug_print {
941 ($ctxt: expr, $($variant: ident),*) => {{
942 // curious inner module to allow variant names to be used as
944 #[allow(non_snake_case)]
946 use ty::{self, TyCtxt};
947 use ty::context::Interned;
949 #[derive(Copy, Clone)]
957 pub fn go(tcx: TyCtxt) {
958 let mut total = DebugStat {
960 region_infer: 0, ty_infer: 0, both_infer: 0,
962 $(let mut $variant = total;)*
965 for &Interned(t) in tcx.interners.type_.borrow().iter() {
966 let variant = match t.sty {
967 ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
968 ty::TyFloat(..) | ty::TyStr | ty::TyNever => continue,
969 ty::TyError => /* unimportant */ continue,
970 $(ty::$variant(..) => &mut $variant,)*
972 let region = t.flags.get().intersects(ty::TypeFlags::HAS_RE_INFER);
973 let ty = t.flags.get().intersects(ty::TypeFlags::HAS_TY_INFER);
977 if region { total.region_infer += 1; variant.region_infer += 1 }
978 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
979 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
981 println!("Ty interner total ty region both");
982 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
983 {ty:4.1}% {region:5.1}% {both:4.1}%",
984 stringify!($variant),
985 uses = $variant.total,
986 usespc = $variant.total as f64 * 100.0 / total.total as f64,
987 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
988 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
989 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
991 println!(" total {uses:6} \
992 {ty:4.1}% {region:5.1}% {both:4.1}%",
994 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
995 region = total.region_infer as f64 * 100.0 / total.total as f64,
996 both = total.both_infer as f64 * 100.0 / total.total as f64)
1004 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Dump interner statistics to stdout: per-variant type-interner counts
// (via the sty_debug_print! macro above) plus sizes of the other interners.
1005 pub fn print_debug_stats(self) {
1008 TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr,
1009 TyDynamic, TyClosure, TyTuple, TyParam, TyInfer, TyProjection, TyAnon);
1011 println!("Substs interner: #{}", self.interners.substs.borrow().len());
1012 println!("Region interner: #{}", self.interners.region.borrow().len());
1013 println!("Stability interner: #{}", self.stability_interner.borrow().len());
1014 println!("Layout interner: #{}", self.layout_interner.borrow().len());
1019 /// An entry in an interner.
// Wrapper giving arena references by-content equality/hashing, so the
// FxHashSets in CtxtInterners deduplicate structurally-equal values.
// The Borrow impls let lookups use a borrowed key (e.g. TypeVariants or
// a plain slice) without constructing an Interned.
1020 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
1022 // NB: An Interned<Ty> compares and hashes as a sty.
1023 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
1024 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
1025 self.0.sty == other.0.sty
1029 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
1031 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
1032 fn hash<H: Hasher>(&self, s: &mut H) {
1037 impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>> {
1038 fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> {
1043 // NB: An Interned<Slice<T>> compares and hashes as its elements.
1044 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
1045 fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
1046 self.0[..] == other.0[..]
1050 impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}
1052 impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
1053 fn hash<H: Hasher>(&self, s: &mut H) {
1058 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
1059 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
1064 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
1065 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
1070 impl<'tcx> Borrow<Region> for Interned<'tcx, Region> {
1071 fn borrow<'a>(&'a self) -> &'a Region {
1076 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
1077 for Interned<'tcx, Slice<ExistentialPredicate<'tcx>>> {
1078 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
// Generates a `TyCtxt::$method` interning method for the `$name` interner.
// Macro parameters (some matchers are not visible in this chunk, but
// `$alloc_to_key` and `$alloc_to_ret` are used below): the input type
// `$alloc`, the arena allocation method `$alloc_method`, conversion
// closures, and a `$needs_infer` predicate deciding whether the value must
// stay in the local (inference) context rather than the global one.
1083 macro_rules! intern_method {
1084 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
1085 $alloc_method:ident,
1088 $needs_infer:expr) -> $ty:ty) => {
1089 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
1090 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
// Fast path: look the value up in the local interner first...
1092 let key = ($alloc_to_key)(&v);
1093 if let Some(i) = self.interners.$name.borrow().get(key) {
// ...and, when we are a local (inference) context, also consult the
// global interner before allocating anything new.
1096 if !self.is_global() {
1097 if let Some(i) = self.global_interners.$name.borrow().get(key) {
1103 // HACK(eddyb) Depend on flags being accurate to
1104 // determine that all contents are in the global tcx.
1105 // See comments on Lift for why we can't use that.
// Values free of inference types/regions are interned in the global
// arena so they outlive the local inference context.
1106 if !($needs_infer)(&v) {
1107 if !self.is_global() {
1111 let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
1112 self.global_interners.$name.borrow_mut().insert(Interned(i));
1116 // Make sure we don't end up with inference
1117 // types/regions in the global tcx.
1118 if self.is_global() {
1119 bug!("Attempted to intern `{:?}` which contains \
1120 inference types/regions in the global type context",
// Otherwise allocate in the local arena and record it in the local
// interner map.
1125 let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
1126 self.interners.$name.borrow_mut().insert(Interned(i));
// For each `$name: $method(...) -> $ty` entry, generates the
// `PartialEq`/`Eq`/`Hash` impls for `Interned<$ty>` (comparing/hashing the
// referenced value directly) and then delegates to `intern_method!` with
// identity conversions (`|x| x`) to produce the interning method itself.
1133 macro_rules! direct_interners {
1134 ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
1135 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
1136 fn eq(&self, other: &Self) -> bool {
1141 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
1143 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
1144 fn hash<H: Hasher>(&self, s: &mut H) {
1149 intern_method!($lt_tcx, $name: $method($ty, alloc, |x| x, |x| x, $needs_infer) -> $ty);)+
// Returns true when `x` carries the KEEP_IN_LOCAL_TCX type flag, i.e. it
// must not be interned in the global type context. Used as the
// `$needs_infer` predicate in the interner macros below.
1153 fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
1154 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Instantiates the direct interners. For regions, inference variables
// (`ReVar`) and skolemized regions are flagged as local-only, so they are
// never interned in the global context.
1157 direct_interners!('tcx,
1158 region: mk_region(|r| {
1160 &ty::ReVar(_) | &ty::ReSkolemized(..) => true,
// Generates slice-interning methods: each takes `&[$ty<'tcx>]`, allocates
// via `alloc_slice`, and returns an interned `&Slice<$ty>`. A slice needs
// the local context if ANY of its elements does (`iter().any(keep_local)`).
1166 macro_rules! slice_interners {
1167 ($($field:ident: $method:ident($ty:ident)),+) => (
1168 $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
1169 |xs: &[$ty]| -> &Slice<$ty> {
// NOTE(review): this transmute assumes `Slice<T>` is layout-compatible
// with `[T]` — confirm against the `Slice` definition before changing it.
1170 unsafe { mem::transmute(xs) }
1171 }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
// Instantiates the slice interners; the generated methods are
// underscore-prefixed because the public wrappers below add invariant
// checks before delegating to them.
1176 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
1177 type_list: _intern_type_list(Ty),
1178 substs: _intern_substs(Kind)
// Convenience constructors on TyCtxt: each `mk_*` builds a `TypeVariants`
// value and interns it via `mk_ty`, so structurally equal types share one
// allocation.
1181 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
1182 /// Create an unsafe fn ty based on a safe fn ty.
// Panics (assert) if the input signature is not `Normal` (safe).
1183 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
1184 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
1185 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
1186 unsafety: hir::Unsafety::Unsafe,
1191 // Interns a type/name combination, stores the resulting box in cx.interners,
1192 // and returns the box as cast to an unsafe ptr (see comments for Ty above).
1193 pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
// A local (inference) context also passes the global interners through,
// so globally-valid types can be interned globally.
1194 let global_interners = if !self.is_global() {
1195 Some(&self.global_interners)
1199 self.interners.intern_ty(st, global_interners)
// Maps an AST machine-integer type to the corresponding primitive Ty
// cached in `self.types`.
1202 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
1204 ast::IntTy::Is => self.types.isize,
1205 ast::IntTy::I8 => self.types.i8,
1206 ast::IntTy::I16 => self.types.i16,
1207 ast::IntTy::I32 => self.types.i32,
1208 ast::IntTy::I64 => self.types.i64,
1209 ast::IntTy::I128 => self.types.i128,
// Unsigned counterpart of `mk_mach_int`.
1213 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
1215 ast::UintTy::Us => self.types.usize,
1216 ast::UintTy::U8 => self.types.u8,
1217 ast::UintTy::U16 => self.types.u16,
1218 ast::UintTy::U32 => self.types.u32,
1219 ast::UintTy::U64 => self.types.u64,
1220 ast::UintTy::U128 => self.types.u128,
// Float counterpart of `mk_mach_int`.
1224 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
1226 ast::FloatTy::F32 => self.types.f32,
1227 ast::FloatTy::F64 => self.types.f64,
1231 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str`: an immutable reference with the static region.
1235 pub fn mk_static_str(self) -> Ty<'tcx> {
1236 self.mk_imm_ref(self.mk_region(ty::ReStatic), self.mk_str())
1239 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1240 // take a copy of substs so that we own the vectors inside
1241 self.mk_ty(TyAdt(def, substs))
// Builds `Box<ty>` by resolving the owned-box lang item to its AdtDef and
// substituting `ty` as its single type parameter. Panics (via
// `require_lang_item`) if the lang item is missing.
1244 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1245 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
1246 let adt_def = self.lookup_adt_def(def_id);
1247 let substs = self.mk_substs(iter::once(Kind::from(ty)));
1248 self.mk_ty(TyAdt(adt_def, substs))
1251 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1252 self.mk_ty(TyRawPtr(tm))
1255 pub fn mk_ref(self, r: &'tcx Region, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1256 self.mk_ty(TyRef(r, tm))
// The `mut`/`imm` ref/ptr helpers below just fix the mutability field.
1259 pub fn mk_mut_ref(self, r: &'tcx Region, ty: Ty<'tcx>) -> Ty<'tcx> {
1260 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1263 pub fn mk_imm_ref(self, r: &'tcx Region, ty: Ty<'tcx>) -> Ty<'tcx> {
1264 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
1267 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1268 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1271 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1272 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
// `*const ()`.
1275 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
1276 self.mk_imm_ptr(self.mk_nil())
1279 pub fn mk_array(self, ty: Ty<'tcx>, n: usize) -> Ty<'tcx> {
1280 self.mk_ty(TyArray(ty, n))
1283 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1284 self.mk_ty(TySlice(ty))
// Tuple from a ready slice of element types; `defaulted` is carried into
// the `TyTuple` variant unchanged.
1287 pub fn intern_tup(self, ts: &[Ty<'tcx>], defaulted: bool) -> Ty<'tcx> {
1288 self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted))
// Tuple from any iterator of element types (see the InternAs trait below
// in this file).
1291 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I,
1292 defaulted: bool) -> I::Output {
1293 iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted)))
// The unit type `()`.
1296 pub fn mk_nil(self) -> Ty<'tcx> {
1297 self.intern_tup(&[], false)
// Default type for diverging expressions: depends on whether the
// `never_type` feature gate is enabled; otherwise a `defaulted` unit tuple.
1300 pub fn mk_diverging_default(self) -> Ty<'tcx> {
1301 if self.sess.features.borrow().never_type {
1304 self.intern_tup(&[], true)
1308 pub fn mk_bool(self) -> Ty<'tcx> {
1312 pub fn mk_fn_def(self, def_id: DefId,
1313 substs: &'tcx Substs<'tcx>,
1314 fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
1315 self.mk_ty(TyFnDef(def_id, substs, fty))
1318 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
1319 self.mk_ty(TyFnPtr(fty))
// Trait-object type from its (binder-wrapped) predicate list and region.
1324 obj: ty::Binder<&'tcx Slice<ExistentialPredicate<'tcx>>>,
1325 reg: &'tcx ty::Region
1327 self.mk_ty(TyDynamic(obj, reg))
// Associated-type projection `<T as Trait>::item_name`.
1330 pub fn mk_projection(self,
1331 trait_ref: TraitRef<'tcx>,
1334 // take a copy of substs so that we own the vectors inside
1335 let inner = ProjectionTy { trait_ref: trait_ref, item_name: item_name };
1336 self.mk_ty(TyProjection(inner))
// Closure type; wraps the raw substs in `ClosureSubsts` and delegates.
1339 pub fn mk_closure(self,
1341 substs: &'tcx Substs<'tcx>)
1343 self.mk_closure_from_closure_substs(closure_id, ClosureSubsts {
1348 pub fn mk_closure_from_closure_substs(self,
1350 closure_substs: ClosureSubsts<'tcx>)
1352 self.mk_ty(TyClosure(closure_id, closure_substs))
// Inference-variable constructors: type, integral, and float variables.
1355 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
1356 self.mk_infer(TyVar(v))
1359 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
1360 self.mk_infer(IntVar(v))
1363 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
1364 self.mk_infer(FloatVar(v))
1367 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
1368 self.mk_ty(TyInfer(it))
// Type parameter by index and name.
1371 pub fn mk_param(self,
1373 name: Name) -> Ty<'tcx> {
1374 self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
// `Self` is represented as the type parameter at index 0.
1377 pub fn mk_self_type(self) -> Ty<'tcx> {
1378 self.mk_param(0, keywords::SelfType.name())
1381 pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
1382 self.mk_param(def.index, def.name)
1385 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1386 self.mk_ty(TyAnon(def_id, substs))
// Public wrapper over the generated slice interner: asserts the predicate
// list is non-empty and sorted (non-decreasing under `cmp`), which callers
// rely on for canonical trait-object representations.
1389 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
1390 -> &'tcx Slice<ExistentialPredicate<'tcx>> {
1391 assert!(!eps.is_empty());
1392 assert!(eps.windows(2).all(|w| w[0].cmp(self, &w[1]) != Ordering::Greater));
1393 self._intern_existential_predicates(eps)
1396 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice<Ty<'tcx>> {
1400 self._intern_type_list(ts)
1404 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice<Kind<'tcx>> {
1408 self._intern_substs(ts)
// Builds a FnSig from an input-type iterator plus a single output type:
// the output is appended as the last element of `inputs_and_output`.
1412 pub fn mk_fn_sig<I>(self,
1416 unsafety: hir::Unsafety,
1418 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
1420 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
1422 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
1423 inputs_and_output: self.intern_type_list(xs),
1424 variadic, unsafety, abi
// Iterator-based wrappers over the checked intern_* methods above.
1428 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
1429 &'tcx Slice<ExistentialPredicate<'tcx>>>>(self, iter: I)
1431 iter.intern_with(|xs| self.intern_existential_predicates(xs))
1434 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
1435 &'tcx Slice<Ty<'tcx>>>>(self, iter: I) -> I::Output {
1436 iter.intern_with(|xs| self.intern_type_list(xs))
1439 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
1440 &'tcx Slice<Kind<'tcx>>>>(self, iter: I) -> I::Output {
1441 iter.intern_with(|xs| self.intern_substs(xs))
// Substs for a trait reference: the self type `s` first, followed by the
// remaining parameters `t`.
1444 pub fn mk_substs_trait(self,
1447 -> &'tcx Substs<'tcx>
1449 self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from))
// Abstraction over "collect this into a temporary buffer, then intern it
// with `f`". `Self::Output` lets fallible element types (e.g. Result)
// propagate errors through interning.
1453 pub trait InternAs<T: ?Sized, R> {
1455 fn intern_with<F>(self, F) -> Self::Output
1456 where F: FnOnce(&T) -> R;
// Blanket impl: any iterator interns by delegating to its element type's
// `InternIteratorElement` strategy (owned, borrowed, or Result-wrapped).
1459 impl<I, T, R, E> InternAs<[T], R> for I
1460 where E: InternIteratorElement<T, R>,
1461 I: Iterator<Item=E> {
1462 type Output = E::Output;
1463 fn intern_with<F>(self, f: F) -> Self::Output
1464 where F: FnOnce(&[T]) -> R {
1465 E::intern_with(self, f)
// Per-element strategy for collecting an iterator into a slice and passing
// it to the interning closure `f`.
1469 pub trait InternIteratorElement<T, R>: Sized {
1471 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
// Owned elements: collect into an AccumulateVec (stack-allocated up to 8
// elements, heap beyond) and intern the resulting slice.
1474 impl<T, R> InternIteratorElement<T, R> for T {
1476 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
1477 f(&iter.collect::<AccumulateVec<[_; 8]>>())
// Borrowed elements: clone each reference's target into the buffer before
// interning.
1481 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
1485 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
1486 f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
1490 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
1491 type Output = Result<R, E>;
1492 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
1493 Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))