1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
18 use hir::def::{Def, ExportMap};
19 use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
20 use hir::map as hir_map;
21 use hir::map::DisambiguatedDefPathData;
22 use middle::free_region::FreeRegionMap;
23 use middle::lang_items;
24 use middle::resolve_lifetime;
25 use middle::stability;
27 use mir::transform::Passes;
28 use ty::subst::{Kind, Substs};
31 use ty::{self, TraitRef, Ty, TypeAndMut};
32 use ty::{TyS, TypeVariants, Slice};
33 use ty::{AdtKind, AdtDef, ClosureSubsts, Region};
35 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
37 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
38 use ty::TypeVariants::*;
39 use ty::layout::{Layout, TargetDataLayout};
40 use ty::inhabitedness::DefIdForest;
43 use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet};
44 use util::nodemap::{FxHashMap, FxHashSet};
45 use rustc_data_structures::accumulate_vec::AccumulateVec;
47 use arena::{TypedArena, DroplessArena};
48 use rustc_data_structures::indexed_vec::IndexVec;
49 use std::borrow::Borrow;
50 use std::cell::{Cell, RefCell};
51 use std::cmp::Ordering;
52 use std::hash::{Hash, Hasher};
58 use syntax::ast::{self, Name, NodeId};
60 use syntax::symbol::{Symbol, keywords};
// Arena-backed storage for crate-global, long-lived type-checking data.
// Everything allocated here lives for the whole 'tcx lifetime of the
// global type context and is never individually freed.
66 pub struct GlobalArenas<'tcx> {
// Interned type layouts (see `intern_layout` below).
68 layout: TypedArena<Layout>,
71 generics: TypedArena<ty::Generics>,
72 trait_def: TypedArena<ty::TraitDef>,
73 adt_def: TypedArena<ty::AdtDef>,
// MIR wrapped in `Steal` so a pass can take ownership of it in place.
74 steal_mir: TypedArena<Steal<Mir<'tcx>>>,
75 mir: TypedArena<Mir<'tcx>>,
76 tables: TypedArena<ty::TypeckTables<'tcx>>,
79 impl<'tcx> GlobalArenas<'tcx> {
// Construct a fresh set of empty arenas.
80 pub fn new() -> GlobalArenas<'tcx> {
82 layout: TypedArena::new(),
83 generics: TypedArena::new(),
84 trait_def: TypedArena::new(),
85 adt_def: TypedArena::new(),
86 steal_mir: TypedArena::new(),
87 mir: TypedArena::new(),
88 tables: TypedArena::new(),
// Hash-set based interners for types, substs, regions and predicate
// lists. Interning guarantees one canonical allocation per distinct
// value, so equality can be checked by pointer comparison.
93 pub struct CtxtInterners<'tcx> {
94 /// The arena that types, regions, etc are allocated from
95 arena: &'tcx DroplessArena,
97 /// Specifically use a speedy hash algorithm for these hash sets,
98 /// they're accessed quite often.
99 type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
100 type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
101 substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
102 region: RefCell<FxHashSet<Interned<'tcx, RegionKind>>>,
103 existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
104 predicates: RefCell<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
107 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
// Create empty interners that allocate out of the given arena.
108 fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
111 type_: RefCell::new(FxHashSet()),
112 type_list: RefCell::new(FxHashSet()),
113 substs: RefCell::new(FxHashSet()),
114 region: RefCell::new(FxHashSet()),
115 existential_predicates: RefCell::new(FxHashSet()),
116 predicates: RefCell::new(FxHashSet()),
120 /// Intern a type. global_interners is Some only if this is
121 /// a local interner and global_interners is its counterpart.
122 fn intern_ty(&self, st: TypeVariants<'tcx>,
123 global_interners: Option<&CtxtInterners<'gcx>>)
// First probe the local interner, then (for a local tcx) the global one.
126 let mut interner = self.type_.borrow_mut();
127 let global_interner = global_interners.map(|interners| {
128 interners.type_.borrow_mut()
130 if let Some(&Interned(ty)) = interner.get(&st) {
133 if let Some(ref interner) = global_interner {
134 if let Some(&Interned(ty)) = interner.get(&st) {
// Not interned yet: compute type flags, which also tell us whether the
// type mentions inference types/regions (and hence must stay local).
139 let flags = super::flags::FlagComputation::for_sty(&st);
140 let ty_struct = TyS {
143 region_depth: flags.depth,
146 // HACK(eddyb) Depend on flags being accurate to
147 // determine that all contents are in the global tcx.
148 // See comments on Lift for why we can't use that.
149 if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
150 if let Some(interner) = global_interners {
// SAFETY-relevant: the transmute shortens 'tcx to 'gcx; it relies on the
// flag check above proving the type contains no local-only data.
151 let ty_struct: TyS<'gcx> = unsafe {
152 mem::transmute(ty_struct)
154 let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
155 global_interner.unwrap().insert(Interned(ty));
159 // Make sure we don't end up with inference
160 // types/regions in the global tcx.
161 if global_interners.is_none() {
163 bug!("Attempted to intern `{:?}` which contains \
164 inference types/regions in the global type context",
169 // Don't be &mut TyS.
170 let ty: Ty<'tcx> = self.arena.alloc(ty_struct);
171 interner.insert(Interned(ty));
175 debug!("Interned type: {:?} Pointer: {:?}",
176 ty, ty as *const TyS);
// Frequently-used types and regions, interned once up front so the rest
// of the compiler can grab them without hitting the interner hash sets.
182 pub struct CommonTypes<'tcx> {
202 pub re_empty: Region<'tcx>,
203 pub re_static: Region<'tcx>,
// The erased region used after region erasure (e.g. in trans).
204 pub re_erased: Region<'tcx>,
// Per-body results of type checking: node types, substitutions,
// adjustments, method resolutions, closure info, and related side tables.
// Serialized into crate metadata (hence the encode/decode derives).
207 #[derive(RustcEncodable, RustcDecodable)]
208 pub struct TypeckTables<'tcx> {
209 /// Resolved definitions for `<T>::X` associated paths.
210 pub type_relative_path_defs: NodeMap<Def>,
212 /// Stores the types for various nodes in the AST. Note that this table
213 /// is not guaranteed to be populated until after typeck. See
214 /// typeck::check::fn_ctxt for details.
215 pub node_types: NodeMap<Ty<'tcx>>,
217 /// Stores the type parameters which were substituted to obtain the type
218 /// of this node. This only applies to nodes that refer to entities
219 /// parameterized by type parameters, such as generic fns, types, or
221 pub item_substs: NodeMap<ty::ItemSubsts<'tcx>>,
// Adjustments (autoderef, autoref, coercions) applied to expressions.
223 pub adjustments: NodeMap<ty::adjustment::Adjustment<'tcx>>,
// Resolutions for overloaded method and operator calls.
225 pub method_map: ty::MethodMap<'tcx>,
// How each upvar is captured by its closure (by value or by reference).
228 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
230 /// Records the type of each closure.
231 pub closure_tys: NodeMap<ty::PolyFnSig<'tcx>>,
233 /// Records the kind of each closure and the span of the variable that
234 /// caused the closure to be this kind.
235 pub closure_kinds: NodeMap<(ty::ClosureKind, Option<Span>)>,
237 /// For each fn, records the "liberated" types of its arguments
238 /// and return type. Liberated means that all bound regions
239 /// (including late-bound regions) are replaced with free
240 /// equivalents. This table is not used in trans (since regions
241 /// are erased there) and hence is not serialized to metadata.
242 pub liberated_fn_sigs: NodeMap<ty::FnSig<'tcx>>,
244 /// For each FRU expression, record the normalized types of the fields
245 /// of the struct - this is needed because it is non-trivial to
246 /// normalize while preserving regions. This table is used only in
247 /// MIR construction and hence is not serialized to metadata.
248 pub fru_field_types: NodeMap<Vec<Ty<'tcx>>>,
250 /// Maps a cast expression to its kind. This is keyed on the
251 /// *from* expression of the cast, not the cast itself.
252 pub cast_kinds: NodeMap<ty::cast::CastKind>,
254 /// Lints for the body of this fn generated by typeck.
255 pub lints: lint::LintTable,
257 /// Set of trait imports actually used in the method resolution.
258 /// This is used for warning unused imports.
259 pub used_trait_imports: DefIdSet,
261 /// If any errors occurred while type-checking this body,
262 /// this field will be set to `true`.
263 pub tainted_by_errors: bool,
265 /// Stores the free-region relationships that were deduced from
266 /// its where clauses and parameter types. These are then
267 /// read-again by borrowck.
268 pub free_region_map: FreeRegionMap<'tcx>,
// Accessors over the typeck side tables. The `*_opt` variants return
// `None` on a missing entry; the non-`opt` variants ICE (`bug!`) instead.
271 impl<'tcx> TypeckTables<'tcx> {
// An empty set of tables, used as the starting point before type-checking.
272 pub fn empty() -> TypeckTables<'tcx> {
274 type_relative_path_defs: NodeMap(),
275 node_types: FxHashMap(),
276 item_substs: NodeMap(),
277 adjustments: NodeMap(),
278 method_map: FxHashMap(),
279 upvar_capture_map: FxHashMap(),
280 closure_tys: NodeMap(),
281 closure_kinds: NodeMap(),
282 liberated_fn_sigs: NodeMap(),
283 fru_field_types: NodeMap(),
284 cast_kinds: NodeMap(),
285 lints: lint::LintTable::new(),
286 used_trait_imports: DefIdSet(),
287 tainted_by_errors: false,
288 free_region_map: FreeRegionMap::new(),
292 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
293 pub fn qpath_def(&self, qpath: &hir::QPath, id: NodeId) -> Def {
295 hir::QPath::Resolved(_, ref path) => path.def,
// Type-relative paths fall back to `Def::Err` when unresolved.
296 hir::QPath::TypeRelative(..) => {
297 self.type_relative_path_defs.get(&id).cloned().unwrap_or(Def::Err)
// Panics (ICE) when the node has no recorded type; use
// `node_id_to_type_opt` for a fallible lookup.
302 pub fn node_id_to_type(&self, id: NodeId) -> Ty<'tcx> {
303 match self.node_id_to_type_opt(id) {
306 bug!("node_id_to_type: no type for node `{}`",
307 tls::with(|tcx| tcx.hir.node_to_string(id)))
312 pub fn node_id_to_type_opt(&self, id: NodeId) -> Option<Ty<'tcx>> {
313 self.node_types.get(&id).cloned()
316 pub fn node_id_item_substs(&self, id: NodeId) -> Option<&'tcx Substs<'tcx>> {
317 self.item_substs.get(&id).map(|ts| ts.substs)
320 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
321 // doesn't provide type parameter substitutions.
322 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
323 self.node_id_to_type(pat.id)
326 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
327 self.node_id_to_type_opt(pat.id)
330 // Returns the type of an expression as a monotype.
332 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
333 // some cases, we insert `Adjustment` annotations such as auto-deref or
334 // auto-ref. The type returned by this function does not consider such
335 // adjustments. See `expr_ty_adjusted()` instead.
337 // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
338 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
339 // instead of "fn(ty) -> T with T = isize".
340 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
341 self.node_id_to_type(expr.id)
344 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
345 self.node_id_to_type_opt(expr.id)
348 /// Returns the type of `expr`, considering any `Adjustment`
349 /// entry recorded for that expression.
350 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
351 self.adjustments.get(&expr.id)
352 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
355 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
356 self.adjustments.get(&expr.id)
357 .map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr))
360 pub fn is_method_call(&self, expr_id: NodeId) -> bool {
361 self.method_map.contains_key(&ty::MethodCall::expr(expr_id))
364 pub fn is_overloaded_autoderef(&self, expr_id: NodeId, autoderefs: u32) -> bool {
365 self.method_map.contains_key(&ty::MethodCall::autoderef(expr_id, autoderefs))
// NOTE(review): despite returning `Option`, this `unwrap`s the map lookup
// and so panics when the upvar has no recorded capture — confirm intent.
368 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option<ty::UpvarCapture<'tcx>> {
369 Some(self.upvar_capture_map.get(&upvar_id).unwrap().clone())
// Pre-intern all the common primitive types and regions at context
// creation, so later lookups are cheap pointer copies.
373 impl<'tcx> CommonTypes<'tcx> {
374 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
// Helper closures: `mk` interns a type, `mk_region` interns a region
// (reusing an existing interned allocation when present).
375 let mk = |sty| interners.intern_ty(sty, None);
376 let mk_region = |r| {
377 if let Some(r) = interners.region.borrow().get(&r) {
380 let r = interners.arena.alloc(r);
381 interners.region.borrow_mut().insert(Interned(r));
389 isize: mk(TyInt(ast::IntTy::Is)),
390 i8: mk(TyInt(ast::IntTy::I8)),
391 i16: mk(TyInt(ast::IntTy::I16)),
392 i32: mk(TyInt(ast::IntTy::I32)),
393 i64: mk(TyInt(ast::IntTy::I64)),
394 i128: mk(TyInt(ast::IntTy::I128)),
395 usize: mk(TyUint(ast::UintTy::Us)),
396 u8: mk(TyUint(ast::UintTy::U8)),
397 u16: mk(TyUint(ast::UintTy::U16)),
398 u32: mk(TyUint(ast::UintTy::U32)),
399 u64: mk(TyUint(ast::UintTy::U64)),
400 u128: mk(TyUint(ast::UintTy::U128)),
401 f32: mk(TyFloat(ast::FloatTy::F32)),
402 f64: mk(TyFloat(ast::FloatTy::F64)),
404 re_empty: mk_region(RegionKind::ReEmpty),
405 re_static: mk_region(RegionKind::ReStatic),
406 re_erased: mk_region(RegionKind::ReErased),
411 /// The data structure to keep track of all the information that typechecker
412 /// generates so that it can be reused and doesn't have to be redone
// Cheap-to-copy handle: a pair of references to the global context and to
// the (possibly local) interners. `Deref` forwards to the global context.
414 #[derive(Copy, Clone)]
415 pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
416 gcx: &'a GlobalCtxt<'gcx>,
// Points at `gcx.global_interners` for a global tcx, or at a
// shorter-lived local interner set (see `enter_local`).
417 interners: &'a CtxtInterners<'tcx>
420 impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
421 type Target = &'a GlobalCtxt<'gcx>;
422 fn deref(&self) -> &Self::Target {
// The one-per-compilation global type context: arenas, interners, common
// types, resolution results, and the many caches shared across queries.
427 pub struct GlobalCtxt<'tcx> {
428 global_arenas: &'tcx GlobalArenas<'tcx>,
429 global_interners: CtxtInterners<'tcx>,
431 pub sess: &'tcx Session,
433 pub specializes_cache: RefCell<traits::SpecializesCache>,
435 pub trans_trait_caches: traits::trans::TransTraitCaches<'tcx>,
// Dependency graph for incremental compilation tracking.
437 pub dep_graph: DepGraph,
439 /// Common types, pre-interned for your convenience.
440 pub types: CommonTypes<'tcx>,
442 /// Map indicating what traits are in scope for places where this
443 /// is relevant; generated by resolve.
444 pub trait_map: TraitMap,
446 /// Export map produced by name resolution.
447 pub export_map: ExportMap,
449 pub named_region_map: resolve_lifetime::NamedRegionMap,
451 pub hir: hir_map::Map<'tcx>,
453 pub maps: maps::Maps<'tcx>,
455 pub mir_passes: Rc<Passes>,
457 // Records the free variables referenced by every closure
458 // expression. Do not track deps for this, just recompute it from
459 // scratch every time.
460 pub freevars: RefCell<FreevarMap>,
462 pub maybe_unused_trait_imports: NodeSet,
464 // Internal cache for metadata decoding. No need to track deps on this.
465 pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
467 // FIXME dep tracking -- should be harmless enough
468 pub normalized_cache: RefCell<FxHashMap<Ty<'tcx>, Ty<'tcx>>>,
470 pub inhabitedness_cache: RefCell<FxHashMap<Ty<'tcx>, DefIdForest>>,
472 pub lang_items: middle::lang_items::LanguageItems,
474 /// Set of used unsafe nodes (functions or blocks). Unsafe nodes not
475 /// present in this set can be warned about.
476 pub used_unsafe: RefCell<NodeSet>,
478 /// Set of nodes which mark locals as mutable which end up getting used at
479 /// some point. Local variable definitions not in this set can be warned
481 pub used_mut_nodes: RefCell<NodeSet>,
483 /// Maps any item's def-id to its stability index.
484 pub stability: RefCell<stability::Index<'tcx>>,
486 /// Caches the results of trait selection. This cache is used
487 /// for things that do not have to do with the parameters in scope.
488 pub selection_cache: traits::SelectionCache<'tcx>,
490 /// Caches the results of trait evaluation. This cache is used
491 /// for things that do not have to do with the parameters in scope.
492 /// Merge this with `selection_cache`?
493 pub evaluation_cache: traits::EvaluationCache<'tcx>,
495 /// A set of predicates that have been fulfilled *somewhere*.
496 /// This is used to avoid duplicate work. Predicates are only
497 /// added to this set when they mention only "global" names
498 /// (i.e., no type or lifetime parameters).
499 pub fulfilled_predicates: RefCell<traits::GlobalFulfilledPredicates<'tcx>>,
501 /// Maps Expr NodeId's to `true` iff `&expr` can have 'static lifetime.
502 pub rvalue_promotable_to_static: RefCell<NodeMap<bool>>,
504 /// Maps Fn items to a collection of fragment infos.
506 /// The main goal is to identify data (each of which may be moved
507 /// or assigned) whose subparts are not moved nor assigned
508 /// (i.e. their state is *unfragmented*) and corresponding ast
509 /// nodes where the path to that data is moved or assigned.
511 /// In the long term, unfragmented values will have their
512 /// destructor entirely driven by a single stack-local drop-flag,
513 /// and their parents, the collections of the unfragmented values
514 /// (or more simply, "fragmented values"), are mapped to the
515 /// corresponding collections of stack-local drop-flags.
517 /// (However, in the short term that is not the case; e.g. some
518 /// unfragmented paths still need to be zeroed, namely when they
519 /// reference parent data from an outer scope that was not
520 /// entirely moved, and therefore that needs to be zeroed so that
521 /// we do not get double-drop when we hit the end of the parent
524 /// Also: currently the table solely holds keys for node-ids of
525 /// unfragmented values (see `FragmentInfo` enum definition), but
526 /// longer-term we will need to also store mappings from
527 /// fragmented data to the set of unfragmented pieces that
529 pub fragment_infos: RefCell<DefIdMap<Vec<ty::FragmentInfo>>>,
531 /// The definite name of the current crate after taking into account
532 /// attributes, commandline parameters, etc.
533 pub crate_name: Symbol,
535 /// Data layout specification for the current target.
536 pub data_layout: TargetDataLayout,
538 /// Cache for layouts computed from types.
539 pub layout_cache: RefCell<FxHashMap<Ty<'tcx>, &'tcx Layout>>,
541 /// Used to prevent layout from recursing too deeply.
542 pub layout_depth: Cell<usize>,
544 /// Map from function to the `#[derive]` mode that it's defining. Only used
545 /// by `proc-macro` crates.
546 pub derive_macros: RefCell<NodeMap<Symbol>>,
// Interner sets backing `intern_stability` / `intern_layout` below.
548 stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,
550 layout_interner: RefCell<FxHashSet<&'tcx Layout>>,
552 /// A vector of every trait accessible in the whole crate
553 /// (i.e. including those from subcrates). This is used only for
554 /// error reporting, and so is lazily initialised and generally
555 /// shouldn't taint the common path (hence the RefCell).
556 pub all_traits: RefCell<Option<Vec<DefId>>>,
559 impl<'tcx> GlobalCtxt<'tcx> {
560 /// Get the global TyCtxt.
// Builds a TyCtxt whose interners are the global ones (both lifetime
// parameters equal 'tcx).
561 pub fn global_tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
564 interners: &self.global_interners
// Main inherent methods on TyCtxt: crate metadata queries, arena
// allocation helpers, ad-hoc interners, lifting, and context creation.
569 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
// Crate name: answered locally for LOCAL_CRATE, otherwise via the
// crate store (metadata of an upstream crate).
570 pub fn crate_name(self, cnum: CrateNum) -> Symbol {
571 if cnum == LOCAL_CRATE {
574 self.sess.cstore.crate_name(cnum)
// Like `crate_name`, but the name before any renaming/overrides.
578 pub fn original_crate_name(self, cnum: CrateNum) -> Symbol {
579 if cnum == LOCAL_CRATE {
580 self.crate_name.clone()
582 self.sess.cstore.original_crate_name(cnum)
586 pub fn crate_disambiguator(self, cnum: CrateNum) -> Symbol {
587 if cnum == LOCAL_CRATE {
588 self.sess.local_crate_disambiguator()
590 self.sess.cstore.crate_disambiguator(cnum)
// Map a def-path back to a DefId, locally via the HIR map or remotely
// via the crate store.
594 pub fn retrace_path(self,
596 path_data: &[DisambiguatedDefPathData])
598 debug!("retrace_path(path={:?}, krate={:?})", path_data, self.crate_name(krate));
600 if krate == LOCAL_CRATE {
604 .retrace_path(path_data)
605 .map(|def_index| DefId { krate: krate, index: def_index })
607 self.sess.cstore.retrace_path(krate, path_data)
// --- Arena allocation helpers: all return 'gcx references into
// --- `global_arenas`, i.e. allocations that live as long as the context.
611 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
612 self.global_arenas.generics.alloc(generics)
615 pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
616 self.global_arenas.steal_mir.alloc(Steal::new(mir))
619 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
620 self.global_arenas.mir.alloc(mir)
623 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
624 self.global_arenas.tables.alloc(tables)
627 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
628 self.global_arenas.trait_def.alloc(def)
631 pub fn alloc_adt_def(self,
634 variants: Vec<ty::VariantDef>,
636 -> &'gcx ty::AdtDef {
637 let def = ty::AdtDef::new(self, did, kind, variants, repr);
638 self.global_arenas.adt_def.alloc(def)
// Intern a stability attribute; `replace` returning Some means the value
// was already present, which would indicate a racing double-insert.
641 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
642 if let Some(st) = self.stability_interner.borrow().get(&stab) {
646 let interned = self.global_interners.arena.alloc(stab);
647 if let Some(prev) = self.stability_interner.borrow_mut().replace(interned) {
648 bug!("Tried to overwrite interned Stability: {:?}", prev)
// Same interning pattern for computed type layouts.
653 pub fn intern_layout(self, layout: Layout) -> &'gcx Layout {
654 if let Some(layout) = self.layout_interner.borrow().get(&layout) {
658 let interned = self.global_arenas.layout.alloc(layout);
659 if let Some(prev) = self.layout_interner.borrow_mut().replace(interned) {
660 bug!("Tried to overwrite interned Layout: {:?}", prev)
665 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
666 value.lift_to_tcx(self)
669 /// Like lift, but only tries in the global tcx.
670 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
671 value.lift_to_tcx(self.global_tcx())
674 /// Returns true if self is the same as self.global_tcx().
// Decided by pointer identity of the interner sets.
675 fn is_global(self) -> bool {
676 let local = self.interners as *const _;
677 let global = &self.global_interners as *const _;
678 local as usize == global as usize
681 /// Create a type context and call the closure with a `TyCtxt` reference
682 /// to the context. The closure enforces that the type context and any interned
683 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
684 /// reference to the context, to allow formatting values that need it.
685 pub fn create_and_enter<F, R>(s: &'tcx Session,
686 local_providers: ty::maps::Providers<'tcx>,
687 extern_providers: ty::maps::Providers<'tcx>,
688 mir_passes: Rc<Passes>,
689 arenas: &'tcx GlobalArenas<'tcx>,
690 arena: &'tcx DroplessArena,
691 resolutions: ty::Resolutions,
692 named_region_map: resolve_lifetime::NamedRegionMap,
693 hir: hir_map::Map<'tcx>,
694 lang_items: middle::lang_items::LanguageItems,
695 stability: stability::Index<'tcx>,
698 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
700 let data_layout = TargetDataLayout::parse(s);
701 let interners = CtxtInterners::new(arena);
702 let common_types = CommonTypes::new(&interners);
703 let dep_graph = hir.dep_graph.clone();
704 let fulfilled_predicates = traits::GlobalFulfilledPredicates::new(dep_graph.clone());
// Provider table indexed by crate number: externs everywhere, the
// local providers for LOCAL_CRATE.
705 let max_cnum = s.cstore.crates().iter().map(|c| c.as_usize()).max().unwrap_or(0);
706 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
707 providers[LOCAL_CRATE] = local_providers;
708 tls::enter_global(GlobalCtxt {
710 trans_trait_caches: traits::trans::TransTraitCaches::new(dep_graph.clone()),
711 specializes_cache: RefCell::new(traits::SpecializesCache::new()),
712 global_arenas: arenas,
713 global_interners: interners,
714 dep_graph: dep_graph.clone(),
716 named_region_map: named_region_map,
717 trait_map: resolutions.trait_map,
718 export_map: resolutions.export_map,
719 fulfilled_predicates: RefCell::new(fulfilled_predicates),
721 maps: maps::Maps::new(dep_graph, providers),
723 freevars: RefCell::new(resolutions.freevars),
724 maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports,
725 rcache: RefCell::new(FxHashMap()),
726 normalized_cache: RefCell::new(FxHashMap()),
727 inhabitedness_cache: RefCell::new(FxHashMap()),
728 lang_items: lang_items,
729 used_unsafe: RefCell::new(NodeSet()),
730 used_mut_nodes: RefCell::new(NodeSet()),
731 stability: RefCell::new(stability),
732 selection_cache: traits::SelectionCache::new(),
733 evaluation_cache: traits::EvaluationCache::new(),
734 rvalue_promotable_to_static: RefCell::new(NodeMap()),
735 fragment_infos: RefCell::new(DefIdMap()),
736 crate_name: Symbol::intern(crate_name),
737 data_layout: data_layout,
738 layout_cache: RefCell::new(FxHashMap()),
739 layout_interner: RefCell::new(FxHashSet()),
740 layout_depth: Cell::new(0),
741 derive_macros: RefCell::new(NodeMap()),
742 stability_interner: RefCell::new(FxHashSet()),
743 all_traits: RefCell::new(None),
// Forward optimization-fuel decisions to the session, keyed by crate name.
747 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
748 let cname = self.crate_name(LOCAL_CRATE).as_str();
749 self.sess.consider_optimizing(&cname, msg)
753 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
754 /// Call the closure with a local `TyCtxt` using the given arena.
// The local tcx gets fresh interners backed by the caller-provided
// arena, so locally-interned values die when that arena does.
755 pub fn enter_local<F, R>(&self, arena: &'tcx DroplessArena, f: F) -> R
756 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
758 let interners = CtxtInterners::new(arena);
759 tls::enter(self, &interners, f)
763 /// A trait implemented for all X<'a> types which can be safely and
764 /// efficiently converted to X<'tcx> as long as they are part of the
765 /// provided TyCtxt<'tcx>.
766 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
767 /// by looking them up in their respective interners.
769 /// However, this is still not the best implementation as it does
770 /// need to compare the components, even for interned values.
771 /// It would be more efficient if TypedArena provided a way to
772 /// determine whether the address is in the allocated range.
774 /// None is returned if the value or one of the components is not part
775 /// of the provided context.
776 /// For Ty, None can be returned if either the type interner doesn't
777 /// contain the TypeVariants key or if the address of the interned
778 /// pointer differs. The latter case is possible if a primitive type,
779 /// e.g. `()` or `u8`, was interned in a different context.
780 pub trait Lift<'tcx> {
782 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
// Each impl below follows the same pattern: if the value lives in this
// tcx's arena the lifetime change is a no-op (transmute); otherwise
// retry against the global tcx.
785 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
786 type Lifted = Ty<'tcx>;
787 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
788 if tcx.interners.arena.in_arena(*self as *const _) {
// SAFETY-relevant: only changes the lifetime; the in_arena check above
// proves the allocation belongs to this context.
789 return Some(unsafe { mem::transmute(*self) });
791 // Also try in the global tcx if we're not that.
792 if !tcx.is_global() {
793 self.lift_to_tcx(tcx.global_tcx())
800 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
801 type Lifted = &'tcx Substs<'tcx>;
802 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
// The empty slice is shared and context-independent.
804 return Some(Slice::empty());
806 if tcx.interners.arena.in_arena(&self[..] as *const _) {
807 return Some(unsafe { mem::transmute(*self) });
809 // Also try in the global tcx if we're not that.
810 if !tcx.is_global() {
811 self.lift_to_tcx(tcx.global_tcx())
818 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
819 type Lifted = Region<'tcx>;
820 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
821 if tcx.interners.arena.in_arena(*self as *const _) {
822 return Some(unsafe { mem::transmute(*self) });
824 // Also try in the global tcx if we're not that.
825 if !tcx.is_global() {
826 self.lift_to_tcx(tcx.global_tcx())
833 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
834 type Lifted = &'tcx Slice<Ty<'tcx>>;
835 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
836 -> Option<&'tcx Slice<Ty<'tcx>>> {
838 return Some(Slice::empty());
840 if tcx.interners.arena.in_arena(*self as *const _) {
841 return Some(unsafe { mem::transmute(*self) });
843 // Also try in the global tcx if we're not that.
844 if !tcx.is_global() {
845 self.lift_to_tcx(tcx.global_tcx())
852 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<ExistentialPredicate<'a>> {
853 type Lifted = &'tcx Slice<ExistentialPredicate<'tcx>>;
854 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
855 -> Option<&'tcx Slice<ExistentialPredicate<'tcx>>> {
857 return Some(Slice::empty());
859 if tcx.interners.arena.in_arena(*self as *const _) {
860 return Some(unsafe { mem::transmute(*self) });
862 // Also try in the global tcx if we're not that.
863 if !tcx.is_global() {
864 self.lift_to_tcx(tcx.global_tcx())
// Thread-local storage for the current TyCtxt, so deeply nested code
// (e.g. Debug impls) can reach the context without threading it through.
872 use super::{CtxtInterners, GlobalCtxt, TyCtxt};
878 /// Marker types used for the scoped TLS slot.
879 /// The type context cannot be used directly because the scoped TLS
880 /// in libstd doesn't allow types generic over lifetimes.
881 enum ThreadLocalGlobalCtxt {}
882 enum ThreadLocalInterners {}
// The TLS slot holds type-erased raw pointers; `with` re-attaches the
// lifetimes when reading them back.
885 static TLS_TCX: Cell<Option<(*const ThreadLocalGlobalCtxt,
886 *const ThreadLocalInterners)>> = Cell::new(None)
// Span Debug hook: render spans via the codemap of the current tcx.
889 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
891 write!(f, "{}", tcx.sess.codemap().span_to_string(span))
// Install the span-debug hook and enter the given global context for
// the duration of `f`, restoring the previous hook afterwards.
895 pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R
896 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
898 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
899 let original_span_debug = span_dbg.get();
900 span_dbg.set(span_debug);
901 let result = enter(&gcx, &gcx.global_interners, f);
902 span_dbg.set(original_span_debug);
// Publish (gcx, interners) in TLS while `f` runs; the previous slot
// value is saved so calls can nest.
907 pub fn enter<'a, 'gcx: 'tcx, 'tcx, F, R>(gcx: &'a GlobalCtxt<'gcx>,
908 interners: &'a CtxtInterners<'tcx>,
910 where F: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
912 let gcx_ptr = gcx as *const _ as *const ThreadLocalGlobalCtxt;
913 let interners_ptr = interners as *const _ as *const ThreadLocalInterners;
915 let prev = tls.get();
916 tls.set(Some((gcx_ptr, interners_ptr)));
// Run `f` with the current tcx; panics (via unwrap) if none is set.
926 pub fn with<F, R>(f: F) -> R
927 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
930 let (gcx, interners) = tcx.get().unwrap();
// SAFETY-relevant: casts back the pointers stored by `enter`; sound only
// because `enter` keeps the originals alive for the whole scope.
931 let gcx = unsafe { &*(gcx as *const GlobalCtxt) };
932 let interners = unsafe { &*(interners as *const CtxtInterners) };
// Like `with`, but passes None when no tcx is currently entered.
940 pub fn with_opt<F, R>(f: F) -> R
941 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
943 if TLS_TCX.with(|tcx| tcx.get().is_some()) {
// Debug-statistics macro: for each listed TypeVariants variant, counts
// interned types and how many involve region/type inference variables,
// then prints a usage table. Only used by `print_debug_stats`.
951 macro_rules! sty_debug_print {
952 ($ctxt: expr, $($variant: ident),*) => {{
953 // curious inner module to allow variant names to be used as
955 #[allow(non_snake_case)]
957 use ty::{self, TyCtxt};
958 use ty::context::Interned;
960 #[derive(Copy, Clone)]
968 pub fn go(tcx: TyCtxt) {
969 let mut total = DebugStat {
971 region_infer: 0, ty_infer: 0, both_infer: 0,
// One accumulator per requested variant, all starting at zero.
973 $(let mut $variant = total;)*
976 for &Interned(t) in tcx.interners.type_.borrow().iter() {
977 let variant = match t.sty {
// Primitive/trivial variants are skipped rather than tallied.
978 ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
979 ty::TyFloat(..) | ty::TyStr | ty::TyNever => continue,
980 ty::TyError => /* unimportant */ continue,
981 $(ty::$variant(..) => &mut $variant,)*
983 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
984 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
988 if region { total.region_infer += 1; variant.region_infer += 1 }
989 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
990 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
992 println!("Ty interner total ty region both");
993 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
994 {ty:4.1}% {region:5.1}% {both:4.1}%",
995 stringify!($variant),
996 uses = $variant.total,
997 usespc = $variant.total as f64 * 100.0 / total.total as f64,
998 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
999 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
1000 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
1002 println!(" total {uses:6} \
1003 {ty:4.1}% {region:5.1}% {both:4.1}%",
1005 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
1006 region = total.region_infer as f64 * 100.0 / total.total as f64,
1007 both = total.both_infer as f64 * 100.0 / total.total as f64)
1015 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Print interner-usage statistics (type variants plus the sizes of the
// substs/region/stability/layout interners) to stdout, for -Z-style
// compiler debugging.
1016 pub fn print_debug_stats(self) {
1019 TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr,
1020 TyDynamic, TyClosure, TyTuple, TyParam, TyInfer, TyProjection, TyAnon);
1022 println!("Substs interner: #{}", self.interners.substs.borrow().len());
1023 println!("Region interner: #{}", self.interners.region.borrow().len());
1024 println!("Stability interner: #{}", self.stability_interner.borrow().len());
1025 println!("Layout interner: #{}", self.layout_interner.borrow().len());
1030 /// An entry in an interner.
// Newtype so that hashing/equality can be defined on the *contents*
// (e.g. the sty) rather than on the reference, enabling key lookups
// with borrowed keys via the Borrow impls below.
1031 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
1033 // NB: An Interned<Ty> compares and hashes as a sty.
1034 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
1035 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
1036 self.0.sty == other.0.sty
1040 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
1042 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
1043 fn hash<H: Hasher>(&self, s: &mut H) {
// Allows looking up an interned TyS by its TypeVariants key alone.
1048 impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>> {
1049 fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> {
1054 // NB: An Interned<Slice<T>> compares and hashes as its elements.
1055 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
1056 fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
1057 self.0[..] == other.0[..]
1061 impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}
1063 impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
1064 fn hash<H: Hasher>(&self, s: &mut H) {
// Borrow impls so interner sets can be probed with plain slices/keys
// instead of already-interned values.
1069 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
1070 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
1075 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
1076 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
1081 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
1082 fn borrow<'a>(&'a self) -> &'a RegionKind {
1087 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
1088 for Interned<'tcx, Slice<ExistentialPredicate<'tcx>>> {
1089 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
1094 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
1095 for Interned<'tcx, Slice<Predicate<'tcx>>> {
1096 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
// Generates a `pub fn $method` on TyCtxt that interns a value of type
// `$alloc` into the `$name` table, allocating it in a TypedArena via
// `$alloc_method` on a miss and returning the long-lived `&$lt_tcx $ty`.
// NOTE(review): several metavariables used below ($alloc_to_key,
// $alloc_to_ret) are bound on lines elided from this view — confirm the
// full matcher against the complete file.
1101 macro_rules! intern_method {
1102 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
1103 $alloc_method:ident,
1106 $needs_infer:expr) -> $ty:ty) => {
1107 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
1108 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
// Fast path: already interned in this context's own table.
1110 let key = ($alloc_to_key)(&v);
1111 if let Some(i) = self.interners.$name.borrow().get(key) {
// A non-global (inference) context also consults the global table.
1114 if !self.is_global() {
1115 if let Some(i) = self.global_interners.$name.borrow().get(key) {
1121 // HACK(eddyb) Depend on flags being accurate to
1122 // determine that all contents are in the global tcx.
1123 // See comments on Lift for why we can't use that.
1124 if !($needs_infer)(&v) {
// Inference-free values are promoted to the global arena/table so
// they can be shared across inference contexts.
1125 if !self.is_global() {
1129 let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
1130 self.global_interners.$name.borrow_mut().insert(Interned(i));
1134 // Make sure we don't end up with inference
1135 // types/regions in the global tcx.
1136 if self.is_global() {
1137 bug!("Attempted to intern `{:?}` which contains \
1138 inference types/regions in the global type context",
// Otherwise allocate and record in this (local) context's interner.
1143 let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
1144 self.interners.$name.borrow_mut().insert(Interned(i));
// For values interned "directly" (one arena slot per value, not a slice):
// derives the Interned<_, $ty> comparison/hash impls and then delegates to
// intern_method! with identity conversions (`|x| x`) for key and return.
1151 macro_rules! direct_interners {
1152 ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
1153 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
1154 fn eq(&self, other: &Self) -> bool {
1159 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
1161 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
// Body elided in this view; presumably hashes the pointed-to value so it
// agrees with the PartialEq impl above — confirm in the full file.
1162 fn hash<H: Hasher>(&self, s: &mut H) {
1167 intern_method!($lt_tcx, $name: $method($ty, alloc, |x| x, |x| x, $needs_infer) -> $ty);)+
// True if `x` mentions anything (inference types/regions) that must stay in
// the local, inference-capable tcx rather than being promoted to the global
// one; used as the `$needs_infer` predicate for the slice interners below.
1171 fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
1172 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Instantiates the direct interners; visible here: `mk_region`, whose
// needs-infer closure flags region variables and skolemized regions.
// NOTE(review): the rest of this invocation (other match arms and any
// further name/method pairs) is elided from this view.
1175 direct_interners!('tcx,
1176 region: mk_region(|r| {
1178 &ty::ReVar(_) | &ty::ReSkolemized(..) => true,
// Generates slice-interning methods: input is a borrowed `&[$ty<'tcx>]`,
// storage is an arena slice (`alloc_slice`), lookup key is the plain slice
// (`Deref::deref`), and the result is returned as `&Slice<$ty<'tcx>>`.
1184 macro_rules! slice_interners {
1185 ($($field:ident: $method:ident($ty:ident)),+) => (
1186 $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
1187 |xs: &[$ty]| -> &Slice<$ty> {
// NOTE(review): transmutes &[T] to &Slice<T>; this is only sound if Slice
// is a layout-compatible wrapper around [T] — its definition is outside
// this view, so confirm the repr in ty/mod.rs.
1188 unsafe { mem::transmute(xs) }
// Promote to the global tcx only when no element needs the local tcx.
1189 }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
// Interior of the slice_interners! invocation (its opening line is elided
// from this view). Declares the private `_intern_*` methods that the
// public `intern_*` wrappers further below validate and delegate to.
1194 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
1195 predicates: _intern_predicates(Predicate),
1196 type_list: _intern_type_list(Ty),
1197 substs: _intern_substs(Kind)
// Public type-constructor surface of TyCtxt. Methods here build TypeVariants
// values and funnel them through `mk_ty` for interning.
1200 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
1201 /// Create an unsafe fn ty based on a safe fn ty.
1202 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
// Precondition: the input signature must actually be a safe fn.
1203 assert_eq!(sig.unsafety(), hir::Unsafety::Normal)
1204 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
1205 unsafety: hir::Unsafety::Unsafe,
1210 // Interns a type/name combination, stores the resulting box in cx.interners,
1211 // and returns the box as cast to an unsafe ptr (see comments for Ty above).
1212 pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
// A local (inference) context passes the global interners along so that
// inference-free types can be promoted; the global context passes None.
1213 let global_interners = if !self.is_global() {
1214 Some(&self.global_interners)
1218 self.interners.intern_ty(st, global_interners)
// Maps an AST integer-type token to the corresponding pre-interned primitive
// type cached in `self.types`.
1221 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
1223 ast::IntTy::Is => self.types.isize,
1224 ast::IntTy::I8 => self.types.i8,
1225 ast::IntTy::I16 => self.types.i16,
1226 ast::IntTy::I32 => self.types.i32,
1227 ast::IntTy::I64 => self.types.i64,
1228 ast::IntTy::I128 => self.types.i128,
// Same mapping for unsigned integer types.
1232 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
1234 ast::UintTy::Us => self.types.usize,
1235 ast::UintTy::U8 => self.types.u8,
1236 ast::UintTy::U16 => self.types.u16,
1237 ast::UintTy::U32 => self.types.u32,
1238 ast::UintTy::U64 => self.types.u64,
1239 ast::UintTy::U128 => self.types.u128,
// Same mapping for floating-point types.
1243 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
1245 ast::FloatTy::F32 => self.types.f32,
1246 ast::FloatTy::F64 => self.types.f64,
// The `str` type (body elided in this view).
1250 pub fn mk_str(self) -> Ty<'tcx> {
// Convenience for `&'static str`.
1254 pub fn mk_static_str(self) -> Ty<'tcx> {
1255 self.mk_imm_ref(self.types.re_static, self.mk_str())
// An ADT (struct/enum/union) type applied to its substitutions.
1258 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1259 // take a copy of substs so that we own the vectors inside
1260 self.mk_ty(TyAdt(def, substs))
// Builds `Box<ty>` by looking up the owned-box lang item and applying it
// to a single type substitution.
1263 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1264 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
1265 let adt_def = self.adt_def(def_id);
1266 let substs = self.mk_substs(iter::once(Kind::from(ty)));
1267 self.mk_ty(TyAdt(adt_def, substs))
// Raw pointer `*const T` / `*mut T` (mutability carried in TypeAndMut).
1270 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1271 self.mk_ty(TyRawPtr(tm))
// Reference `&'r T` / `&'r mut T`.
1274 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1275 self.mk_ty(TyRef(r, tm))
// Convenience wrappers fixing the mutability of the ref/ptr constructors.
1278 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
1279 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1282 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
1283 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
1286 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1287 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1290 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1291 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
// `*const ()` — the unit raw pointer.
1294 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
1295 self.mk_imm_ptr(self.mk_nil())
// Fixed-length array `[ty; n]` (length is a plain usize in this version).
1298 pub fn mk_array(self, ty: Ty<'tcx>, n: usize) -> Ty<'tcx> {
1299 self.mk_ty(TyArray(ty, n))
// Unsized slice `[ty]`.
1302 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1303 self.mk_ty(TySlice(ty))
// Tuple from an already-materialized element slice; `defaulted` marks a
// tuple produced by type-defaulting (see mk_diverging_default).
1306 pub fn intern_tup(self, ts: &[Ty<'tcx>], defaulted: bool) -> Ty<'tcx> {
1307 self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted))
// Tuple from any iterator of element types (incl. fallible iterators via
// the InternAs/InternIteratorElement machinery at the bottom of the file).
1310 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I,
1311 defaulted: bool) -> I::Output {
1312 iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted)))
// The unit type `()`.
1315 pub fn mk_nil(self) -> Ty<'tcx> {
1316 self.intern_tup(&[], false)
// Default type for diverging expressions: `!` when the never_type feature
// is enabled, otherwise a `defaulted` unit tuple (fallback arm partially
// elided in this view).
1319 pub fn mk_diverging_default(self) -> Ty<'tcx> {
1320 if self.sess.features.borrow().never_type {
1323 self.intern_tup(&[], true)
// `bool` (body elided in this view; presumably returns the cached
// self.types.bool — confirm in the full file).
1327 pub fn mk_bool(self) -> Ty<'tcx> {
// The zero-sized type of a particular fn item, carrying its DefId,
// substitutions and signature.
1331 pub fn mk_fn_def(self, def_id: DefId,
1332 substs: &'tcx Substs<'tcx>,
1333 fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
1334 self.mk_ty(TyFnDef(def_id, substs, fty))
// A function pointer type `fn(...) -> _`.
1337 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
1338 self.mk_ty(TyFnPtr(fty))
// Trait object type (signature line elided): binder over the sorted
// existential predicate list plus the default-object lifetime.
1343 obj: ty::Binder<&'tcx Slice<ExistentialPredicate<'tcx>>>,
1344 reg: ty::Region<'tcx>
1346 self.mk_ty(TyDynamic(obj, reg))
// Associated-type projection `<T as Trait>::item_name` (the item_name
// parameter line is elided in this view).
1349 pub fn mk_projection(self,
1350 trait_ref: TraitRef<'tcx>,
1353 // take a copy of substs so that we own the vectors inside
1354 let inner = ProjectionTy { trait_ref: trait_ref, item_name: item_name };
1355 self.mk_ty(TyProjection(inner))
// Closure type from raw substs; wraps them in ClosureSubsts and delegates.
1358 pub fn mk_closure(self,
1360 substs: &'tcx Substs<'tcx>)
1362 self.mk_closure_from_closure_substs(closure_id, ClosureSubsts {
1367 pub fn mk_closure_from_closure_substs(self,
1369 closure_substs: ClosureSubsts<'tcx>)
1371 self.mk_ty(TyClosure(closure_id, closure_substs))
// Inference variables: general type variable, integral variable, and
// float variable, all routed through mk_infer.
1374 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
1375 self.mk_infer(TyVar(v))
1378 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
1379 self.mk_infer(IntVar(v))
1382 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
1383 self.mk_infer(FloatVar(v))
1386 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
1387 self.mk_ty(TyInfer(it))
// Generic type parameter identified by (index, name); the index parameter
// line is elided in this view.
1390 pub fn mk_param(self,
1392 name: Name) -> Ty<'tcx> {
1393 self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
// `Self` is modelled as the parameter at index 0 with the reserved name.
1396 pub fn mk_self_type(self) -> Ty<'tcx> {
1397 self.mk_param(0, keywords::SelfType.name())
// Parameter type taken directly from a TypeParameterDef.
1400 pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
1401 self.mk_param(def.index, def.name)
// `impl Trait` (anonymized) type for the given defining item and substs.
1404 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1405 self.mk_ty(TyAnon(def_id, substs))
// Public validating wrappers over the macro-generated private `_intern_*`
// methods (declared in the slice_interners! invocation above).
// Existential predicate lists must be non-empty and sorted (ascending per
// ExistentialPredicate::cmp) so that equal sets intern to the same slice.
1408 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
1409 -> &'tcx Slice<ExistentialPredicate<'tcx>> {
1410 assert!(!eps.is_empty());
1411 assert!(eps.windows(2).all(|w| w[0].cmp(self, &w[1]) != Ordering::Greater));
1412 self._intern_existential_predicates(eps)
1415 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
1416 -> &'tcx Slice<Predicate<'tcx>> {
1417 // FIXME consider asking the input slice to be sorted to avoid
1418 // re-interning permutations, in which case that would be asserted
1420 if preds.len() == 0 {
1421 // The macro-generated method below asserts we don't intern an empty slice.
1424 self._intern_predicates(preds)
// Type lists and substs (empty-slice short-circuit lines elided in this
// view; the calls below handle the non-empty case).
1428 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice<Ty<'tcx>> {
1432 self._intern_type_list(ts)
1436 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice<Kind<'tcx>> {
1440 self._intern_substs(ts)
// Builds a FnSig from an iterator of input types plus the output type;
// inputs and output are stored as one interned list with the output last.
// (Several parameter/where lines are elided in this view.)
1444 pub fn mk_fn_sig<I>(self,
1448 unsafety: hir::Unsafety,
1450 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
1452 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
1454 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
1455 inputs_and_output: self.intern_type_list(xs),
1456 variadic, unsafety, abi
// Iterator-based conveniences over the slice interners above; via InternAs
// these also propagate Result/Option-ness of the iterator's items.
1460 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
1461 &'tcx Slice<ExistentialPredicate<'tcx>>>>(self, iter: I)
1463 iter.intern_with(|xs| self.intern_existential_predicates(xs))
1466 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
1467 &'tcx Slice<Predicate<'tcx>>>>(self, iter: I)
1469 iter.intern_with(|xs| self.intern_predicates(xs))
1472 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
1473 &'tcx Slice<Ty<'tcx>>>>(self, iter: I) -> I::Output {
1474 iter.intern_with(|xs| self.intern_type_list(xs))
1477 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
1478 &'tcx Slice<Kind<'tcx>>>>(self, iter: I) -> I::Output {
1479 iter.intern_with(|xs| self.intern_substs(xs))
// Substs for a trait reference: the self type `s` first, followed by the
// remaining parameters `t` (parameter lines elided in this view).
1482 pub fn mk_substs_trait(self,
1485 -> &'tcx Substs<'tcx>
1487 self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from))
// Abstraction letting the mk_* methods accept any iterator and intern the
// collected slice with `f`, while preserving the "shape" of the item type
// (plain T, &T, or Result<T, E>) in the output.
1491 pub trait InternAs<T: ?Sized, R> {
1493 fn intern_with<F>(self, f: F) -> Self::Output
1494 where F: FnOnce(&T) -> R;
// Blanket impl: any iterator interns via its element type's strategy.
1497 impl<I, T, R, E> InternAs<[T], R> for I
1498 where E: InternIteratorElement<T, R>,
1499 I: Iterator<Item=E> {
1500 type Output = E::Output;
1501 fn intern_with<F>(self, f: F) -> Self::Output
1502 where F: FnOnce(&[T]) -> R {
1503 E::intern_with(self, f)
// Per-element strategy: how to collect the iterator into a temporary slice
// and apply `f` (the associated Output type line is elided in this view).
1507 pub trait InternIteratorElement<T, R>: Sized {
1509 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
// Owned elements: collect into a small-size-optimized vector, then intern.
1512 impl<T, R> InternIteratorElement<T, R> for T {
1514 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
1515 f(&iter.collect::<AccumulateVec<[_; 8]>>())
// Borrowed elements: clone each into the temporary buffer first (the impl's
// trait bounds are elided in this view).
1519 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
1523 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
1524 f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
1528 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
1529 type Output = Result<R, E>;
1530 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
1531 Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))