1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
14 use errors::DiagnosticBuilder;
17 use hir::{TraitCandidate, HirId, ItemLocalId};
18 use hir::def::{Def, Export};
19 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
20 use hir::map as hir_map;
21 use hir::map::DefPathHash;
22 use lint::{self, Lint};
23 use ich::{self, StableHashingContext, NodeIdHashingMode};
use middle::const_val::ConstVal;
use middle::cstore::CrateStore;
29 use middle::free_region::FreeRegionMap;
30 use middle::lang_items;
31 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
32 use middle::stability;
34 use mir::transform::Passes;
35 use ty::subst::{Kind, Substs};
38 use ty::{self, Ty, TypeAndMut};
39 use ty::{TyS, TypeVariants, Slice};
40 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorInterior, Region, Const};
41 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
43 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
44 use ty::TypeVariants::*;
45 use ty::layout::{Layout, TargetDataLayout};
46 use ty::inhabitedness::DefIdForest;
50 use util::nodemap::{NodeMap, NodeSet, DefIdSet, ItemLocalMap};
51 use util::nodemap::{FxHashMap, FxHashSet};
52 use rustc_data_structures::accumulate_vec::AccumulateVec;
53 use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
56 use arena::{TypedArena, DroplessArena};
57 use rustc_const_math::{ConstInt, ConstUsize};
58 use rustc_data_structures::indexed_vec::IndexVec;
59 use std::borrow::Borrow;
60 use std::cell::{Cell, RefCell};
61 use std::cmp::Ordering;
62 use std::collections::hash_map::{self, Entry};
63 use std::hash::{Hash, Hasher};
69 use syntax::ast::{self, Name, NodeId};
71 use syntax::codemap::MultiSpan;
72 use syntax::symbol::{Symbol, keywords};
/// Arena allocators for long-lived ("global") type-system data.
/// Everything allocated here lives as long as the `GlobalCtxt` and is
/// handed out as `&'gcx` references by the `alloc_*` methods on `TyCtxt`.
pub struct GlobalArenas<'tcx> {
    // Interned layout results (see `intern_layout`).
    layout: TypedArena<Layout>,
    generics: TypedArena<ty::Generics>,
    trait_def: TypedArena<ty::TraitDef>,
    adt_def: TypedArena<ty::AdtDef>,
    // MIR bodies that optimization passes may still "steal" (take and mutate).
    steal_mir: TypedArena<Steal<Mir<'tcx>>>,
    // Finalized, immutable MIR bodies.
    mir: TypedArena<Mir<'tcx>>,
    tables: TypedArena<ty::TypeckTables<'tcx>>,
impl<'tcx> GlobalArenas<'tcx> {
    /// Creates a fresh set of empty arenas.
    pub fn new() -> GlobalArenas<'tcx> {
        layout: TypedArena::new(),
        generics: TypedArena::new(),
        trait_def: TypedArena::new(),
        adt_def: TypedArena::new(),
        steal_mir: TypedArena::new(),
        mir: TypedArena::new(),
        tables: TypedArena::new(),
/// Interning tables for types and related data. There is one global set of
/// interners plus (possibly) a shorter-lived local set for inference.
pub struct CtxtInterners<'tcx> {
    /// The arena that types, regions, etc are allocated from
    arena: &'tcx DroplessArena,

    /// Specifically use a speedy hash algorithm for these hash sets,
    /// they're accessed quite often.
    type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
    type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
    substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
    region: RefCell<FxHashSet<Interned<'tcx, RegionKind>>>,
    existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
    predicates: RefCell<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
    const_: RefCell<FxHashSet<Interned<'tcx, Const<'tcx>>>>,
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
    /// Creates a set of empty interners backed by `arena`.
    fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
        type_: RefCell::new(FxHashSet()),
        type_list: RefCell::new(FxHashSet()),
        substs: RefCell::new(FxHashSet()),
        region: RefCell::new(FxHashSet()),
        existential_predicates: RefCell::new(FxHashSet()),
        predicates: RefCell::new(FxHashSet()),
        const_: RefCell::new(FxHashSet()),

    /// Intern a type. global_interners is Some only if this is
    /// a local interner and global_interners is its counterpart.
    fn intern_ty(&self, st: TypeVariants<'tcx>,
                 global_interners: Option<&CtxtInterners<'gcx>>)
        let mut interner = self.type_.borrow_mut();
        let global_interner = global_interners.map(|interners| {
            interners.type_.borrow_mut()
        // Fast path: the type was already interned locally.
        if let Some(&Interned(ty)) = interner.get(&st) {
        // Next, check the global interner (when this is a local one).
        if let Some(ref interner) = global_interner {
            if let Some(&Interned(ty)) = interner.get(&st) {
        // Not interned yet: compute flags and build the TyS to allocate.
        let flags = super::flags::FlagComputation::for_sty(&st);
        let ty_struct = TyS {
            region_depth: flags.depth,

        // HACK(eddyb) Depend on flags being accurate to
        // determine that all contents are in the global tcx.
        // See comments on Lift for why we can't use that.
        if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
            if let Some(interner) = global_interners {
                let ty_struct: TyS<'gcx> = unsafe {
                    // SAFETY-NOTE(review): sound only because the flags
                    // check above proved no local-tcx data is contained.
                    mem::transmute(ty_struct)
                // Allocate in the *global* arena so the type outlives
                // the local inference context.
                let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
                global_interner.unwrap().insert(Interned(ty));

            // Make sure we don't end up with inference
            // types/regions in the global tcx.
            if global_interners.is_none() {
                bug!("Attempted to intern `{:?}` which contains \
                      inference types/regions in the global type context",

        // Allocate in the local arena. Don't be &mut TyS.
        let ty: Ty<'tcx> = self.arena.alloc(ty_struct);
        interner.insert(Interned(ty));
        debug!("Interned type: {:?} Pointer: {:?}",
               ty, ty as *const TyS);
/// Common types and regions, pre-interned once at context construction so
/// they can be handed out without re-interning (see `CommonTypes::new`).
pub struct CommonTypes<'tcx> {
    // The empty lifetime (`ReEmpty`).
    pub re_empty: Region<'tcx>,
    // The `'static` lifetime.
    pub re_static: Region<'tcx>,
    // The erased lifetime used after regions stop mattering (e.g. in trans).
    pub re_erased: Region<'tcx>,
/// Read-only view over an `ItemLocalMap`, paired with the `local_id_root`
/// (the DefId of the table's owner) so that `HirId` keys can be validated
/// before their `local_id` part is used for lookup.
pub struct LocalTableInContext<'a, V: 'a> {
    local_id_root: Option<DefId>,
    data: &'a ItemLocalMap<V>
/// Validate that the given HirId (respectively its `local_id` part) can be
/// safely used as a key in the tables of a TypeckTable. For that to be
/// the case, the HirId must have the same `owner` as all the other IDs in
/// this table (signified by `local_id_root`). Otherwise the HirId
/// would be in a different frame of reference and using its `local_id`
/// would result in lookup errors, or worse, in silently wrong data being
/// stored or returned.
fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
    // Validation is debug-build-only; release builds skip these checks.
    if cfg!(debug_assertions) {
        if let Some(local_id_root) = local_id_root {
            if hir_id.owner != local_id_root.index {
                // Owner mismatch: report a compiler bug with as much
                // context as we can reconstruct from the HIR map.
                ty::tls::with(|tcx| {
                    let node_id = tcx.hir
                        .find_node_for_hir_id(hir_id);
                    bug!("node {} with HirId::owner {:?} cannot be placed in \
                          TypeckTables with local_id_root {:?}",
                         tcx.hir.node_to_string(node_id),
                         DefId::local(hir_id.owner),
        // We use "Null Object" TypeckTables in some of the analysis passes.
        // These are just expected to be empty and their `local_id_root` is
        // `None`. Therefore we cannot verify whether a given `HirId` would
        // be a valid key for the given table. Instead we make sure that
        // nobody tries to write to such a Null Object table.
        bug!("access to invalid TypeckTables")
impl<'a, V> LocalTableInContext<'a, V> {
    /// Whether an entry exists for `id` (after validating its owner).
    pub fn contains_key(&self, id: hir::HirId) -> bool {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.data.contains_key(&id.local_id)

    /// Looks up the value for `id` (after validating its owner).
    pub fn get(&self, id: hir::HirId) -> Option<&V> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.data.get(&id.local_id)

    /// Iterates over all (ItemLocalId, value) entries in the table.
    pub fn iter(&self) -> hash_map::Iter<hir::ItemLocalId, V> {
// `table[hir_id]` sugar; panics when `hir_id` has no entry.
impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
    fn index(&self, key: hir::HirId) -> &V {
        self.get(key).expect("LocalTableInContext: key not found")
/// Mutable counterpart of `LocalTableInContext`: validates `HirId`
/// ownership before every read/write against the wrapped map.
pub struct LocalTableInContextMut<'a, V: 'a> {
    local_id_root: Option<DefId>,
    data: &'a mut ItemLocalMap<V>
293 impl<'a, V> LocalTableInContextMut<'a, V> {
294 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
295 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
296 self.data.get_mut(&id.local_id)
299 pub fn entry(&mut self, id: hir::HirId) -> Entry<hir::ItemLocalId, V> {
300 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
301 self.data.entry(id.local_id)
304 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
305 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
306 self.data.insert(id.local_id, val)
309 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
310 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
311 self.data.remove(&id.local_id)
#[derive(RustcEncodable, RustcDecodable)]
pub struct TypeckTables<'tcx> {
    /// The HirId::owner all ItemLocalIds in this table are relative to.
    pub local_id_root: Option<DefId>,

    /// Resolved definitions for `<T>::X` associated paths and
    /// method calls, including those of overloaded operators.
    type_dependent_defs: ItemLocalMap<Def>,

    /// Stores the types for various nodes in the AST. Note that this table
    /// is not guaranteed to be populated until after typeck. See
    /// typeck::check::fn_ctxt for details.
    node_types: ItemLocalMap<Ty<'tcx>>,

    /// Stores the type parameters which were substituted to obtain the type
    /// of this node. This only applies to nodes that refer to entities
    /// parameterized by type parameters, such as generic fns, types, or
    /// other generic items.
    node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,

    /// Adjustments (auto-deref, auto-ref, coercions, ...) recorded per
    /// expression; accessed via `expr_adjustments`.
    adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,

    // Stores the actual binding mode for all instances of hir::BindingAnnotation.
    pat_binding_modes: ItemLocalMap<BindingMode>,

    /// How each upvar is captured by its closure; keyed by `ty::UpvarId`.
    pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,

    /// Records the type of each closure.
    closure_tys: ItemLocalMap<ty::PolyFnSig<'tcx>>,

    /// Records the kind of each closure and the span and name of the variable
    /// that caused the closure to be this kind.
    closure_kinds: ItemLocalMap<(ty::ClosureKind, Option<(Span, ast::Name)>)>,

    // Signatures of generators (None when absent for a given generator).
    generator_sigs: ItemLocalMap<Option<ty::GenSig<'tcx>>>,

    // Interior (witness) types of generators.
    generator_interiors: ItemLocalMap<ty::GeneratorInterior<'tcx>>,

    /// For each fn, records the "liberated" types of its arguments
    /// and return type. Liberated means that all bound regions
    /// (including late-bound regions) are replaced with free
    /// equivalents. This table is not used in trans (since regions
    /// are erased there) and hence is not serialized to metadata.
    liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,

    /// For each FRU expression, record the normalized types of the fields
    /// of the struct - this is needed because it is non-trivial to
    /// normalize while preserving regions. This table is used only in
    /// MIR construction and hence is not serialized to metadata.
    fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,

    /// Maps a cast expression to its kind. This is keyed on the
    /// *from* expression of the cast, not the cast itself.
    cast_kinds: ItemLocalMap<ty::cast::CastKind>,

    /// Set of trait imports actually used in the method resolution.
    /// This is used for warning unused imports.
    pub used_trait_imports: DefIdSet,

    /// If any errors occurred while type-checking this body,
    /// this field will be set to `true`.
    pub tainted_by_errors: bool,

    /// Stores the free-region relationships that were deduced from
    /// its where clauses and parameter types. These are then
    /// read-again by borrowck.
    pub free_region_map: FreeRegionMap<'tcx>,
impl<'tcx> TypeckTables<'tcx> {
    /// Creates an empty table rooted at `local_id_root` (pass `None` for
    /// the "Null Object" tables used by some analysis passes).
    pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
        type_dependent_defs: ItemLocalMap(),
        node_types: ItemLocalMap(),
        node_substs: ItemLocalMap(),
        adjustments: ItemLocalMap(),
        pat_binding_modes: ItemLocalMap(),
        upvar_capture_map: FxHashMap(),
        generator_sigs: ItemLocalMap(),
        generator_interiors: ItemLocalMap(),
        closure_tys: ItemLocalMap(),
        closure_kinds: ItemLocalMap(),
        liberated_fn_sigs: ItemLocalMap(),
        fru_field_types: ItemLocalMap(),
        cast_kinds: ItemLocalMap(),
        used_trait_imports: DefIdSet(),
        tainted_by_errors: false,
        free_region_map: FreeRegionMap::new(),
    /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
    pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
        // Fully-resolved paths carry their Def directly.
        hir::QPath::Resolved(_, ref path) => path.def,
        // Type-relative paths were resolved during typeck; fall back to
        // `Def::Err` if no entry was recorded.
        hir::QPath::TypeRelative(..) => {
            validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
            self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
419 pub fn type_dependent_defs(&self) -> LocalTableInContext<Def> {
420 LocalTableInContext {
421 local_id_root: self.local_id_root,
422 data: &self.type_dependent_defs
426 pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<Def> {
427 LocalTableInContextMut {
428 local_id_root: self.local_id_root,
429 data: &mut self.type_dependent_defs
433 pub fn node_types(&self) -> LocalTableInContext<Ty<'tcx>> {
434 LocalTableInContext {
435 local_id_root: self.local_id_root,
436 data: &self.node_types
440 pub fn node_types_mut(&mut self) -> LocalTableInContextMut<Ty<'tcx>> {
441 LocalTableInContextMut {
442 local_id_root: self.local_id_root,
443 data: &mut self.node_types
    /// The type recorded for `id`; panics (ICEs) when none was recorded.
    /// Use `node_id_to_type_opt` for a non-panicking lookup.
    pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
        match self.node_id_to_type_opt(id) {
                // No entry: this is a compiler bug; include the node's
                // description in the ICE message.
                bug!("node_id_to_type: no type for node `{}`",
                      let id = tcx.hir.definitions().find_node_for_hir_id(id);
                      tcx.hir.node_to_string(id)
460 pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
461 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
462 self.node_types.get(&id.local_id).cloned()
465 pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<&'tcx Substs<'tcx>> {
466 LocalTableInContextMut {
467 local_id_root: self.local_id_root,
468 data: &mut self.node_substs
472 pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
473 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
474 self.node_substs.get(&id.local_id).cloned().unwrap_or(Substs::empty())
477 pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
478 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
479 self.node_substs.get(&id.local_id).cloned()
482 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
483 // doesn't provide type parameter substitutions.
484 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
485 self.node_id_to_type(pat.hir_id)
488 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
489 self.node_id_to_type_opt(pat.hir_id)
492 // Returns the type of an expression as a monotype.
494 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
495 // some cases, we insert `Adjustment` annotations such as auto-deref or
496 // auto-ref. The type returned by this function does not consider such
497 // adjustments. See `expr_ty_adjusted()` instead.
499 // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
500 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
501 // instead of "fn(ty) -> T with T = isize".
502 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
503 self.node_id_to_type(expr.hir_id)
506 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
507 self.node_id_to_type_opt(expr.hir_id)
510 pub fn adjustments(&self) -> LocalTableInContext<Vec<ty::adjustment::Adjustment<'tcx>>> {
511 LocalTableInContext {
512 local_id_root: self.local_id_root,
513 data: &self.adjustments
517 pub fn adjustments_mut(&mut self)
518 -> LocalTableInContextMut<Vec<ty::adjustment::Adjustment<'tcx>>> {
519 LocalTableInContextMut {
520 local_id_root: self.local_id_root,
521 data: &mut self.adjustments
525 pub fn expr_adjustments(&self, expr: &hir::Expr)
526 -> &[ty::adjustment::Adjustment<'tcx>] {
527 validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
528 self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
    /// Returns the type of `expr`, considering any `Adjustment`
    /// entry recorded for that expression.
    pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
        self.expr_adjustments(expr)
            // Fall back to the unadjusted type when no adjustment applies.
            .map_or_else(|| self.expr_ty(expr), |adj| adj.target)

    /// Non-panicking variant of `expr_ty_adjusted`.
    pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
        self.expr_adjustments(expr)
            .map(|adj| adj.target)
            .or_else(|| self.expr_ty_opt(expr))
    /// Whether `expr` resolves to a method call (including overloaded
    /// operators).
    pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
        // Only paths and method calls/overloaded operators have
        // entries in type_dependent_defs, ignore the former here.
        if let hir::ExprPath(_) = expr.node {
        match self.type_dependent_defs().get(expr.hir_id) {
            Some(&Def::Method(_)) => true,
559 pub fn pat_binding_modes(&self) -> LocalTableInContext<BindingMode> {
560 LocalTableInContext {
561 local_id_root: self.local_id_root,
562 data: &self.pat_binding_modes
566 pub fn pat_binding_modes_mut(&mut self)
567 -> LocalTableInContextMut<BindingMode> {
568 LocalTableInContextMut {
569 local_id_root: self.local_id_root,
570 data: &mut self.pat_binding_modes
    /// How the upvar identified by `upvar_id` is captured by its closure.
    /// Panics if no capture was recorded for it.
    pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
        self.upvar_capture_map[&upvar_id]
578 pub fn closure_tys(&self) -> LocalTableInContext<ty::PolyFnSig<'tcx>> {
579 LocalTableInContext {
580 local_id_root: self.local_id_root,
581 data: &self.closure_tys
585 pub fn closure_tys_mut(&mut self)
586 -> LocalTableInContextMut<ty::PolyFnSig<'tcx>> {
587 LocalTableInContextMut {
588 local_id_root: self.local_id_root,
589 data: &mut self.closure_tys
593 pub fn closure_kinds(&self) -> LocalTableInContext<(ty::ClosureKind,
594 Option<(Span, ast::Name)>)> {
595 LocalTableInContext {
596 local_id_root: self.local_id_root,
597 data: &self.closure_kinds
601 pub fn closure_kinds_mut(&mut self)
602 -> LocalTableInContextMut<(ty::ClosureKind, Option<(Span, ast::Name)>)> {
603 LocalTableInContextMut {
604 local_id_root: self.local_id_root,
605 data: &mut self.closure_kinds
609 pub fn liberated_fn_sigs(&self) -> LocalTableInContext<ty::FnSig<'tcx>> {
610 LocalTableInContext {
611 local_id_root: self.local_id_root,
612 data: &self.liberated_fn_sigs
616 pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<ty::FnSig<'tcx>> {
617 LocalTableInContextMut {
618 local_id_root: self.local_id_root,
619 data: &mut self.liberated_fn_sigs
623 pub fn fru_field_types(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
624 LocalTableInContext {
625 local_id_root: self.local_id_root,
626 data: &self.fru_field_types
630 pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
631 LocalTableInContextMut {
632 local_id_root: self.local_id_root,
633 data: &mut self.fru_field_types
637 pub fn cast_kinds(&self) -> LocalTableInContext<ty::cast::CastKind> {
638 LocalTableInContext {
639 local_id_root: self.local_id_root,
640 data: &self.cast_kinds
644 pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<ty::cast::CastKind> {
645 LocalTableInContextMut {
646 local_id_root: self.local_id_root,
647 data: &mut self.cast_kinds
651 pub fn generator_sigs(&self)
652 -> LocalTableInContext<Option<ty::GenSig<'tcx>>>
654 LocalTableInContext {
655 local_id_root: self.local_id_root,
656 data: &self.generator_sigs,
660 pub fn generator_sigs_mut(&mut self)
661 -> LocalTableInContextMut<Option<ty::GenSig<'tcx>>>
663 LocalTableInContextMut {
664 local_id_root: self.local_id_root,
665 data: &mut self.generator_sigs,
669 pub fn generator_interiors(&self)
670 -> LocalTableInContext<ty::GeneratorInterior<'tcx>>
672 LocalTableInContext {
673 local_id_root: self.local_id_root,
674 data: &self.generator_interiors,
678 pub fn generator_interiors_mut(&mut self)
679 -> LocalTableInContextMut<ty::GeneratorInterior<'tcx>>
681 LocalTableInContextMut {
682 local_id_root: self.local_id_root,
683 data: &mut self.generator_interiors,
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for TypeckTables<'gcx> {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
                                          hasher: &mut StableHasher<W>) {
        // Destructure so that adding a field to TypeckTables without
        // updating this impl becomes a compile error.
        let ty::TypeckTables {
            ref type_dependent_defs,
            ref pat_binding_modes,
            ref upvar_capture_map,
            ref liberated_fn_sigs,
            ref used_trait_imports,
            ref generator_interiors,

        // Hash all ItemLocalId-keyed maps with DefPath-based node hashing
        // so results are stable across compilation sessions.
        hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
            ich::hash_stable_itemlocalmap(hcx, hasher, type_dependent_defs);
            ich::hash_stable_itemlocalmap(hcx, hasher, node_types);
            ich::hash_stable_itemlocalmap(hcx, hasher, node_substs);
            ich::hash_stable_itemlocalmap(hcx, hasher, adjustments);
            ich::hash_stable_itemlocalmap(hcx, hasher, pat_binding_modes);
            // UpvarIds are rewritten to stable (DefPathHash, local_id) keys
            // before hashing, since raw indices are not stable.
            ich::hash_stable_hashmap(hcx, hasher, upvar_capture_map, |hcx, up_var_id| {
                    local_id_root.expect("trying to hash invalid TypeckTables");
                let var_owner_def_id = DefId {
                    krate: local_id_root.krate,
                let closure_def_id = DefId {
                    krate: local_id_root.krate,
                    index: closure_expr_id,
                ((hcx.def_path_hash(var_owner_def_id), var_id.local_id),
                 hcx.def_path_hash(closure_def_id))
            ich::hash_stable_itemlocalmap(hcx, hasher, closure_tys);
            ich::hash_stable_itemlocalmap(hcx, hasher, closure_kinds);
            ich::hash_stable_itemlocalmap(hcx, hasher, liberated_fn_sigs);
            ich::hash_stable_itemlocalmap(hcx, hasher, fru_field_types);
            ich::hash_stable_itemlocalmap(hcx, hasher, cast_kinds);
            ich::hash_stable_itemlocalmap(hcx, hasher, generator_sigs);
            ich::hash_stable_itemlocalmap(hcx, hasher, generator_interiors);
            // DefId sets are hashed via their stable DefPathHashes.
            ich::hash_stable_hashset(hcx, hasher, used_trait_imports, |hcx, def_id| {
                hcx.def_path_hash(*def_id)
            tainted_by_errors.hash_stable(hcx, hasher);
            free_region_map.hash_stable(hcx, hasher);
impl<'tcx> CommonTypes<'tcx> {
    /// Interns all the common primitive types and regions up front so that
    /// later uses are cheap pointer copies.
    fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
        // Intern a type variant directly; no global interner at this stage.
        let mk = |sty| interners.intern_ty(sty, None);
        // Intern a region, reusing an existing allocation when present.
        let mk_region = |r| {
            if let Some(r) = interners.region.borrow().get(&r) {
            let r = interners.arena.alloc(r);
            interners.region.borrow_mut().insert(Interned(r));
        isize: mk(TyInt(ast::IntTy::Is)),
        i8: mk(TyInt(ast::IntTy::I8)),
        i16: mk(TyInt(ast::IntTy::I16)),
        i32: mk(TyInt(ast::IntTy::I32)),
        i64: mk(TyInt(ast::IntTy::I64)),
        i128: mk(TyInt(ast::IntTy::I128)),
        usize: mk(TyUint(ast::UintTy::Us)),
        u8: mk(TyUint(ast::UintTy::U8)),
        u16: mk(TyUint(ast::UintTy::U16)),
        u32: mk(TyUint(ast::UintTy::U32)),
        u64: mk(TyUint(ast::UintTy::U64)),
        u128: mk(TyUint(ast::UintTy::U128)),
        f32: mk(TyFloat(ast::FloatTy::F32)),
        f64: mk(TyFloat(ast::FloatTy::F64)),
        re_empty: mk_region(RegionKind::ReEmpty),
        re_static: mk_region(RegionKind::ReStatic),
        re_erased: mk_region(RegionKind::ReErased),
/// The data structure to keep track of all the information that typechecker
/// generates so that it can be reused and doesn't have to be redone.
///
/// A cheap-to-copy handle: `gcx` points at the long-lived global context,
/// `interners` at either the global interners or a shorter-lived local set.
#[derive(Copy, Clone)]
pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
    gcx: &'a GlobalCtxt<'gcx>,
    interners: &'a CtxtInterners<'tcx>
// Auto-deref to the global context so `tcx.field` reaches GlobalCtxt fields.
impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
    type Target = &'a GlobalCtxt<'gcx>;
    fn deref(&self) -> &Self::Target {
/// The central, session-long compiler context: arenas, interners, maps
/// produced by resolution, and various caches.
pub struct GlobalCtxt<'tcx> {
    global_arenas: &'tcx GlobalArenas<'tcx>,
    global_interners: CtxtInterners<'tcx>,

    // Crate metadata store; access it through `cstore_untracked` (or
    // tracked query wrappers) rather than directly.
    cstore: &'tcx CrateStore,

    pub sess: &'tcx Session,

    pub trans_trait_caches: traits::trans::TransTraitCaches<'tcx>,

    pub dep_graph: DepGraph,

    /// Common types, pre-interned for your convenience.
    pub types: CommonTypes<'tcx>,

    /// Map indicating what traits are in scope for places where this
    /// is relevant; generated by resolve.
    trait_map: FxHashMap<DefIndex, Rc<FxHashMap<ItemLocalId, Rc<Vec<TraitCandidate>>>>>,

    /// Export map produced by name resolution.
    export_map: FxHashMap<DefId, Rc<Vec<Export>>>,

    named_region_map: NamedRegionMap,

    pub hir: hir_map::Map<'tcx>,

    /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
    /// as well as all upstream crates. Only populated in incremental mode.
    pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,

    pub maps: maps::Maps<'tcx>,

    pub mir_passes: Rc<Passes>,

    // Records the free variables referenced by every closure
    // expression. Do not track deps for this, just recompute it from
    // scratch every time.
    freevars: FxHashMap<DefId, Rc<Vec<hir::Freevar>>>,

    maybe_unused_trait_imports: FxHashSet<DefId>,

    maybe_unused_extern_crates: Vec<(DefId, Span)>,

    // Internal cache for metadata decoding. No need to track deps on this.
    pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,

    // FIXME dep tracking -- should be harmless enough
    pub normalized_cache: RefCell<FxHashMap<Ty<'tcx>, Ty<'tcx>>>,

    pub inhabitedness_cache: RefCell<FxHashMap<Ty<'tcx>, DefIdForest>>,

    /// Set of nodes which mark locals as mutable which end up getting used at
    /// some point. Local variable definitions not in this set can be warned
    /// about as unused-mut.
    pub used_mut_nodes: RefCell<NodeSet>,

    /// Caches the results of trait selection. This cache is used
    /// for things that do not have to do with the parameters in scope.
    pub selection_cache: traits::SelectionCache<'tcx>,

    /// Caches the results of trait evaluation. This cache is used
    /// for things that do not have to do with the parameters in scope.
    /// Merge this with `selection_cache`?
    pub evaluation_cache: traits::EvaluationCache<'tcx>,

    /// Maps Expr NodeId's to `true` iff `&expr` can have 'static lifetime.
    pub rvalue_promotable_to_static: RefCell<NodeMap<bool>>,

    /// The definite name of the current crate after taking into account
    /// attributes, commandline parameters, etc.
    pub crate_name: Symbol,

    /// Data layout specification for the current target.
    pub data_layout: TargetDataLayout,

    /// Used to prevent layout from recursing too deeply.
    pub layout_depth: Cell<usize>,

    /// Map from function to the `#[derive]` mode that it's defining. Only used
    /// by `proc-macro` crates.
    pub derive_macros: RefCell<NodeMap<Symbol>>,

    // Deduplicates `&'tcx attr::Stability` allocations.
    stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,

    // Deduplicates `&'tcx Layout` allocations.
    layout_interner: RefCell<FxHashSet<&'tcx Layout>>,

    /// A vector of every trait accessible in the whole crate
    /// (i.e. including those from subcrates). This is used only for
    /// error reporting, and so is lazily initialized and generally
    /// shouldn't taint the common path (hence the RefCell).
    pub all_traits: RefCell<Option<Vec<DefId>>>,
impl<'tcx> GlobalCtxt<'tcx> {
    /// Get the global TyCtxt.
    pub fn global_tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
            // Both lifetimes are 'tcx: this handle uses the global interners.
            interners: &self.global_interners
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
    /// Direct access to the `CrateStore` with no dep-graph tracking.
    /// Prefer tracked (query-based) access where possible.
    pub fn cstore_untracked(&self) -> &CrateStore {

    /// Allocates `generics` in the global arena for the session's lifetime.
    pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
        self.global_arenas.generics.alloc(generics)

    /// Allocates a stealable MIR body in the global arena.
    pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
        self.global_arenas.steal_mir.alloc(Steal::new(mir))

    /// Allocates a finalized MIR body in the global arena.
    pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
        self.global_arenas.mir.alloc(mir)

    /// Allocates typeck tables in the global arena.
    pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
        self.global_arenas.tables.alloc(tables)

    /// Allocates a trait definition in the global arena.
    pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
        self.global_arenas.trait_def.alloc(def)

    /// Builds and allocates an `AdtDef` in the global arena.
    pub fn alloc_adt_def(self,
                         variants: Vec<ty::VariantDef>,
        -> &'gcx ty::AdtDef {
        let def = ty::AdtDef::new(self, did, kind, variants, repr);
        self.global_arenas.adt_def.alloc(def)

    /// Copies `bytes` into the global arena (empty input short-circuits).
    pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
        if bytes.is_empty() {
        self.global_interners.arena.alloc_slice(bytes)

    /// Copies a slice of constants into the (possibly local) arena.
    pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
                             -> &'tcx [&'tcx ty::Const<'tcx>] {
        if values.is_empty() {
        self.interners.arena.alloc_slice(values)

    /// Copies a slice of (name, constant) pairs into the arena.
    pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
                                  -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
        if values.is_empty() {
        self.interners.arena.alloc_slice(values)

    /// Interns `stab`, deduplicating identical Stability values.
    pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
        if let Some(st) = self.stability_interner.borrow().get(&stab) {
        let interned = self.global_interners.arena.alloc(stab);
        if let Some(prev) = self.stability_interner.borrow_mut().replace(interned) {
            bug!("Tried to overwrite interned Stability: {:?}", prev)

    /// Interns `layout`, deduplicating identical Layout values.
    pub fn intern_layout(self, layout: Layout) -> &'gcx Layout {
        if let Some(layout) = self.layout_interner.borrow().get(&layout) {
        let interned = self.global_arenas.layout.alloc(layout);
        if let Some(prev) = self.layout_interner.borrow_mut().replace(interned) {
            bug!("Tried to overwrite interned Layout: {:?}", prev)

    /// Lifts `value` into this context's lifetime, if possible.
    pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
        value.lift_to_tcx(self)

    /// Like lift, but only tries in the global tcx.
    pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
        value.lift_to_tcx(self.global_tcx())

    /// Returns true if self is the same as self.global_tcx().
    fn is_global(self) -> bool {
        // Compared by interner identity: a handle is global iff it points
        // at the global interners.
        let local = self.interners as *const _;
        let global = &self.global_interners as *const _;
        local as usize == global as usize
1018 /// Create a type context and call the closure with a `TyCtxt` reference
1019 /// to the context. The closure enforces that the type context and any interned
1020 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
1021 /// reference to the context, to allow formatting values that need it.
1022 pub fn create_and_enter<F, R>(s: &'tcx Session,
1023 cstore: &'tcx CrateStore,
1024 local_providers: ty::maps::Providers<'tcx>,
1025 extern_providers: ty::maps::Providers<'tcx>,
1026 mir_passes: Rc<Passes>,
1027 arenas: &'tcx GlobalArenas<'tcx>,
1028 arena: &'tcx DroplessArena,
1029 resolutions: ty::Resolutions,
1030 named_region_map: resolve_lifetime::NamedRegionMap,
1031 hir: hir_map::Map<'tcx>,
1034 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
1036 let data_layout = TargetDataLayout::parse(s);
1037 let interners = CtxtInterners::new(arena);
1038 let common_types = CommonTypes::new(&interners);
1039 let dep_graph = hir.dep_graph.clone();
1040 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1041 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1042 providers[LOCAL_CRATE] = local_providers;
1044 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1045 let upstream_def_path_tables: Vec<(CrateNum, Rc<_>)> = cstore
1048 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
1051 let def_path_tables = || {
1052 upstream_def_path_tables
1054 .map(|&(cnum, ref rc)| (cnum, &**rc))
1055 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1058 // Precompute the capacity of the hashmap so we don't have to
1059 // re-allocate when populating it.
1060 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();
1062 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1064 ::std::default::Default::default()
1067 for (cnum, def_path_table) in def_path_tables() {
1068 def_path_table.add_def_path_hashes_to(cnum, &mut map);
1076 let mut trait_map = FxHashMap();
1077 for (k, v) in resolutions.trait_map {
1078 let hir_id = hir.node_to_hir_id(k);
1079 let map = trait_map.entry(hir_id.owner)
1080 .or_insert_with(|| Rc::new(FxHashMap()));
1081 Rc::get_mut(map).unwrap().insert(hir_id.local_id, Rc::new(v));
1083 let mut defs = FxHashMap();
1084 for (k, v) in named_region_map.defs {
1085 let hir_id = hir.node_to_hir_id(k);
1086 let map = defs.entry(hir_id.owner)
1087 .or_insert_with(|| Rc::new(FxHashMap()));
1088 Rc::get_mut(map).unwrap().insert(hir_id.local_id, v);
1090 let mut late_bound = FxHashMap();
1091 for k in named_region_map.late_bound {
1092 let hir_id = hir.node_to_hir_id(k);
1093 let map = late_bound.entry(hir_id.owner)
1094 .or_insert_with(|| Rc::new(FxHashSet()));
1095 Rc::get_mut(map).unwrap().insert(hir_id.local_id);
1097 let mut object_lifetime_defaults = FxHashMap();
1098 for (k, v) in named_region_map.object_lifetime_defaults {
1099 let hir_id = hir.node_to_hir_id(k);
1100 let map = object_lifetime_defaults.entry(hir_id.owner)
1101 .or_insert_with(|| Rc::new(FxHashMap()));
1102 Rc::get_mut(map).unwrap().insert(hir_id.local_id, Rc::new(v));
1105 tls::enter_global(GlobalCtxt {
1108 trans_trait_caches: traits::trans::TransTraitCaches::new(dep_graph.clone()),
1109 global_arenas: arenas,
1110 global_interners: interners,
1111 dep_graph: dep_graph.clone(),
1112 types: common_types,
1113 named_region_map: NamedRegionMap {
1116 object_lifetime_defaults,
1119 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1122 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1123 (hir.local_def_id(k), Rc::new(v))
1125 maybe_unused_trait_imports:
1126 resolutions.maybe_unused_trait_imports
1128 .map(|id| hir.local_def_id(id))
1130 maybe_unused_extern_crates:
1131 resolutions.maybe_unused_extern_crates
1133 .map(|(id, sp)| (hir.local_def_id(id), sp))
1136 def_path_hash_to_def_id,
1137 maps: maps::Maps::new(providers),
1139 rcache: RefCell::new(FxHashMap()),
1140 normalized_cache: RefCell::new(FxHashMap()),
1141 inhabitedness_cache: RefCell::new(FxHashMap()),
1142 used_mut_nodes: RefCell::new(NodeSet()),
1143 selection_cache: traits::SelectionCache::new(),
1144 evaluation_cache: traits::EvaluationCache::new(),
1145 rvalue_promotable_to_static: RefCell::new(NodeMap()),
1146 crate_name: Symbol::intern(crate_name),
1148 layout_interner: RefCell::new(FxHashSet()),
1149 layout_depth: Cell::new(0),
1150 derive_macros: RefCell::new(NodeMap()),
1151 stability_interner: RefCell::new(FxHashSet()),
1152 all_traits: RefCell::new(None),
// Delegates to `Session::consider_optimizing`, passing the local crate's
// name; used to gate optional optimizations (the `msg` closure produces a
// description lazily).
1156 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1157 let cname = self.crate_name(LOCAL_CRATE).as_str();
1158 self.sess.consider_optimizing(&cname, msg)
// Returns the local crate's language-item table via the `get_lang_items`
// query, deliberately outside dep-graph tracking (see FIXME below).
1161 pub fn lang_items(self) -> Rc<middle::lang_items::LanguageItems> {
1162 // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
1163 // graph here to ignore the fact that `get_lang_items` below depends on
1164 // the entire crate. For now this'll prevent false positives of
1165 // recompiling too much when anything changes.
1167 // Once red/green incremental compilation lands we should be able to
1168 // remove this because while the crate changes often the lint level map
1169 // will change rarely.
1170 self.dep_graph.with_ignore(|| {
1171 self.get_lang_items(LOCAL_CRATE)
// Returns the crate-wide stability index, also wrapped in
// `dep_graph.with_ignore` to avoid recording a whole-crate dependency.
1175 pub fn stability(self) -> Rc<stability::Index<'tcx>> {
1176 // FIXME(#42293) we should actually track this, but fails too many tests
1178 self.dep_graph.with_ignore(|| {
1179 self.stability_index(LOCAL_CRATE)
// Convenience wrapper: all crate numbers, via the `all_crate_nums` query.
1183 pub fn crates(self) -> Rc<Vec<CrateNum>> {
1184 self.all_crate_nums(LOCAL_CRATE)
// Enters this GlobalCtxt with a *local* (inference-capable) TyCtxt whose
// local interners allocate from the caller-supplied arena.
1188 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
1189 /// Call the closure with a local `TyCtxt` using the given arena.
1190 pub fn enter_local<F, R>(&self, arena: &'tcx DroplessArena, f: F) -> R
1191 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1193 let interners = CtxtInterners::new(arena);
1194 tls::enter(self, &interners, f)
1198 /// A trait implemented for all X<'a> types which can be safely and
1199 /// efficiently converted to X<'tcx> as long as they are part of the
1200 /// provided TyCtxt<'tcx>.
1201 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1202 /// by looking them up in their respective interners.
1204 /// However, this is still not the best implementation as it does
1205 /// need to compare the components, even for interned values.
1206 /// It would be more efficient if TypedArena provided a way to
1207 /// determine whether the address is in the allocated range.
1209 /// None is returned if the value or one of the components is not part
1210 /// of the provided context.
1211 /// For Ty, None can be returned if either the type interner doesn't
1212 /// contain the TypeVariants key or if the address of the interned
1213 /// pointer differs. The latter case is possible if a primitive type,
1214 /// e.g. `()` or `u8`, was interned in a different context.
1215 pub trait Lift<'tcx> {
// NOTE(review): the associated `type Lifted;` declaration falls in a gap
// of this excerpt (original line 1216); the impls below all define it.
1217 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
// Lift for Ty: succeed if the pointer lives in this context's arena (the
// transmute only extends the lifetime); otherwise retry against the
// global tcx.  The fallthrough/None arm is in an excerpt gap.
1220 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1221 type Lifted = Ty<'tcx>;
1222 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1223 if tcx.interners.arena.in_arena(*self as *const _) {
1224 return Some(unsafe { mem::transmute(*self) });
1226 // Also try in the global tcx if we're not that.
1227 if !tcx.is_global() {
1228 self.lift_to_tcx(tcx.global_tcx())
// Lift for Region: same arena-membership scheme as the Ty impl above.
1235 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1236 type Lifted = Region<'tcx>;
1237 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1238 if tcx.interners.arena.in_arena(*self as *const _) {
1239 return Some(unsafe { mem::transmute(*self) });
1241 // Also try in the global tcx if we're not that.
1242 if !tcx.is_global() {
1243 self.lift_to_tcx(tcx.global_tcx())
// Lift for &Const: same arena-membership scheme as the Ty impl above.
1250 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1251 type Lifted = &'tcx Const<'tcx>;
1252 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1253 if tcx.interners.arena.in_arena(*self as *const _) {
1254 return Some(unsafe { mem::transmute(*self) });
1256 // Also try in the global tcx if we're not that.
1257 if !tcx.is_global() {
1258 self.lift_to_tcx(tcx.global_tcx())
// Lift for &Substs: the empty slice is shared across contexts, so it
// lifts unconditionally; otherwise check arena membership of the slice
// data itself.
1265 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1266 type Lifted = &'tcx Substs<'tcx>;
1267 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1268 if self.len() == 0 {
1269 return Some(Slice::empty());
1271 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1272 return Some(unsafe { mem::transmute(*self) });
1274 // Also try in the global tcx if we're not that.
1275 if !tcx.is_global() {
1276 self.lift_to_tcx(tcx.global_tcx())
// Lift for &Slice<Ty>: empty slice short-circuits, otherwise arena check.
1283 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
1284 type Lifted = &'tcx Slice<Ty<'tcx>>;
1285 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1286 -> Option<&'tcx Slice<Ty<'tcx>>> {
1287 if self.len() == 0 {
1288 return Some(Slice::empty());
1290 if tcx.interners.arena.in_arena(*self as *const _) {
1291 return Some(unsafe { mem::transmute(*self) });
1293 // Also try in the global tcx if we're not that.
1294 if !tcx.is_global() {
1295 self.lift_to_tcx(tcx.global_tcx())
// Lift for &Slice<ExistentialPredicate>: same pattern as the Ty-slice impl.
1302 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<ExistentialPredicate<'a>> {
1303 type Lifted = &'tcx Slice<ExistentialPredicate<'tcx>>;
1304 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1305 -> Option<&'tcx Slice<ExistentialPredicate<'tcx>>> {
1306 if self.is_empty() {
1307 return Some(Slice::empty());
1309 if tcx.interners.arena.in_arena(*self as *const _) {
1310 return Some(unsafe { mem::transmute(*self) });
1312 // Also try in the global tcx if we're not that.
1313 if !tcx.is_global() {
1314 self.lift_to_tcx(tcx.global_tcx())
// Lift for &Slice<Predicate>: same pattern as the other slice impls.
1321 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Predicate<'a>> {
1322 type Lifted = &'tcx Slice<Predicate<'tcx>>;
1323 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1324 -> Option<&'tcx Slice<Predicate<'tcx>>> {
1325 if self.is_empty() {
1326 return Some(Slice::empty());
1328 if tcx.interners.arena.in_arena(*self as *const _) {
1329 return Some(unsafe { mem::transmute(*self) });
1331 // Also try in the global tcx if we're not that.
1332 if !tcx.is_global() {
1333 self.lift_to_tcx(tcx.global_tcx())
1341 use super::{CtxtInterners, GlobalCtxt, TyCtxt};
1343 use std::cell::Cell;
1347 /// Marker types used for the scoped TLS slot.
1348 /// The type context cannot be used directly because the scoped TLS
1349 /// in libstd doesn't allow types generic over lifetimes.
1350 enum ThreadLocalGlobalCtxt {}
1351 enum ThreadLocalInterners {}
// Thread-local slot holding type-erased pointers to the active
// (GlobalCtxt, CtxtInterners) pair; `None` when no tcx is entered.
// NOTE(review): the surrounding `thread_local!` invocation is presumably
// in an excerpt gap — confirm against the full file.
1354 static TLS_TCX: Cell<Option<(*const ThreadLocalGlobalCtxt,
1355 *const ThreadLocalInterners)>> = Cell::new(None)
// Debug-formats a Span using the codemap of the currently-entered tcx;
// installed as the global span debug hook by `enter_global` below.
1358 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
1360 write!(f, "{}", tcx.sess.codemap().span_to_string(span))
// Takes ownership of a freshly-built GlobalCtxt, temporarily installs
// `span_debug` as the SPAN_DEBUG hook (restoring the previous hook after),
// and enters the context using its own global interners.
1364 pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R
1365 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
1367 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
1368 let original_span_debug = span_dbg.get();
1369 span_dbg.set(span_debug);
1370 let result = enter(&gcx, &gcx.global_interners, f);
1371 span_dbg.set(original_span_debug);
// Stores type-erased pointers to (gcx, interners) in TLS_TCX, saving the
// previous value so entries can nest, then calls `f` with a TyCtxt handle.
// Restoration of `prev` after `f` returns is in an excerpt gap.
1376 pub fn enter<'a, 'gcx: 'tcx, 'tcx, F, R>(gcx: &'a GlobalCtxt<'gcx>,
1377 interners: &'a CtxtInterners<'tcx>,
1379 where F: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1381 let gcx_ptr = gcx as *const _ as *const ThreadLocalGlobalCtxt;
1382 let interners_ptr = interners as *const _ as *const ThreadLocalInterners;
1383 TLS_TCX.with(|tls| {
1384 let prev = tls.get();
1385 tls.set(Some((gcx_ptr, interners_ptr)));
1386 let ret = f(TyCtxt {
// Retrieves the currently-entered tcx from TLS and calls `f` with it.
// Panics (via unwrap) if no context has been entered on this thread.
// SAFETY relies on `enter` keeping the pointed-to values alive for the
// whole TLS-slot occupancy.
1395 pub fn with<F, R>(f: F) -> R
1396 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1398 TLS_TCX.with(|tcx| {
1399 let (gcx, interners) = tcx.get().unwrap();
1400 let gcx = unsafe { &*(gcx as *const GlobalCtxt) };
1401 let interners = unsafe { &*(interners as *const CtxtInterners) };
// Non-panicking variant of `with`: passes `Some(tcx)` if a context is
// entered, otherwise (in a branch not visible here) `None`.
1409 pub fn with_opt<F, R>(f: F) -> R
1410 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
1412 if TLS_TCX.with(|tcx| tcx.get().is_some()) {
1413 with(|v| f(Some(v)))
// Debug-statistics macro: for each listed TypeVariants variant, walks the
// type interner and tallies how many interned types carry region/type
// inference flags, then prints a percentage table.  Expanded only by
// `print_debug_stats` below.
// NOTE(review): several macro body lines (struct fields, closing braces)
// fall in excerpt gaps.
1420 macro_rules! sty_debug_print {
1421 ($ctxt: expr, $($variant: ident),*) => {{
1422 // curious inner module to allow variant names to be used as
1424 #[allow(non_snake_case)]
1426 use ty::{self, TyCtxt};
1427 use ty::context::Interned;
1429 #[derive(Copy, Clone)]
1432 region_infer: usize,
1437 pub fn go(tcx: TyCtxt) {
1438 let mut total = DebugStat {
1440 region_infer: 0, ty_infer: 0, both_infer: 0,
1442 $(let mut $variant = total;)*
// Classify every interned type; trivially-flag-free primitives and
// TyError are skipped via `continue`.
1445 for &Interned(t) in tcx.interners.type_.borrow().iter() {
1446 let variant = match t.sty {
1447 ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
1448 ty::TyFloat(..) | ty::TyStr | ty::TyNever => continue,
1449 ty::TyError => /* unimportant */ continue,
1450 $(ty::$variant(..) => &mut $variant,)*
1452 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
1453 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
1457 if region { total.region_infer += 1; variant.region_infer += 1 }
1458 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
1459 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
1461 println!("Ty interner total ty region both");
1462 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
1463 {ty:4.1}% {region:5.1}% {both:4.1}%",
1464 stringify!($variant),
1465 uses = $variant.total,
1466 usespc = $variant.total as f64 * 100.0 / total.total as f64,
1467 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
1468 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
1469 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
1471 println!(" total {uses:6} \
1472 {ty:4.1}% {region:5.1}% {both:4.1}%",
1474 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
1475 region = total.region_infer as f64 * 100.0 / total.total as f64,
1476 both = total.both_infer as f64 * 100.0 / total.total as f64)
// Prints interner statistics: per-variant type counts (via the
// sty_debug_print! macro above) plus raw sizes of the other interners.
1484 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
1485 pub fn print_debug_stats(self) {
1488 TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr, TyGenerator,
1489 TyDynamic, TyClosure, TyTuple, TyParam, TyInfer, TyProjection, TyAnon;
1491 println!("Substs interner: #{}", self.interners.substs.borrow().len());
1492 println!("Region interner: #{}", self.interners.region.borrow().len());
1493 println!("Stability interner: #{}", self.stability_interner.borrow().len());
1494 println!("Layout interner: #{}", self.layout_interner.borrow().len());
1499 /// An entry in an interner.
// Newtype wrapper so interner hash sets can impose value-based (rather
// than pointer-based) Eq/Hash — see the impls below.
1500 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
1502 // NB: An Interned<Ty> compares and hashes as a sty.
1503 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
1504 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
// Compare only the `sty` payload, ignoring the cached flags/depth fields.
1505 self.0.sty == other.0.sty
1509 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
1511 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
// Hash must agree with eq above: hash only the sty (body in a gap).
1512 fn hash<H: Hasher>(&self, s: &mut H) {
// Allows interner lookups keyed by a TypeVariants value without building
// a full TyS.
1517 impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>> {
1518 fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> {
1523 // NB: An Interned<Slice<T>> compares and hashes as its elements.
1524 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
1525 fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
// Element-wise slice comparison, not pointer identity.
1526 self.0[..] == other.0[..]
1530 impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}
1532 impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
1533 fn hash<H: Hasher>(&self, s: &mut H) {
// Borrow impls letting each interner's HashSet be probed with the
// lightweight key form (a plain slice or value) instead of the interned
// wrapper.  Bodies (simple derefs) fall in excerpt gaps.
1538 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
1539 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
1544 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
1545 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
1550 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
1551 fn borrow<'a>(&'a self) -> &'a RegionKind {
1556 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
1557 for Interned<'tcx, Slice<ExistentialPredicate<'tcx>>> {
1558 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
1563 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
1564 for Interned<'tcx, Slice<Predicate<'tcx>>> {
1565 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
1570 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
1571 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
// Generates one interning method on TyCtxt per invocation.  Lookup order:
// local interner, then (for a non-global tcx) the global interner; on a
// miss, allocate in the global arena when the value has no inference
// content ($needs_infer is false), otherwise in the local arena — and
// bug! if inference content would be interned into the global tcx.
1576 macro_rules! intern_method {
1577 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
1578 $alloc_method:ident,
1581 $needs_infer:expr) -> $ty:ty) => {
1582 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
1583 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
1585 let key = ($alloc_to_key)(&v);
// Fast path: already interned locally.
1586 if let Some(i) = self.interners.$name.borrow().get(key) {
// Second chance: the global interner (only relevant off the global tcx).
1589 if !self.is_global() {
1590 if let Some(i) = self.global_interners.$name.borrow().get(key) {
1596 // HACK(eddyb) Depend on flags being accurate to
1597 // determine that all contents are in the global tcx.
1598 // See comments on Lift for why we can't use that.
1599 if !($needs_infer)(&v) {
1600 if !self.is_global() {
1604 let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
1605 self.global_interners.$name.borrow_mut().insert(Interned(i));
1609 // Make sure we don't end up with inference
1610 // types/regions in the global tcx.
1611 if self.is_global() {
1612 bug!("Attempted to intern `{:?}` which contains \
1613 inference types/regions in the global type context",
1618 let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
1619 self.interners.$name.borrow_mut().insert(Interned(i));
// For values interned by-value (not as slices): derives value-based
// Eq/Hash on Interned<$ty> and emits the interning method via
// intern_method! with identity key/ret conversions.
1626 macro_rules! direct_interners {
1627 ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
1628 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
1629 fn eq(&self, other: &Self) -> bool {
1634 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
1636 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
1637 fn hash<H: Hasher>(&self, s: &mut H) {
1646 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
1647 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Instantiate direct interners: mk_region (regions with inference
// variables stay local) and mk_const (keyed on both ty and val).
1650 direct_interners!('tcx,
1651 region: mk_region(|r| {
1653 &ty::ReVar(_) | &ty::ReSkolemized(..) => true,
1657 const_: mk_const(|c: &Const| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>
// For values interned as slices: emits an intern_method! that allocates
// with alloc_slice and transmutes &[T] to the DST &Slice<T> (same layout
// by construction of Slice); a slice needs the local tcx if any element
// does.
1660 macro_rules! slice_interners {
1661 ($($field:ident: $method:ident($ty:ident)),+) => (
1662 $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
1663 |xs: &[$ty]| -> &Slice<$ty> {
1664 unsafe { mem::transmute(xs) }
1665 }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
// The four slice interners; the underscore-prefixed methods are wrapped
// by the public `intern_*` methods further down, which handle empty
// slices and assert ordering invariants.
1670 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
1671 predicates: _intern_predicates(Predicate),
1672 type_list: _intern_type_list(Ty),
1673 substs: _intern_substs(Kind)
// Large impl block of type-constructor convenience methods on TyCtxt.
1676 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
1677 /// Create an unsafe fn ty based on a safe fn ty.
1678 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
// Precondition: input signature must be safe (asserted), output copies
// it with unsafety flipped to Unsafe.
1679 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
1680 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
1681 unsafety: hir::Unsafety::Unsafe,
1686 // Interns a type/name combination, stores the resulting box in cx.interners,
1687 // and returns the box as cast to an unsafe ptr (see comments for Ty above).
1688 pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
// Pass the global interners along only when `self` is a local tcx, so
// inference-free types can be promoted to the global arena.
1689 let global_interners = if !self.is_global() {
1690 Some(&self.global_interners)
1694 self.interners.intern_ty(st, global_interners)
// Maps an AST machine-integer type to the pre-interned common type.
1697 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
1699 ast::IntTy::Is => self.types.isize,
1700 ast::IntTy::I8 => self.types.i8,
1701 ast::IntTy::I16 => self.types.i16,
1702 ast::IntTy::I32 => self.types.i32,
1703 ast::IntTy::I64 => self.types.i64,
1704 ast::IntTy::I128 => self.types.i128,
// Maps an AST machine-unsigned type to the pre-interned common type.
1708 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
1710 ast::UintTy::Us => self.types.usize,
1711 ast::UintTy::U8 => self.types.u8,
1712 ast::UintTy::U16 => self.types.u16,
1713 ast::UintTy::U32 => self.types.u32,
1714 ast::UintTy::U64 => self.types.u64,
1715 ast::UintTy::U128 => self.types.u128,
// Maps an AST float type to the pre-interned common type.
1719 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
1721 ast::FloatTy::F32 => self.types.f32,
1722 ast::FloatTy::F64 => self.types.f64,
// Small constructor helpers; each builds one TypeVariants value and
// interns it via mk_ty (or composes other mk_* helpers).
1726 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str`.
1730 pub fn mk_static_str(self) -> Ty<'tcx> {
1731 self.mk_imm_ref(self.types.re_static, self.mk_str())
1734 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1735 // take a copy of substs so that we own the vectors inside
1736 self.mk_ty(TyAdt(def, substs))
// `Box<ty>`, built from the OwnedBox lang item's ADT.
1739 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1740 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
1741 let adt_def = self.adt_def(def_id);
1742 let substs = self.mk_substs(iter::once(Kind::from(ty)));
1743 self.mk_ty(TyAdt(adt_def, substs))
1746 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1747 self.mk_ty(TyRawPtr(tm))
1750 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
1751 self.mk_ty(TyRef(r, tm))
1754 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
1755 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1758 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
1759 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
1762 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1763 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
1766 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1767 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
// `*const ()`.
1770 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
1771 self.mk_imm_ptr(self.mk_nil())
// Fixed-length array `[ty; n]`: the length is converted into a usize
// const (panics via unwrap if `n` exceeds the target's usize — the
// caller is expected to have validated it).
1774 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
1775 let n = ConstUsize::new(n, self.sess.target.usize_ty).unwrap();
1776 self.mk_array_const_usize(ty, n)
1779 pub fn mk_array_const_usize(self, ty: Ty<'tcx>, n: ConstUsize) -> Ty<'tcx> {
1780 self.mk_ty(TyArray(ty, self.mk_const(ty::Const {
1781 val: ConstVal::Integral(ConstInt::Usize(n)),
1782 ty: self.types.usize
// Unsized slice `[ty]`.
1786 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
1787 self.mk_ty(TySlice(ty))
// Tuple constructors; `defaulted` marks the tuple produced as a
// defaulting fallback (see mk_diverging_default).
1790 pub fn intern_tup(self, ts: &[Ty<'tcx>], defaulted: bool) -> Ty<'tcx> {
1791 self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted))
1794 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I,
1795 defaulted: bool) -> I::Output {
1796 iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts), defaulted)))
// The unit type `()`.
1799 pub fn mk_nil(self) -> Ty<'tcx> {
1800 self.intern_tup(&[], false)
// Fallback type for diverging expressions: `!` when the never_type
// feature is enabled, otherwise a defaulted `()`.
1803 pub fn mk_diverging_default(self) -> Ty<'tcx> {
1804 if self.sess.features.borrow().never_type {
1807 self.intern_tup(&[], true)
1811 pub fn mk_bool(self) -> Ty<'tcx> {
// Constructors for function, trait-object, closure, and generator types.
1815 pub fn mk_fn_def(self, def_id: DefId,
1816 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1817 self.mk_ty(TyFnDef(def_id, substs))
1820 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
1821 self.mk_ty(TyFnPtr(fty))
// Trait object `dyn`-type from its (sorted, interned) predicate list and
// lifetime bound.
1826 obj: ty::Binder<&'tcx Slice<ExistentialPredicate<'tcx>>>,
1827 reg: ty::Region<'tcx>
1829 self.mk_ty(TyDynamic(obj, reg))
1832 pub fn mk_projection(self,
1834 substs: &'tcx Substs<'tcx>)
1836 self.mk_ty(TyProjection(ProjectionTy {
1842 pub fn mk_closure(self,
1844 substs: &'tcx Substs<'tcx>)
1846 self.mk_closure_from_closure_substs(closure_id, ClosureSubsts {
1851 pub fn mk_closure_from_closure_substs(self,
1853 closure_substs: ClosureSubsts<'tcx>)
1855 self.mk_ty(TyClosure(closure_id, closure_substs))
1858 pub fn mk_generator(self,
1860 closure_substs: ClosureSubsts<'tcx>,
1861 interior: GeneratorInterior<'tcx>)
1863 self.mk_ty(TyGenerator(id, closure_substs, interior))
// Inference-variable and parameter type constructors.
1866 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
1867 self.mk_infer(TyVar(v))
1870 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
1871 self.mk_infer(IntVar(v))
1874 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
1875 self.mk_infer(FloatVar(v))
1878 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
1879 self.mk_ty(TyInfer(it))
1882 pub fn mk_param(self,
1884 name: Name) -> Ty<'tcx> {
1885 self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
// `Self` is encoded as type parameter index 0.
1888 pub fn mk_self_type(self) -> Ty<'tcx> {
1889 self.mk_param(0, keywords::SelfType.name())
1892 pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
1893 self.mk_param(def.index, def.name)
// `impl Trait` anonymized type.
1896 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
1897 self.mk_ty(TyAnon(def_id, substs))
// Public slice-interning wrappers around the macro-generated `_intern_*`
// methods; they validate invariants and short-circuit empty inputs.
1900 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
1901 -> &'tcx Slice<ExistentialPredicate<'tcx>> {
// Must be non-empty and sorted (canonical order for trait objects).
1902 assert!(!eps.is_empty());
1903 assert!(eps.windows(2).all(|w| w[0].cmp(self, &w[1]) != Ordering::Greater));
1904 self._intern_existential_predicates(eps)
1907 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
1908 -> &'tcx Slice<Predicate<'tcx>> {
1909 // FIXME consider asking the input slice to be sorted to avoid
1910 // re-interning permutations, in which case that would be asserted
// Empty slice bypasses interning (the shared Slice::empty is returned in
// a branch not visible in this excerpt).
1912 if preds.len() == 0 {
1913 // The macro-generated method below asserts we don't intern an empty slice.
1916 self._intern_predicates(preds)
1920 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice<Ty<'tcx>> {
1924 self._intern_type_list(ts)
1928 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice<Kind<'tcx>> {
1932 self._intern_substs(ts)
// Builds an interned FnSig from an iterator of inputs plus the output
// type (stored together as `inputs_and_output`, output last).
1936 pub fn mk_fn_sig<I>(self,
1940 unsafety: hir::Unsafety,
1942 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
1944 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
1946 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
1947 inputs_and_output: self.intern_type_list(xs),
1948 variadic, unsafety, abi
// Iterator-driven variants of the intern_* methods, built on the
// InternAs machinery defined at the bottom of this file.
1952 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
1953 &'tcx Slice<ExistentialPredicate<'tcx>>>>(self, iter: I)
1955 iter.intern_with(|xs| self.intern_existential_predicates(xs))
1958 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
1959 &'tcx Slice<Predicate<'tcx>>>>(self, iter: I)
1961 iter.intern_with(|xs| self.intern_predicates(xs))
1964 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
1965 &'tcx Slice<Ty<'tcx>>>>(self, iter: I) -> I::Output {
1966 iter.intern_with(|xs| self.intern_type_list(xs))
1969 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
1970 &'tcx Slice<Kind<'tcx>>>>(self, iter: I) -> I::Output {
1971 iter.intern_with(|xs| self.intern_substs(xs))
// Substs for a trait reference: `self_ty` first, then the remaining
// parameters.
1974 pub fn mk_substs_trait(self,
1977 -> &'tcx Substs<'tcx>
1979 self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from))
// Lint emission helpers: resolve the effective lint level at a node and
// build/emit the corresponding diagnostic.
1982 pub fn lint_node<S: Into<MultiSpan>>(self,
1983 lint: &'static Lint,
1987 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
1990 pub fn lint_node_note<S: Into<MultiSpan>>(self,
1991 lint: &'static Lint,
1996 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
// Walks up the HIR from `id` until a node with an explicit lint level is
// found; `bug!`s if the walk falls off the crate root.
2001 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2002 -> (lint::Level, lint::LintSource)
2004 // Right now we insert a `with_ignore` node in the dep graph here to
2005 // ignore the fact that `lint_levels` below depends on the entire crate.
2006 // For now this'll prevent false positives of recompiling too much when
2007 // anything changes.
2009 // Once red/green incremental compilation lands we should be able to
2010 // remove this because while the crate changes often the lint level map
2011 // will change rarely.
2012 self.dep_graph.with_ignore(|| {
2013 let sets = self.lint_levels(LOCAL_CRATE);
2015 let hir_id = self.hir.definitions().node_to_hir_id(id);
2016 if let Some(pair) = sets.level_and_source(lint, hir_id) {
2019 let next = self.hir.get_parent_node(id);
2021 bug!("lint traversal reached the root of the crate");
2028 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
2029 lint: &'static Lint,
2033 -> DiagnosticBuilder<'tcx>
2035 let (level, src) = self.lint_level_at_node(lint, id);
2036 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
// Span-less variant of struct_span_lint_node.
2039 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
2040 -> DiagnosticBuilder<'tcx>
2042 let (level, src) = self.lint_level_at_node(lint, id);
2043 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// HirId-keyed lookups: each queries the per-owner map then indexes by
// the item-local id.  These front the owner-granular queries wired up in
// `provide` at the bottom of the file.
2046 pub fn in_scope_traits(self, id: HirId) -> Option<Rc<Vec<TraitCandidate>>> {
2047 self.in_scope_traits_map(id.owner)
2048 .and_then(|map| map.get(&id.local_id).cloned())
2051 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
2052 self.named_region_map(id.owner)
2053 .and_then(|map| map.get(&id.local_id).cloned())
2056 pub fn is_late_bound(self, id: HirId) -> bool {
// Missing owner entry means "not late-bound" (defaulted in a gap line).
2057 self.is_late_bound_map(id.owner)
2058 .map(|set| set.contains(&id.local_id))
2062 pub fn object_lifetime_defaults(self, id: HirId)
2063 -> Option<Rc<Vec<ObjectLifetimeDefault>>>
2065 self.object_lifetime_defaults_map(id.owner)
2066 .and_then(|map| map.get(&id.local_id).cloned())
// Abstraction over "collect this into a temporary slice, then intern it":
// `Self::Output` (declared in a gap line) lets fallible iterators produce
// `Result<R, E>` instead of `R`.
2070 pub trait InternAs<T: ?Sized, R> {
2072 fn intern_with<F>(self, f: F) -> Self::Output
2073 where F: FnOnce(&T) -> R;
// Blanket impl: any iterator interns via its element type's
// InternIteratorElement strategy.
2076 impl<I, T, R, E> InternAs<[T], R> for I
2077 where E: InternIteratorElement<T, R>,
2078 I: Iterator<Item=E> {
2079 type Output = E::Output;
2080 fn intern_with<F>(self, f: F) -> Self::Output
2081 where F: FnOnce(&[T]) -> R {
2082 E::intern_with(self, f)
// Per-element strategy for InternAs; `Self::Output` is declared in a gap.
2086 pub trait InternIteratorElement<T, R>: Sized {
2088 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
// Owned elements: collect into a small on-stack vector (spills to heap
// past 8 elements) and hand the slice to the interning closure.
2091 impl<T, R> InternIteratorElement<T, R> for T {
2093 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2094 f(&iter.collect::<AccumulateVec<[_; 8]>>())
// Borrowed elements: clone each before collecting (Clone bound is on a
// gap line).
2098 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
2102 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2103 f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
// Fallible elements: collecting into Result short-circuits on the first
// Err, so interning only happens when every element succeeded.
2107 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
2108 type Output = Result<R, E>;
2109 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2110 Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))
// Owner-granular (DefIndex -> ItemLocalId) form of the resolver's
// named-region results, produced by the re-keying loops in
// `create_and_enter` and served through the queries wired in `provide`.
2114 struct NamedRegionMap {
2115 defs: FxHashMap<DefIndex, Rc<FxHashMap<ItemLocalId, resolve_lifetime::Region>>>,
2116 late_bound: FxHashMap<DefIndex, Rc<FxHashSet<ItemLocalId>>>,
2117 object_lifetime_defaults:
2120 Rc<FxHashMap<ItemLocalId, Rc<Vec<ObjectLifetimeDefault>>>>,
// Wires up the simple table-backed query providers for this module.
// NOTE(review): this function continues past the end of the visible
// excerpt; only the code shown here is documented.
2124 pub fn provide(providers: &mut ty::maps::Providers) {
2125 // FIXME(#44234) - almost all of these queries have no sub-queries and
2126 // therefore no actual inputs, they're just reading tables calculated in
2127 // resolve! Does this work? Unsure! That's what the issue is about
2128 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
2129 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
2130 providers.named_region_map = |tcx, id| tcx.gcx.named_region_map.defs.get(&id).cloned();
2131 providers.is_late_bound_map = |tcx, id| tcx.gcx.named_region_map.late_bound.get(&id).cloned();
2132 providers.object_lifetime_defaults_map = |tcx, id| {
2133 tcx.gcx.named_region_map.object_lifetime_defaults.get(&id).cloned()
// The crate-granular providers below assert they are only invoked for
// the local crate.
2135 providers.crate_name = |tcx, id| {
2136 assert_eq!(id, LOCAL_CRATE);
2139 providers.get_lang_items = |tcx, id| {
2140 assert_eq!(id, LOCAL_CRATE);
2141 Rc::new(middle::lang_items::collect(tcx))
2143 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
2144 providers.maybe_unused_trait_import = |tcx, id| {
2145 tcx.maybe_unused_trait_imports.contains(&id)
2147 providers.maybe_unused_extern_crates = |tcx, cnum| {
2148 assert_eq!(cnum, LOCAL_CRATE);
2149 Rc::new(tcx.maybe_unused_extern_crates.clone())
2152 providers.stability_index = |tcx, cnum| {
2153 assert_eq!(cnum, LOCAL_CRATE);
2154 Rc::new(stability::Index::new(tcx))
// Stability lookups convert the DefIndex to a HirId and consult the
// crate-wide stability index.
2156 providers.lookup_stability = |tcx, id| {
2157 assert_eq!(id.krate, LOCAL_CRATE);
2158 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2159 tcx.stability().local_stability(id)
2161 providers.lookup_deprecation_entry = |tcx, id| {
2162 assert_eq!(id.krate, LOCAL_CRATE);
2163 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2164 tcx.stability().local_deprecation_entry(id)