1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
14 use dep_graph::{DepNode, DepConstructor};
15 use errors::DiagnosticBuilder;
17 use session::config::{BorrowckMode, OutputFilenames, OptLevel};
18 use session::config::CrateType::*;
20 use hir::{TraitCandidate, HirId, ItemLocalId};
21 use hir::def::{Def, Export};
22 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
23 use hir::map as hir_map;
24 use hir::map::DefPathHash;
25 use lint::{self, Lint};
26 use ich::{StableHashingContext, NodeIdHashingMode};
27 use infer::canonical::{CanonicalVarInfo, CanonicalVarInfos};
28 use infer::outlives::free_region_map::FreeRegionMap;
29 use middle::const_val::ConstVal;
30 use middle::cstore::{CrateStore, LinkMeta};
31 use middle::cstore::EncodedMetadata;
32 use middle::lang_items;
33 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
34 use middle::stability;
35 use mir::{self, Mir, interpret};
36 use mir::interpret::{Value, PrimVal};
37 use ty::subst::{Kind, Substs};
41 use ty::{self, Ty, TypeAndMut};
42 use ty::{TyS, TypeVariants, Slice};
43 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorInterior, Region, Const};
44 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
46 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
47 use ty::TypeVariants::*;
48 use ty::layout::{LayoutDetails, TargetDataLayout};
53 use util::nodemap::{NodeMap, DefIdSet, ItemLocalMap};
54 use util::nodemap::{FxHashMap, FxHashSet};
55 use rustc_data_structures::accumulate_vec::AccumulateVec;
56 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
57 StableHasher, StableHasherResult,
59 use arena::{TypedArena, DroplessArena};
60 use rustc_data_structures::indexed_vec::IndexVec;
61 use rustc_data_structures::sync::Lrc;
63 use std::borrow::Borrow;
64 use std::cell::{Cell, RefCell};
65 use std::cmp::Ordering;
66 use std::collections::hash_map::{self, Entry};
67 use std::hash::{Hash, Hasher};
74 use syntax::ast::{self, Name, NodeId};
76 use syntax::codemap::MultiSpan;
77 use syntax::feature_gate;
78 use syntax::symbol::{Symbol, keywords};
83 pub struct AllArenas<'tcx> {
84 pub global: GlobalArenas<'tcx>,
85 pub interner: DroplessArena,
88 impl<'tcx> AllArenas<'tcx> {
89 pub fn new() -> Self {
91 global: GlobalArenas::new(),
92 interner: DroplessArena::new(),
98 pub struct GlobalArenas<'tcx> {
100 layout: TypedArena<LayoutDetails>,
103 generics: TypedArena<ty::Generics>,
104 trait_def: TypedArena<ty::TraitDef>,
105 adt_def: TypedArena<ty::AdtDef>,
106 steal_mir: TypedArena<Steal<Mir<'tcx>>>,
107 mir: TypedArena<Mir<'tcx>>,
108 tables: TypedArena<ty::TypeckTables<'tcx>>,
110 const_allocs: TypedArena<interpret::Allocation>,
113 impl<'tcx> GlobalArenas<'tcx> {
114 pub fn new() -> GlobalArenas<'tcx> {
116 layout: TypedArena::new(),
117 generics: TypedArena::new(),
118 trait_def: TypedArena::new(),
119 adt_def: TypedArena::new(),
120 steal_mir: TypedArena::new(),
121 mir: TypedArena::new(),
122 tables: TypedArena::new(),
123 const_allocs: TypedArena::new(),
128 pub struct CtxtInterners<'tcx> {
129 /// The arena that types, regions, etc are allocated from
130 arena: &'tcx DroplessArena,
132 /// Specifically use a speedy hash algorithm for these hash sets,
133 /// they're accessed quite often.
134 type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
135 type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
136 substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
137 canonical_var_infos: RefCell<FxHashSet<Interned<'tcx, Slice<CanonicalVarInfo>>>>,
138 region: RefCell<FxHashSet<Interned<'tcx, RegionKind>>>,
139 existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
140 predicates: RefCell<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
141 const_: RefCell<FxHashSet<Interned<'tcx, Const<'tcx>>>>,
144 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
// Builds an empty set of interning tables backed by `arena`.
145 fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
148 type_: RefCell::new(FxHashSet()),
149 type_list: RefCell::new(FxHashSet()),
150 substs: RefCell::new(FxHashSet()),
151 region: RefCell::new(FxHashSet()),
152 existential_predicates: RefCell::new(FxHashSet()),
153 canonical_var_infos: RefCell::new(FxHashSet()),
154 predicates: RefCell::new(FxHashSet()),
155 const_: RefCell::new(FxHashSet()),
159 /// Intern a type. global_interners is Some only if this is
160 /// a local interner and global_interners is its counterpart.
161 fn intern_ty(&self, st: TypeVariants<'tcx>,
162 global_interners: Option<&CtxtInterners<'gcx>>)
// Fast path: the type is already interned in the local table.
165 let mut interner = self.type_.borrow_mut();
166 if let Some(&Interned(ty)) = interner.get(&st) {
// Second chance: when we are a local interner, check the global one too.
169 let global_interner = global_interners.map(|interners| {
170 interners.type_.borrow_mut()
172 if let Some(ref interner) = global_interner {
173 if let Some(&Interned(ty)) = interner.get(&st) {
178 let flags = super::flags::FlagComputation::for_sty(&st);
179 let ty_struct = TyS {
182 region_depth: flags.depth,
185 // HACK(eddyb) Depend on flags being accurate to
186 // determine that all contents are in the global tcx.
187 // See comments on Lift for why we can't use that.
188 if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
189 if let Some(interner) = global_interners {
// SAFETY: the lifetime-erasing transmute from 'tcx to 'gcx is justified
// by the KEEP_IN_LOCAL_TCX flag check above — the contents hold no
// local-only inference types/regions, so they are valid globally.
190 let ty_struct: TyS<'gcx> = unsafe {
191 mem::transmute(ty_struct)
193 let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
194 global_interner.unwrap().insert(Interned(ty));
198 // Make sure we don't end up with inference
199 // types/regions in the global tcx.
200 if global_interners.is_none() {
202 bug!("Attempted to intern `{:?}` which contains \
203 inference types/regions in the global type context",
// Otherwise allocate in the local arena and record it locally.
208 // Don't be &mut TyS.
209 let ty: Ty<'tcx> = self.arena.alloc(ty_struct);
210 interner.insert(Interned(ty));
214 debug!("Interned type: {:?} Pointer: {:?}",
215 ty, ty as *const TyS);
221 pub struct CommonTypes<'tcx> {
// NOTE(review): the primitive-type fields (bool/char/int/uint/float, never,
// err, ...) appear to be elided from this extract — confirm against the
// full source before editing.
// Pre-interned common regions, available on every TyCtxt.
241 pub re_empty: Region<'tcx>,
242 pub re_static: Region<'tcx>,
243 pub re_erased: Region<'tcx>,
246 pub struct LocalTableInContext<'a, V: 'a> {
247 local_id_root: Option<DefId>,
248 data: &'a ItemLocalMap<V>
251 /// Validate that the given HirId (respectively its `local_id` part) can be
252 /// safely used as a key in the tables of a TypeckTable. For that to be
253 /// the case, the HirId must have the same `owner` as all the other IDs in
254 /// this table (signified by `local_id_root`). Otherwise the HirId
255 /// would be in a different frame of reference and using its `local_id`
256 /// would result in lookup errors, or worse, in silently wrong data being
258 fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
261 if cfg!(debug_assertions) {
262 if let Some(local_id_root) = local_id_root {
263 if hir_id.owner != local_id_root.index {
264 ty::tls::with(|tcx| {
265 let node_id = tcx.hir
267 .find_node_for_hir_id(hir_id);
269 bug!("node {} with HirId::owner {:?} cannot be placed in \
270 TypeckTables with local_id_root {:?}",
271 tcx.hir.node_to_string(node_id),
272 DefId::local(hir_id.owner),
277 // We use "Null Object" TypeckTables in some of the analysis passes.
278 // These are just expected to be empty and their `local_id_root` is
279 // `None`. Therefore we cannot verify whether a given `HirId` would
280 // be a valid key for the given table. Instead we make sure that
281 // nobody tries to write to such a Null Object table.
283 bug!("access to invalid TypeckTables")
289 impl<'a, V> LocalTableInContext<'a, V> {
290 pub fn contains_key(&self, id: hir::HirId) -> bool {
291 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
292 self.data.contains_key(&id.local_id)
295 pub fn get(&self, id: hir::HirId) -> Option<&V> {
296 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
297 self.data.get(&id.local_id)
300 pub fn iter(&self) -> hash_map::Iter<hir::ItemLocalId, V> {
305 impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
308 fn index(&self, key: hir::HirId) -> &V {
309 self.get(key).expect("LocalTableInContext: key not found")
313 pub struct LocalTableInContextMut<'a, V: 'a> {
314 local_id_root: Option<DefId>,
315 data: &'a mut ItemLocalMap<V>
318 impl<'a, V> LocalTableInContextMut<'a, V> {
319 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
320 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
321 self.data.get_mut(&id.local_id)
324 pub fn entry(&mut self, id: hir::HirId) -> Entry<hir::ItemLocalId, V> {
325 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
326 self.data.entry(id.local_id)
329 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
330 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
331 self.data.insert(id.local_id, val)
334 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
335 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
336 self.data.remove(&id.local_id)
340 #[derive(RustcEncodable, RustcDecodable, Debug)]
341 pub struct TypeckTables<'tcx> {
342 /// The HirId::owner all ItemLocalIds in this table are relative to.
343 pub local_id_root: Option<DefId>,
345 /// Resolved definitions for `<T>::X` associated paths and
346 /// method calls, including those of overloaded operators.
347 type_dependent_defs: ItemLocalMap<Def>,
349 /// Stores the canonicalized types provided by the user. See also `UserAssertTy` statement in
351 user_provided_tys: ItemLocalMap<CanonicalTy<'tcx>>,
353 /// Stores the types for various nodes in the AST. Note that this table
354 /// is not guaranteed to be populated until after typeck. See
355 /// typeck::check::fn_ctxt for details.
356 node_types: ItemLocalMap<Ty<'tcx>>,
358 /// Stores the type parameters which were substituted to obtain the type
359 /// of this node. This only applies to nodes that refer to entities
360 /// parameterized by type parameters, such as generic fns, types, or
362 node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,
// Adjustment sequences keyed by local id; read via `adjustments()` /
// `expr_adjustments()` below.
364 adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
366 /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
367 pat_binding_modes: ItemLocalMap<BindingMode>,
369 /// Stores the types which were implicitly dereferenced in pattern binding modes
370 /// for later usage in HAIR lowering. For example,
373 /// match &&Some(5i32) {
378 /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
381 /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
382 pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
// Capture kind per upvar; accessed through `upvar_capture()` below.
385 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
387 /// Records the reasons that we picked the kind of each closure;
388 /// not all closures are present in the map.
389 closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,
391 /// For each fn, records the "liberated" types of its arguments
392 /// and return type. Liberated means that all bound regions
393 /// (including late-bound regions) are replaced with free
394 /// equivalents. This table is not used in trans (since regions
395 /// are erased there) and hence is not serialized to metadata.
396 liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,
398 /// For each FRU expression, record the normalized types of the fields
399 /// of the struct - this is needed because it is non-trivial to
400 /// normalize while preserving regions. This table is used only in
401 /// MIR construction and hence is not serialized to metadata.
402 fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,
404 /// Maps a cast expression to its kind. This is keyed on the
405 /// *from* expression of the cast, not the cast itself.
406 cast_kinds: ItemLocalMap<ty::cast::CastKind>,
408 /// Set of trait imports actually used in the method resolution.
409 /// This is used for warning unused imports. During type
410 /// checking, this `Lrc` should not be cloned: it must have a ref-count
411 /// of 1 so that we can insert things into the set mutably.
412 pub used_trait_imports: Lrc<DefIdSet>,
414 /// If any errors occurred while type-checking this body,
415 /// this field will be set to `true`.
416 pub tainted_by_errors: bool,
418 /// Stores the free-region relationships that were deduced from
419 /// its where clauses and parameter types. These are then
420 /// read-again by borrowck.
421 pub free_region_map: FreeRegionMap<'tcx>,
424 impl<'tcx> TypeckTables<'tcx> {
// Constructs a fresh table with every map empty; `local_id_root` may be
// `None` for the "Null Object" tables used by some analysis passes.
425 pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
428 type_dependent_defs: ItemLocalMap(),
429 user_provided_tys: ItemLocalMap(),
430 node_types: ItemLocalMap(),
431 node_substs: ItemLocalMap(),
432 adjustments: ItemLocalMap(),
433 pat_binding_modes: ItemLocalMap(),
434 pat_adjustments: ItemLocalMap(),
435 upvar_capture_map: FxHashMap(),
436 closure_kind_origins: ItemLocalMap(),
437 liberated_fn_sigs: ItemLocalMap(),
438 fru_field_types: ItemLocalMap(),
439 cast_kinds: ItemLocalMap(),
440 used_trait_imports: Lrc::new(DefIdSet()),
441 tainted_by_errors: false,
442 free_region_map: FreeRegionMap::new(),
446 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
// Falls back to `Def::Err` when a type-relative path has no recorded def.
447 pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
449 hir::QPath::Resolved(_, ref path) => path.def,
450 hir::QPath::TypeRelative(..) => {
451 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
452 self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
// The following pairs of accessors wrap each private map in a
// `LocalTableInContext`/`LocalTableInContextMut` view so that every
// lookup is validated against `local_id_root`.
457 pub fn type_dependent_defs(&self) -> LocalTableInContext<Def> {
458 LocalTableInContext {
459 local_id_root: self.local_id_root,
460 data: &self.type_dependent_defs
464 pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<Def> {
465 LocalTableInContextMut {
466 local_id_root: self.local_id_root,
467 data: &mut self.type_dependent_defs
471 pub fn user_provided_tys(&self) -> LocalTableInContext<CanonicalTy<'tcx>> {
472 LocalTableInContext {
473 local_id_root: self.local_id_root,
474 data: &self.user_provided_tys
478 pub fn user_provided_tys_mut(&mut self) -> LocalTableInContextMut<CanonicalTy<'tcx>> {
479 LocalTableInContextMut {
480 local_id_root: self.local_id_root,
481 data: &mut self.user_provided_tys
485 pub fn node_types(&self) -> LocalTableInContext<Ty<'tcx>> {
486 LocalTableInContext {
487 local_id_root: self.local_id_root,
488 data: &self.node_types
492 pub fn node_types_mut(&mut self) -> LocalTableInContextMut<Ty<'tcx>> {
493 LocalTableInContextMut {
494 local_id_root: self.local_id_root,
495 data: &mut self.node_types
// Panicking lookup of a node's type; `bug!`s with a human-readable node
// description when no type was recorded.
499 pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
500 match self.node_id_to_type_opt(id) {
503 bug!("node_id_to_type: no type for node `{}`",
505 let id = tcx.hir.definitions().find_node_for_hir_id(id);
506 tcx.hir.node_to_string(id)
512 pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
513 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
514 self.node_types.get(&id.local_id).cloned()
517 pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<&'tcx Substs<'tcx>> {
518 LocalTableInContextMut {
519 local_id_root: self.local_id_root,
520 data: &mut self.node_substs
// Missing substs are treated as the empty substitution rather than an error.
524 pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
525 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
526 self.node_substs.get(&id.local_id).cloned().unwrap_or(Substs::empty())
529 pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
530 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
531 self.node_substs.get(&id.local_id).cloned()
534 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
535 // doesn't provide type parameter substitutions.
536 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
537 self.node_id_to_type(pat.hir_id)
540 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
541 self.node_id_to_type_opt(pat.hir_id)
544 // Returns the type of an expression as a monotype.
546 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
547 // some cases, we insert `Adjustment` annotations such as auto-deref or
548 // auto-ref. The type returned by this function does not consider such
549 // adjustments. See `expr_ty_adjusted()` instead.
551 // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
552 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
553 // instead of "fn(ty) -> T with T = isize".
554 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
555 self.node_id_to_type(expr.hir_id)
558 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
559 self.node_id_to_type_opt(expr.hir_id)
562 pub fn adjustments(&self) -> LocalTableInContext<Vec<ty::adjustment::Adjustment<'tcx>>> {
563 LocalTableInContext {
564 local_id_root: self.local_id_root,
565 data: &self.adjustments
569 pub fn adjustments_mut(&mut self)
570 -> LocalTableInContextMut<Vec<ty::adjustment::Adjustment<'tcx>>> {
571 LocalTableInContextMut {
572 local_id_root: self.local_id_root,
573 data: &mut self.adjustments
// Returns the (possibly empty) adjustment slice recorded for `expr`.
577 pub fn expr_adjustments(&self, expr: &hir::Expr)
578 -> &[ty::adjustment::Adjustment<'tcx>] {
579 validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
580 self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
583 /// Returns the type of `expr`, considering any `Adjustment`
584 /// entry recorded for that expression.
585 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
586 self.expr_adjustments(expr)
588 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
591 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
592 self.expr_adjustments(expr)
594 .map(|adj| adj.target)
595 .or_else(|| self.expr_ty_opt(expr))
598 pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
599 // Only paths and method calls/overloaded operators have
600 // entries in type_dependent_defs, ignore the former here.
601 if let hir::ExprPath(_) = expr.node {
605 match self.type_dependent_defs().get(expr.hir_id) {
606 Some(&Def::Method(_)) => true,
611 pub fn pat_binding_modes(&self) -> LocalTableInContext<BindingMode> {
612 LocalTableInContext {
613 local_id_root: self.local_id_root,
614 data: &self.pat_binding_modes
618 pub fn pat_binding_modes_mut(&mut self)
619 -> LocalTableInContextMut<BindingMode> {
620 LocalTableInContextMut {
621 local_id_root: self.local_id_root,
622 data: &mut self.pat_binding_modes
626 pub fn pat_adjustments(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
627 LocalTableInContext {
628 local_id_root: self.local_id_root,
629 data: &self.pat_adjustments,
633 pub fn pat_adjustments_mut(&mut self)
634 -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
635 LocalTableInContextMut {
636 local_id_root: self.local_id_root,
637 data: &mut self.pat_adjustments,
// Panics (map index) if `upvar_id` is absent from the capture map.
641 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
642 self.upvar_capture_map[&upvar_id]
645 pub fn closure_kind_origins(&self) -> LocalTableInContext<(Span, ast::Name)> {
646 LocalTableInContext {
647 local_id_root: self.local_id_root,
648 data: &self.closure_kind_origins
652 pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<(Span, ast::Name)> {
653 LocalTableInContextMut {
654 local_id_root: self.local_id_root,
655 data: &mut self.closure_kind_origins
659 pub fn liberated_fn_sigs(&self) -> LocalTableInContext<ty::FnSig<'tcx>> {
660 LocalTableInContext {
661 local_id_root: self.local_id_root,
662 data: &self.liberated_fn_sigs
666 pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<ty::FnSig<'tcx>> {
667 LocalTableInContextMut {
668 local_id_root: self.local_id_root,
669 data: &mut self.liberated_fn_sigs
673 pub fn fru_field_types(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
674 LocalTableInContext {
675 local_id_root: self.local_id_root,
676 data: &self.fru_field_types
680 pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
681 LocalTableInContextMut {
682 local_id_root: self.local_id_root,
683 data: &mut self.fru_field_types
687 pub fn cast_kinds(&self) -> LocalTableInContext<ty::cast::CastKind> {
688 LocalTableInContext {
689 local_id_root: self.local_id_root,
690 data: &self.cast_kinds
694 pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<ty::cast::CastKind> {
695 LocalTableInContextMut {
696 local_id_root: self.local_id_root,
697 data: &mut self.cast_kinds
// Stable (incremental-compilation) hashing for TypeckTables: destructures
// the struct so adding a field without hashing it becomes a compile error,
// then hashes each table under HashDefPath node-id hashing.
702 impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
703 fn hash_stable<W: StableHasherResult>(&self,
704 hcx: &mut StableHashingContext<'a>,
705 hasher: &mut StableHasher<W>) {
706 let ty::TypeckTables {
708 ref type_dependent_defs,
709 ref user_provided_tys,
713 ref pat_binding_modes,
715 ref upvar_capture_map,
716 ref closure_kind_origins,
717 ref liberated_fn_sigs,
722 ref used_trait_imports,
727 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
728 type_dependent_defs.hash_stable(hcx, hasher);
729 user_provided_tys.hash_stable(hcx, hasher);
730 node_types.hash_stable(hcx, hasher);
731 node_substs.hash_stable(hcx, hasher);
732 adjustments.hash_stable(hcx, hasher);
733 pat_binding_modes.hash_stable(hcx, hasher);
734 pat_adjustments.hash_stable(hcx, hasher);
// Upvar keys contain raw local indices, so they are mapped to stable
// DefPath hashes before hashing; requires a valid local_id_root.
735 hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
742 local_id_root.expect("trying to hash invalid TypeckTables");
744 let var_owner_def_id = DefId {
745 krate: local_id_root.krate,
748 let closure_def_id = DefId {
749 krate: local_id_root.krate,
750 index: closure_expr_id.to_def_id().index,
752 (hcx.def_path_hash(var_owner_def_id),
754 hcx.def_path_hash(closure_def_id))
757 closure_kind_origins.hash_stable(hcx, hasher);
758 liberated_fn_sigs.hash_stable(hcx, hasher);
759 fru_field_types.hash_stable(hcx, hasher);
760 cast_kinds.hash_stable(hcx, hasher);
761 used_trait_imports.hash_stable(hcx, hasher);
762 tainted_by_errors.hash_stable(hcx, hasher);
763 free_region_map.hash_stable(hcx, hasher);
768 impl<'tcx> CommonTypes<'tcx> {
// Pre-interns one instance of each common type/region in `interners`
// (intended to be the global interner, hence `None` below).
769 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
// `mk` interns a TypeVariants value directly, with no local interner.
770 let mk = |sty| interners.intern_ty(sty, None);
// `mk_region` deduplicates regions through the region interner.
771 let mk_region = |r| {
772 if let Some(r) = interners.region.borrow().get(&r) {
775 let r = interners.arena.alloc(r);
776 interners.region.borrow_mut().insert(Interned(r));
784 isize: mk(TyInt(ast::IntTy::Isize)),
785 i8: mk(TyInt(ast::IntTy::I8)),
786 i16: mk(TyInt(ast::IntTy::I16)),
787 i32: mk(TyInt(ast::IntTy::I32)),
788 i64: mk(TyInt(ast::IntTy::I64)),
789 i128: mk(TyInt(ast::IntTy::I128)),
790 usize: mk(TyUint(ast::UintTy::Usize)),
791 u8: mk(TyUint(ast::UintTy::U8)),
792 u16: mk(TyUint(ast::UintTy::U16)),
793 u32: mk(TyUint(ast::UintTy::U32)),
794 u64: mk(TyUint(ast::UintTy::U64)),
795 u128: mk(TyUint(ast::UintTy::U128)),
796 f32: mk(TyFloat(ast::FloatTy::F32)),
797 f64: mk(TyFloat(ast::FloatTy::F64)),
799 re_empty: mk_region(RegionKind::ReEmpty),
800 re_static: mk_region(RegionKind::ReStatic),
801 re_erased: mk_region(RegionKind::ReErased),
806 /// The central data structure of the compiler. It stores references
807 /// to the various **arenas** and also houses the results of the
808 /// various **compiler queries** that have been performed. See the
809 /// [rustc guide] for more details.
811 /// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/ty.html
812 #[derive(Copy, Clone)]
813 pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
814 gcx: &'a GlobalCtxt<'gcx>,
815 interners: &'a CtxtInterners<'tcx>
818 impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
819 type Target = &'a GlobalCtxt<'gcx>;
820 fn deref(&self) -> &Self::Target {
825 pub struct GlobalCtxt<'tcx> {
826 global_arenas: &'tcx GlobalArenas<'tcx>,
827 global_interners: CtxtInterners<'tcx>,
829 cstore: &'tcx dyn CrateStore,
831 pub sess: &'tcx Session,
833 pub dep_graph: DepGraph,
835 /// This provides access to the incr. comp. on-disk cache for query results.
836 /// Do not access this directly. It is only meant to be used by
837 /// `DepGraph::try_mark_green()` and the query infrastructure in `ty::maps`.
838 pub(crate) on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
840 /// Common types, pre-interned for your convenience.
841 pub types: CommonTypes<'tcx>,
843 /// Map indicating what traits are in scope for places where this
844 /// is relevant; generated by resolve.
845 trait_map: FxHashMap<DefIndex,
846 Lrc<FxHashMap<ItemLocalId,
847 Lrc<StableVec<TraitCandidate>>>>>,
849 /// Export map produced by name resolution.
850 export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,
852 pub hir: hir_map::Map<'tcx>,
854 /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
855 /// as well as all upstream crates. Only populated in incremental mode.
856 pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
858 pub maps: maps::Maps<'tcx>,
860 // Records the free variables referenced by every closure
861 // expression. Do not track deps for this, just recompute it from
862 // scratch every time.
863 freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,
865 maybe_unused_trait_imports: FxHashSet<DefId>,
867 maybe_unused_extern_crates: Vec<(DefId, Span)>,
869 // Internal cache for metadata decoding. No need to track deps on this.
870 pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
872 /// Caches the results of trait selection. This cache is used
873 /// for things that do not have to do with the parameters in scope.
874 pub selection_cache: traits::SelectionCache<'tcx>,
876 /// Caches the results of trait evaluation. This cache is used
877 /// for things that do not have to do with the parameters in scope.
878 /// Merge this with `selection_cache`?
879 pub evaluation_cache: traits::EvaluationCache<'tcx>,
881 /// The definite name of the current crate after taking into account
882 /// attributes, commandline parameters, etc.
883 pub crate_name: Symbol,
885 /// Data layout specification for the current target.
886 pub data_layout: TargetDataLayout,
888 /// Used to prevent layout from recursing too deeply.
889 pub layout_depth: Cell<usize>,
891 /// Map from function to the `#[derive]` mode that it's defining. Only used
892 /// by `proc-macro` crates.
893 pub derive_macros: RefCell<NodeMap<Symbol>>,
895 stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,
897 pub interpret_interner: InterpretInterner<'tcx>,
899 layout_interner: RefCell<FxHashSet<&'tcx LayoutDetails>>,
901 /// A vector of every trait accessible in the whole crate
902 /// (i.e. including those from subcrates). This is used only for
903 /// error reporting, and so is lazily initialized and generally
904 /// shouldn't taint the common path (hence the RefCell).
905 pub all_traits: RefCell<Option<Vec<DefId>>>,
907 /// A general purpose channel to throw data out the back towards LLVM worker
910 /// This is intended to only get used during the trans phase of the compiler
911 /// when satisfying the query for a particular codegen unit. Internally in
912 /// the query it'll send data along this channel to get processed later.
913 pub tx_to_llvm_workers: mpsc::Sender<Box<dyn Any + Send>>,
915 output_filenames: Arc<OutputFilenames>,
918 /// Everything needed to efficiently work with interned allocations
919 #[derive(Debug, Default)]
920 pub struct InterpretInterner<'tcx> {
921 inner: RefCell<InterpretInternerInner<'tcx>>,
924 #[derive(Debug, Default)]
925 struct InterpretInternerInner<'tcx> {
926 /// Stores the value of constants (and deduplicates the actual memory)
927 allocs: FxHashSet<&'tcx interpret::Allocation>,
929 /// Allows obtaining function instance handles via a unique identifier
930 functions: FxHashMap<interpret::AllocId, Instance<'tcx>>,
932 /// Inverse map of `interpret_functions`.
933 /// Used so we don't allocate a new pointer every time we need one
934 function_cache: FxHashMap<Instance<'tcx>, interpret::AllocId>,
936 /// Allows obtaining const allocs via a unique identifier
937 alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,
939 /// Reverse map of `alloc_cache`
940 global_cache: FxHashMap<interpret::AllocId, DefId>,
942 /// The AllocId to assign to the next new regular allocation.
943 /// Always incremented, never gets smaller.
944 next_id: interpret::AllocId,
946 /// Allows checking whether a static already has an allocation
948 /// This is only important for detecting statics referring to themselves
949 // FIXME(oli-obk) move it to the EvalContext?
950 alloc_cache: FxHashMap<DefId, interpret::AllocId>,
952 /// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
953 /// allocations for string and bytestring literals.
954 literal_alloc_cache: FxHashMap<Vec<u8>, interpret::AllocId>,
957 impl<'tcx> InterpretInterner<'tcx> {
// Returns the existing AllocId for `instance`, or reserves a new id and
// records the instance <-> id mapping in both directions.
958 pub fn create_fn_alloc(&self, instance: Instance<'tcx>) -> interpret::AllocId {
959 if let Some(&alloc_id) = self.inner.borrow().function_cache.get(&instance) {
962 let id = self.reserve();
963 debug!("creating fn ptr: {}", id);
964 let mut inner = self.inner.borrow_mut();
965 inner.functions.insert(id, instance);
966 inner.function_cache.insert(instance, id);
// NOTE(review): several method names/headers below are elided from this
// extract; only parameter lists and bodies are visible.
972 id: interpret::AllocId,
973 ) -> Option<Instance<'tcx>> {
974 self.inner.borrow().functions.get(&id).cloned()
979 id: interpret::AllocId,
980 ) -> Option<&'tcx interpret::Allocation> {
981 self.inner.borrow().alloc_by_id.get(&id).cloned()
987 ) -> Option<interpret::AllocId> {
988 self.inner.borrow().alloc_cache.get(&static_id).cloned()
994 alloc_id: interpret::AllocId,
996 let mut inner = self.inner.borrow_mut();
997 inner.global_cache.insert(alloc_id, static_id);
998 if let Some(old) = inner.alloc_cache.insert(static_id, alloc_id) {
999 bug!("tried to cache {:?}, but was already existing as {:#?}", static_id, old);
1003 pub fn get_corresponding_static_def_id(
1005 ptr: interpret::AllocId,
1006 ) -> Option<DefId> {
1007 self.inner.borrow().global_cache.get(&ptr).cloned()
// Fills in a previously `reserve`d id; double-interning is a compiler bug.
1010 pub fn intern_at_reserved(
1012 id: interpret::AllocId,
1013 alloc: &'tcx interpret::Allocation,
1015 if let Some(old) = self.inner.borrow_mut().alloc_by_id.insert(id, alloc) {
1016 bug!("tried to intern allocation at {}, but was already existing as {:#?}", id, old);
1020 /// obtains a new allocation ID that can be referenced but does not
1021 /// yet have an allocation backing it.
1024 ) -> interpret::AllocId {
1025 let mut inner = self.inner.borrow_mut();
1026 let next = inner.next_id;
// Checked increment: ids must never wrap, or allocations would collide.
1027 inner.next_id.0 = inner.next_id.0
1029 .expect("You overflowed a u64 by incrementing by 1... \
1030 You've just earned yourself a free drink if we ever meet. \
1031 Seriously, how did you do that?!");
1036 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
1037 /// Get the global TyCtxt.
1039 pub fn global_tcx(self) -> TyCtxt<'a, 'gcx, 'gcx> {
1042 interners: &self.gcx.global_interners,
1046 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
1047 self.global_arenas.generics.alloc(generics)
1050 pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
1051 self.global_arenas.steal_mir.alloc(Steal::new(mir))
1054 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
1055 self.global_arenas.mir.alloc(mir)
1058 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
1059 self.global_arenas.tables.alloc(tables)
1062 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
1063 self.global_arenas.trait_def.alloc(def)
1066 pub fn alloc_adt_def(self,
1069 variants: Vec<ty::VariantDef>,
1071 -> &'gcx ty::AdtDef {
1072 let def = ty::AdtDef::new(self, did, kind, variants, repr);
1073 self.global_arenas.adt_def.alloc(def)
// Slice allocators. Each has a fast path for the empty slice (the elided
// arm presumably returns a shared empty slice without allocating — the
// return line is not visible here; TODO confirm).
1076 pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
1077 if bytes.is_empty() {
1080 self.global_interners.arena.alloc_slice(bytes)
// NOTE: unlike the byte-array case above, these two allocate in the
// *local* interner arena, so the result lives for 'tcx, not 'gcx.
1084 pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
1085 -> &'tcx [&'tcx ty::Const<'tcx>] {
1086 if values.is_empty() {
1089 self.interners.arena.alloc_slice(values)
1093 pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
1094 -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
1095 if values.is_empty() {
1098 self.interners.arena.alloc_slice(values)
/// Interns an interpreter `Allocation`, deduplicating structurally equal
/// allocations so each is stored once in the global arena.
1102 pub fn intern_const_alloc(
1104 alloc: interpret::Allocation,
1105 ) -> &'gcx interpret::Allocation {
1106 let allocs = &mut self.interpret_interner.inner.borrow_mut().allocs;
// Fast path: an equal allocation was interned before.
1107 if let Some(alloc) = allocs.get(&alloc) {
1111 let interned = self.global_arenas.const_allocs.alloc(alloc);
// The `get` above ruled out an equal entry, so `replace` displacing
// anything here is a compiler bug.
1112 if let Some(prev) = allocs.replace(interned) {
1113 bug!("Tried to overwrite interned Allocation: {:#?}", prev)
1118 /// Allocates a byte or string literal for `mir::interpret`
// Returns a memoized `AllocId` whose backing allocation contains exactly
// `bytes`; identical literals share one id via `literal_alloc_cache`.
1119 pub fn allocate_cached(self, bytes: &[u8]) -> interpret::AllocId {
1120 // check whether we already allocated this literal or a constant with the same memory
1121 if let Some(&alloc_id) = self.interpret_interner.inner.borrow()
1122 .literal_alloc_cache.get(bytes) {
1125 // create an allocation that just contains these bytes
1126 let alloc = interpret::Allocation::from_bytes(bytes);
1127 let alloc = self.intern_const_alloc(alloc);
1129 // the next unique id
1130 let id = self.interpret_interner.reserve();
1131 // make the allocation identifiable
1132 self.interpret_interner.inner.borrow_mut().alloc_by_id.insert(id, alloc);
1133 // cache it for the future
1134 self.interpret_interner.inner.borrow_mut().literal_alloc_cache.insert(bytes.to_owned(), id);
/// Interns an `attr::Stability`, reusing an equal instance if one was
/// interned before (get-then-replace pattern; `replace` must find nothing).
1138 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
1139 let mut stability_interner = self.stability_interner.borrow_mut();
1140 if let Some(st) = stability_interner.get(&stab) {
1144 let interned = self.global_interners.arena.alloc(stab);
1145 if let Some(prev) = stability_interner.replace(interned) {
1146 bug!("Tried to overwrite interned Stability: {:?}", prev)
/// Interns `LayoutDetails`; same pattern as `intern_stability` above,
/// but allocating from the dedicated layout arena.
1151 pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
1152 let mut layout_interner = self.layout_interner.borrow_mut();
1153 if let Some(layout) = layout_interner.get(&layout) {
1157 let interned = self.global_arenas.layout.alloc(layout);
1158 if let Some(prev) = layout_interner.replace(interned) {
1159 bug!("Tried to overwrite interned Layout: {:?}", prev)
/// Lifts `value` into this tcx's lifetime, succeeding only if all of its
/// interned components belong to this context (see the `Lift` trait).
1164 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
1165 value.lift_to_tcx(self)
1168 /// Like lift, but only tries in the global tcx.
1169 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
1170 value.lift_to_tcx(self.global_tcx())
1173 /// Returns true if self is the same as self.global_tcx().
1174 fn is_global(self) -> bool {
// Identity test by address: the global tcx is the one whose local
// interners pointer is the global interners themselves.
1175 let local = self.interners as *const _;
1176 let global = &self.global_interners as *const _;
1177 local as usize == global as usize
1180 /// Create a type context and call the closure with a `TyCtxt` reference
1181 /// to the context. The closure enforces that the type context and any interned
1182 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
1183 /// reference to the context, to allow formatting values that need it.
1184 pub fn create_and_enter<F, R>(s: &'tcx Session,
1185 cstore: &'tcx dyn CrateStore,
1186 local_providers: ty::maps::Providers<'tcx>,
1187 extern_providers: ty::maps::Providers<'tcx>,
1188 arenas: &'tcx AllArenas<'tcx>,
1189 resolutions: ty::Resolutions,
1190 hir: hir_map::Map<'tcx>,
1191 on_disk_query_result_cache: maps::OnDiskCache<'tcx>,
1193 tx: mpsc::Sender<Box<dyn Any + Send>>,
1194 output_filenames: &OutputFilenames,
1196 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
1198 let data_layout = TargetDataLayout::parse(s);
1199 let interners = CtxtInterners::new(&arenas.interner);
1200 let common_types = CommonTypes::new(&interners);
1201 let dep_graph = hir.dep_graph.clone();
// One query-provider table per crate: extern providers everywhere,
// local providers for LOCAL_CRATE.
1202 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1203 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1204 providers[LOCAL_CRATE] = local_providers;
// Reverse map DefPathHash -> DefId over every crate's def-path table;
// only built when dependency tracking needs it.
1206 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1207 let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
1210 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
1213 let def_path_tables = || {
1214 upstream_def_path_tables
1216 .map(|&(cnum, ref rc)| (cnum, &**rc))
1217 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1220 // Precompute the capacity of the hashmap so we don't have to
1221 // re-allocate when populating it.
1222 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();
1224 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1226 ::std::default::Default::default()
1229 for (cnum, def_path_table) in def_path_tables() {
1230 def_path_table.add_def_path_hashes_to(cnum, &mut map);
// Re-key the resolver's trait map from NodeId to (HirId owner, local id).
1238 let mut trait_map = FxHashMap();
1239 for (k, v) in resolutions.trait_map {
1240 let hir_id = hir.node_to_hir_id(k);
1241 let map = trait_map.entry(hir_id.owner)
1242 .or_insert_with(|| Lrc::new(FxHashMap()));
// We are the sole owner of the freshly built Lrc, so get_mut can't fail.
1243 Lrc::get_mut(map).unwrap()
1244 .insert(hir_id.local_id,
1245 Lrc::new(StableVec::new(v)));
// Assemble the GlobalCtxt; NodeId-keyed resolver outputs are re-keyed
// by DefId as they are moved in.
1248 let gcx = &GlobalCtxt {
1251 global_arenas: &arenas.global,
1252 global_interners: interners,
1253 dep_graph: dep_graph.clone(),
1254 on_disk_query_result_cache,
1255 types: common_types,
1257 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1260 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1261 (hir.local_def_id(k), Lrc::new(v))
1263 maybe_unused_trait_imports:
1264 resolutions.maybe_unused_trait_imports
1266 .map(|id| hir.local_def_id(id))
1268 maybe_unused_extern_crates:
1269 resolutions.maybe_unused_extern_crates
1271 .map(|(id, sp)| (hir.local_def_id(id), sp))
1274 def_path_hash_to_def_id,
1275 maps: maps::Maps::new(providers),
1276 rcache: RefCell::new(FxHashMap()),
1277 selection_cache: traits::SelectionCache::new(),
1278 evaluation_cache: traits::EvaluationCache::new(),
1279 crate_name: Symbol::intern(crate_name),
1281 layout_interner: RefCell::new(FxHashSet()),
1282 layout_depth: Cell::new(0),
1283 derive_macros: RefCell::new(NodeMap()),
1284 stability_interner: RefCell::new(FxHashSet()),
1285 interpret_interner: Default::default(),
1286 all_traits: RefCell::new(None),
1287 tx_to_llvm_workers: tx,
1288 output_filenames: Arc::new(output_filenames.clone()),
// Install the context in TLS and run the rest of the compilation in `f`.
1291 tls::enter_global(gcx, f)
// Asks the session whether the optimization described by `msg` should be
// applied (the closure builds the message lazily).
1294 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1295 let cname = self.crate_name(LOCAL_CRATE).as_str();
1296 self.sess.consider_optimizing(&cname, msg)
// Convenience wrapper: the lang-items query for the local crate.
1299 pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
1300 self.get_lang_items(LOCAL_CRATE)
1303 /// Due to missing llvm support for lowering 128 bit math to software emulation
1304 /// (on some targets), the lowering can be done in MIR.
1306 /// This function only exists until said support is implemented.
// Maps a 128-bit-math lang-item `DefId` to `(binop, is_checked)`; the
// `*o_fn` items are the overflow-checked variants (second element true).
1307 pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
1308 let items = self.lang_items();
// Wrap once so every arm compares Option<DefId> == Option<DefId>.
1309 let def_id = Some(def_id);
1310 if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1311 else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1312 else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1313 else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1314 else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1315 else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1316 else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1317 else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1318 else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1319 else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1320 else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1321 else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1322 else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1323 else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1324 else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1325 else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1326 else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1327 else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1328 else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1329 else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1330 else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1331 else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1332 else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1333 else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
// LOCAL_CRATE-keyed query wrappers.
1337 pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
1338 self.stability_index(LOCAL_CRATE)
1341 pub fn crates(self) -> Lrc<Vec<CrateNum>> {
1342 self.all_crate_nums(LOCAL_CRATE)
1345 pub fn features(self) -> Lrc<feature_gate::Features> {
1346 self.features_query(LOCAL_CRATE)
// Local ids resolve through the HIR map, non-local through the crate store.
1349 pub fn def_key(self, id: DefId) -> hir_map::DefKey {
1351 self.hir.def_key(id)
1353 self.cstore.def_key(id)
1357 /// Convert a `DefId` into its fully expanded `DefPath` (every
1358 /// `DefId` is really just an interned def-path).
1360 /// Note that if `id` is not local to this crate, the result will
1361 /// be a non-local `DefPath`.
1362 pub fn def_path(self, id: DefId) -> hir_map::DefPath {
1364 self.hir.def_path(id)
1366 self.cstore.def_path(id)
/// Stable cross-compilation hash of the def-path: local ids from the
/// HIR definitions table, non-local ids from the crate store.
1371 pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
1372 if def_id.is_local() {
1373 self.hir.definitions().def_path_hash(def_id.index)
1375 self.cstore.def_path_hash(def_id)
/// Human-readable def-path string for debug logging, deliberately built
/// without going through the query system.
1379 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1380 // We are explicitly not going through queries here in order to get
1381 // crate name and disambiguator since this code is called from debug!()
1382 // statements within the query system and we'd run into endless
1383 // recursion otherwise.
1384 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1385 (self.crate_name.clone(),
1386 self.sess.local_crate_disambiguator())
1388 (self.cstore.crate_name_untracked(def_id.krate),
1389 self.cstore.crate_disambiguator_untracked(def_id.krate))
1394 // Don't print the whole crate disambiguator. That's just
1395 // annoying in debug output.
1396 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1397 self.def_path(def_id).to_string_no_crate())
// Owned copy of the metadata version bytes, as reported by the crate store.
1400 pub fn metadata_encoding_version(self) -> Vec<u8> {
1401 self.cstore.metadata_encoding_version().to_vec()
1404 // Note that this is *untracked* and should only be used within the query
1405 // system if the result is otherwise tracked through queries
1406 pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
1407 self.cstore.crate_data_as_rc_any(cnum)
// Builds a fresh StableHashingContext; the krate is read under
// `with_ignore` so the access is not recorded as a dep-graph read.
1410 pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
1411 let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate())
1413 StableHashingContext::new(self.sess,
1415 self.hir.definitions(),
1419 // This method makes sure that we have a DepNode and a Fingerprint for
1420 // every upstream crate. It needs to be called once right after the tcx is
1422 // With full-fledged red/green, the method will probably become unnecessary
1423 // as this will be done on-demand.
1424 pub fn allocate_metadata_dep_nodes(self) {
1425 // We cannot use the query versions of crates() and crate_hash(), since
1426 // those would need the DepNodes that we are allocating here.
1427 for cnum in self.cstore.crates_untracked() {
1428 let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
1429 let crate_hash = self.cstore.crate_hash_untracked(cnum);
// Register the node as a task whose result is the crate hash itself.
1430 self.dep_graph.with_task(dep_node,
1433 |_, x| x // No transformation needed
1438 // This method exercises the `in_scope_traits_map` query for all possible
1439 // values so that we have their fingerprints available in the DepGraph.
1440 // This is only required as long as we still use the old dependency tracking
1441 // which needs to have the fingerprints of all input nodes beforehand.
1442 pub fn precompute_in_scope_traits_hashes(self) {
1443 for &def_index in self.trait_map.keys() {
1444 self.in_scope_traits_map(def_index);
// Persists the on-disk query result cache through the given TyEncoder
// (used for incremental compilation output).
1448 pub fn serialize_query_result_cache<E>(self,
1450 -> Result<(), E::Error>
1451 where E: ty::codec::TyEncoder
1453 self.on_disk_query_result_cache.serialize(self.global_tcx(), encoder)
1456 /// If true, we should use NLL-style region checking instead of
// NLL is enabled by either `#![feature(nll)]` or the -Znll debug flag.
1458 pub fn nll(self) -> bool {
1459 self.features().nll || self.sess.opts.debugging_opts.nll
1462 /// If true, we should use the MIR-based borrowck (we may *also* use
1463 /// the AST-based borrowck).
1464 pub fn use_mir(self) -> bool {
1465 self.borrowck_mode().use_mir()
1468 /// If true, we should enable two-phase borrows checks. This is
1469 /// done with either `-Ztwo-phase-borrows` or with
1470 /// `#![feature(nll)]`.
1471 pub fn two_phase_borrows(self) -> bool {
1472 self.features().nll || self.sess.opts.debugging_opts.two_phase_borrows
1475 /// What mode(s) of borrowck should we run? AST? MIR? both?
1476 /// (Also considers the `#![feature(nll)]` setting.)
1477 pub fn borrowck_mode(&self) -> BorrowckMode {
1478 match self.sess.opts.borrowck_mode {
// An explicit -Zborrowck=mir/compare flag is honored as-is; only the
// Ast default gets further adjustment (in the elided arm body).
1479 mode @ BorrowckMode::Mir |
1480 mode @ BorrowckMode::Compare => mode,
1482 mode @ BorrowckMode::Ast => {
1493 /// Should we emit EndRegion MIR statements? These are consumed by
1494 /// MIR borrowck, but not when NLL is used. They are also consumed
1495 /// by the validation stuff.
1496 pub fn emit_end_regions(self) -> bool {
1497 // FIXME(#46875) -- we should not emit end regions when NLL is enabled,
1498 // but for now we can't stop doing so because it causes false positives
1499 self.sess.opts.debugging_opts.emit_end_regions ||
1500 self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
/// Whether monomorphized copies of generic functions may be shared
/// between crate compilation units.
1505 pub fn share_generics(self) -> bool {
1506 match self.sess.opts.debugging_opts.share_generics {
// An explicit -Zshare-generics setting overrides the heuristics below.
1507 Some(setting) => setting,
// Default: share under incremental, or depending on the opt level.
1509 self.sess.opts.incremental.is_some() ||
1510 match self.sess.opts.optimize {
1514 OptLevel::SizeMin => true,
1516 OptLevel::Aggressive => false,
// True if any crate type being produced can export monomorphizations to
// downstream crates; executables, staticlibs, proc-macros and cdylibs
// cannot, dylibs (and the elided rlib-like arms) can.
1523 pub fn local_crate_exports_generics(self) -> bool {
// Only meaningful when generic sharing is enabled at all.
1524 debug_assert!(self.share_generics());
1526 self.sess.crate_types.borrow().iter().any(|crate_type| {
1528 CrateTypeExecutable |
1529 CrateTypeStaticlib |
1530 CrateTypeProcMacro |
1531 CrateTypeCdylib => false,
1533 CrateTypeDylib => true,
1539 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Delegates crate-metadata encoding to the crate store backend.
1540 pub fn encode_metadata(self, link_meta: &LinkMeta)
1543 self.cstore.encode_metadata(self, link_meta)
1547 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
1548 /// Call the closure with a local `TyCtxt` using the given arena.
// Builds fresh local interners over `arena`, wraps them in a TyCtxt, and
// installs a derived ImplicitCtxt in TLS for the duration of `f`.
1549 pub fn enter_local<F, R>(
1551 arena: &'tcx DroplessArena,
1555 F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1557 let interners = CtxtInterners::new(arena);
1560 interners: &interners,
1562 ty::tls::with_related_context(tcx.global_tcx(), |icx| {
1563 let new_icx = ty::tls::ImplicitCtxt {
// Keep the currently-running query job; only the tcx changes.
1565 query: icx.query.clone(),
1567 ty::tls::enter_context(&new_icx, |new_icx| {
1574 /// A trait implemented for all X<'a> types which can be safely and
1575 /// efficiently converted to X<'tcx> as long as they are part of the
1576 /// provided TyCtxt<'tcx>.
1577 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1578 /// by looking them up in their respective interners.
1580 /// However, this is still not the best implementation as it does
1581 /// need to compare the components, even for interned values.
1582 /// It would be more efficient if TypedArena provided a way to
1583 /// determine whether the address is in the allocated range.
1585 /// None is returned if the value or one of the components is not part
1586 /// of the provided context.
1587 /// For Ty, None can be returned if either the type interner doesn't
1588 /// contain the TypeVariants key or if the address of the interned
1589 /// pointer differs. The latter case is possible if a primitive type,
1590 /// e.g. `()` or `u8`, was interned in a different context.
1591 pub trait Lift<'tcx> {
// `Self::Lifted` (declared on a line not visible in this extract) is the
// 'tcx-lifetime counterpart of Self, e.g. Ty<'a> -> Ty<'tcx>.
1593 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
// Pointer-identity based lifting: if the referent lives in this tcx's
// arena it is already valid for 'tcx, so only the lifetime parameter is
// transmuted; otherwise retry against the global tcx.
1596 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1597 type Lifted = Ty<'tcx>;
1598 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1599 if tcx.interners.arena.in_arena(*self as *const _) {
// Only the lifetime changes; the arena ownership justifies it.
1600 return Some(unsafe { mem::transmute(*self) });
1602 // Also try in the global tcx if we're not that.
1603 if !tcx.is_global() {
1604 self.lift_to_tcx(tcx.global_tcx())
// Same scheme for interned regions.
1611 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1612 type Lifted = Region<'tcx>;
1613 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1614 if tcx.interners.arena.in_arena(*self as *const _) {
1615 return Some(unsafe { mem::transmute(*self) });
1617 // Also try in the global tcx if we're not that.
1618 if !tcx.is_global() {
1619 self.lift_to_tcx(tcx.global_tcx())
// And for interned constants.
1626 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1627 type Lifted = &'tcx Const<'tcx>;
1628 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1629 if tcx.interners.arena.in_arena(*self as *const _) {
1630 return Some(unsafe { mem::transmute(*self) });
1632 // Also try in the global tcx if we're not that.
1633 if !tcx.is_global() {
1634 self.lift_to_tcx(tcx.global_tcx())
// Slice-typed lifting adds one extra fast path: the canonical empty
// slice is context-independent, so it lifts unconditionally.
1641 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1642 type Lifted = &'tcx Substs<'tcx>;
1643 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1644 if self.len() == 0 {
1645 return Some(Slice::empty());
// Substs checks the element storage (`&self[..]`) rather than the
// wrapper pointer itself.
1647 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1648 return Some(unsafe { mem::transmute(*self) });
1650 // Also try in the global tcx if we're not that.
1651 if !tcx.is_global() {
1652 self.lift_to_tcx(tcx.global_tcx())
1659 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
1660 type Lifted = &'tcx Slice<Ty<'tcx>>;
1661 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1662 -> Option<&'tcx Slice<Ty<'tcx>>> {
1663 if self.len() == 0 {
1664 return Some(Slice::empty());
1666 if tcx.interners.arena.in_arena(*self as *const _) {
1667 return Some(unsafe { mem::transmute(*self) });
1669 // Also try in the global tcx if we're not that.
1670 if !tcx.is_global() {
1671 self.lift_to_tcx(tcx.global_tcx())
1678 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<ExistentialPredicate<'a>> {
1679 type Lifted = &'tcx Slice<ExistentialPredicate<'tcx>>;
1680 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1681 -> Option<&'tcx Slice<ExistentialPredicate<'tcx>>> {
1682 if self.is_empty() {
1683 return Some(Slice::empty());
1685 if tcx.interners.arena.in_arena(*self as *const _) {
1686 return Some(unsafe { mem::transmute(*self) });
1688 // Also try in the global tcx if we're not that.
1689 if !tcx.is_global() {
1690 self.lift_to_tcx(tcx.global_tcx())
1697 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Predicate<'a>> {
1698 type Lifted = &'tcx Slice<Predicate<'tcx>>;
1699 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1700 -> Option<&'tcx Slice<Predicate<'tcx>>> {
1701 if self.is_empty() {
1702 return Some(Slice::empty());
1704 if tcx.interners.arena.in_arena(*self as *const _) {
1705 return Some(unsafe { mem::transmute(*self) });
1707 // Also try in the global tcx if we're not that.
1708 if !tcx.is_global() {
1709 self.lift_to_tcx(tcx.global_tcx())
// CanonicalVarInfo has no interior lifetime, so Lifted == Self here.
1716 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<CanonicalVarInfo> {
1717 type Lifted = &'tcx Slice<CanonicalVarInfo>;
1718 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1719 if self.len() == 0 {
1720 return Some(Slice::empty());
1722 if tcx.interners.arena.in_arena(*self as *const _) {
1723 return Some(unsafe { mem::transmute(*self) });
1725 // Also try in the global tcx if we're not that.
1726 if !tcx.is_global() {
1727 self.lift_to_tcx(tcx.global_tcx())
1735 use super::{GlobalCtxt, TyCtxt};
1737 use std::cell::Cell;
1742 use errors::{Diagnostic, TRACK_DIAGNOSTICS};
1743 use rustc_data_structures::OnDrop;
1744 use rustc_data_structures::sync::Lrc;
1746 /// This is the implicit state of rustc. It contains the current
1747 /// TyCtxt and query. It is updated when creating a local interner or
1748 /// executing a new query. Whenever there's a TyCtxt value available
1749 /// you should also have access to an ImplicitCtxt through the functions
// While active, a pointer to this struct is stashed in the TLV thread
// local below (see `enter_context` / `set_tlv`).
1752 pub struct ImplicitCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
1753 /// The current TyCtxt. Initially created by `enter_global` and updated
1754 /// by `enter_local` with a new local interner
1755 pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
1757 /// The current query job, if any. This is updated by start_job in
1758 /// ty::maps::plumbing when executing a query
1759 pub query: Option<Lrc<maps::QueryJob<'gcx>>>,
1762 // A thread local value which stores a pointer to the current ImplicitCtxt
1763 thread_local!(static TLV: Cell<usize> = Cell::new(0));
// Installs `value` in TLV for the duration of `f`; OnDrop restores the
// previous value even if `f` panics.
1765 fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
1766 let old = get_tlv();
1767 let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
1768 TLV.with(|tlv| tlv.set(value));
// Reads the raw pointer value; 0 means "no ImplicitCtxt set".
1772 fn get_tlv() -> usize {
1773 TLV.with(|tlv| tlv.get())
1776 /// This is a callback from libsyntax as it cannot access the implicit state
1777 /// in librustc otherwise
1778 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
// Renders the span through the TLS tcx's codemap.
1780 write!(f, "{}", tcx.sess.codemap().span_to_string(span))
1784 /// This is a callback from libsyntax as it cannot access the implicit state
1785 /// in librustc otherwise. It is used to when diagnostic messages are
1786 /// emitted and stores them in the current query, if there is one.
1787 fn track_diagnostic(diagnostic: &Diagnostic) {
1788 with_context(|context| {
1789 if let Some(ref query) = context.query {
// Record the diagnostic on the currently executing query job.
1790 query.diagnostics.lock().push(diagnostic.clone());
1795 /// Sets up the callbacks from libsyntax on the current thread
// Temporarily installs `span_debug` and `track_diagnostic` as libsyntax's
// hooks; each original hook is restored on drop (panic-safe via OnDrop).
1796 pub fn with_thread_locals<F, R>(f: F) -> R
1797 where F: FnOnce() -> R
1799 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
1800 let original_span_debug = span_dbg.get();
1801 span_dbg.set(span_debug);
1803 let _on_drop = OnDrop(move || {
1804 span_dbg.set(original_span_debug);
1807 TRACK_DIAGNOSTICS.with(|current| {
1808 let original = current.get();
1809 current.set(track_diagnostic);
1811 let _on_drop = OnDrop(move || {
1812 current.set(original);
1820 /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
1821 pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
1823 where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
// The reference is smuggled through TLS as a raw usize; readers cast it
// back in with_context_opt.
1825 set_tlv(context as *const _ as usize, || {
1830 /// Enters GlobalCtxt by setting up libsyntax callbacks and
1831 /// creating a initial TyCtxt and ImplicitCtxt.
1832 /// This happens once per rustc session and TyCtxts only exists
1833 /// inside the `f` function.
1834 pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R
1835 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
1837 with_thread_locals(|| {
// The initial tcx's local interners *are* the global interners.
1840 interners: &gcx.global_interners,
1842 let icx = ImplicitCtxt {
1846 enter_context(&icx, |_| {
1852 /// Allows access to the current ImplicitCtxt in a closure if one is available
1853 pub fn with_context_opt<F, R>(f: F) -> R
1854 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
1856 let context = get_tlv();
// A non-zero TLV value is a live &ImplicitCtxt stored by enter_context,
// which outlives this call, so the cast back is valid here.
1860 unsafe { f(Some(&*(context as *const ImplicitCtxt))) }
1864 /// Allows access to the current ImplicitCtxt.
1865 /// Panics if there is no ImplicitCtxt available
1866 pub fn with_context<F, R>(f: F) -> R
1867 where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
1869 with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
1872 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
1873 /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
1874 /// with the same 'gcx lifetime as the TyCtxt passed in.
1875 /// This will panic if you pass it a TyCtxt which has a different global interner from
1876 /// the current ImplicitCtxt's tcx field.
1877 pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
1878 where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
1880 with_context(|context| {
// The pointer-equality assert is what justifies the lifetime-equating
// transmute below.
1882 let gcx = tcx.gcx as *const _ as usize;
1883 assert!(context.tcx.gcx as *const _ as usize == gcx);
1884 let context: &ImplicitCtxt = mem::transmute(context);
1890 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
1891 /// interner and local interner as the tcx argument passed in. This means the closure
1892 /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
1893 /// This will panic if you pass it a TyCtxt which has a different global interner or
1894 /// a different local interner from the current ImplicitCtxt's tcx field.
1895 pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
1896 where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
1898 with_context(|context| {
// Here both the global interner and the local interner must match
// before both lifetimes may be equated.
1900 let gcx = tcx.gcx as *const _ as usize;
1901 let interners = tcx.interners as *const _ as usize;
1902 assert!(context.tcx.gcx as *const _ as usize == gcx);
1903 assert!(context.tcx.interners as *const _ as usize == interners);
1904 let context: &ImplicitCtxt = mem::transmute(context);
// Thin projections of with_context / with_context_opt onto the tcx field.
1910 /// Allows access to the TyCtxt in the current ImplicitCtxt.
1911 /// Panics if there is no ImplicitCtxt available
1912 pub fn with<F, R>(f: F) -> R
1913 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1915 with_context(|context| f(context.tcx))
1918 /// Allows access to the TyCtxt in the current ImplicitCtxt.
1919 /// The closure is passed None if there is no ImplicitCtxt available
1920 pub fn with_opt<F, R>(f: F) -> R
1921 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
1923 with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
// Debug-only statistics macro: walks the type interner and prints, for
// each listed TypeVariants variant, how many interned types exist and
// what share contain region/type inference variables.
1927 macro_rules! sty_debug_print {
1928 ($ctxt: expr, $($variant: ident),*) => {{
1929 // curious inner module to allow variant names to be used as
1931 #[allow(non_snake_case)]
1933 use ty::{self, TyCtxt};
1934 use ty::context::Interned;
1936 #[derive(Copy, Clone)]
1939 region_infer: usize,
1944 pub fn go(tcx: TyCtxt) {
1945 let mut total = DebugStat {
1947 region_infer: 0, ty_infer: 0, both_infer: 0,
// One counter struct per listed variant, all starting at zero.
1949 $(let mut $variant = total;)*
1952 for &Interned(t) in tcx.interners.type_.borrow().iter() {
1953 let variant = match t.sty {
// Primitive singleton types are skipped — not interesting for stats.
1954 ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
1955 ty::TyFloat(..) | ty::TyStr | ty::TyNever => continue,
1956 ty::TyError => /* unimportant */ continue,
1957 $(ty::$variant(..) => &mut $variant,)*
// Classify by the precomputed type flags.
1959 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
1960 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
1964 if region { total.region_infer += 1; variant.region_infer += 1 }
1965 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
1966 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
1968 println!("Ty interner total ty region both");
1969 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
1970 {ty:4.1}% {region:5.1}% {both:4.1}%",
1971 stringify!($variant),
1972 uses = $variant.total,
1973 usespc = $variant.total as f64 * 100.0 / total.total as f64,
1974 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
1975 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
1976 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
1978 println!(" total {uses:6} \
1979 {ty:4.1}% {region:5.1}% {both:4.1}%",
1981 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
1982 region = total.region_infer as f64 * 100.0 / total.total as f64,
1983 both = total.both_infer as f64 * 100.0 / total.total as f64)
1991 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Dumps interner statistics (per-TypeVariants counts via the macro
// above, plus sizes of the other interner tables) to stdout.
1992 pub fn print_debug_stats(self) {
1995 TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr,
1996 TyGenerator, TyGeneratorWitness, TyDynamic, TyClosure, TyTuple,
1997 TyParam, TyInfer, TyProjection, TyAnon, TyForeign);
1999 println!("Substs interner: #{}", self.interners.substs.borrow().len());
2000 println!("Region interner: #{}", self.interners.region.borrow().len());
2001 println!("Stability interner: #{}", self.stability_interner.borrow().len());
2002 println!("Interpret interner: #{}", self.interpret_interner.inner.borrow().allocs.len());
2003 println!("Layout interner: #{}", self.layout_interner.borrow().len());
2008 /// An entry in an interner.
// Wrapper whose Eq/Hash/Borrow impls compare by the *referent* (or a key
// projected from it), so interner hash-sets can be probed by value.
2009 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
2011 // NB: An Interned<Ty> compares and hashes as a sty.
2012 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
2013 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
2014 self.0.sty == other.0.sty
2018 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
2020 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
2021 fn hash<H: Hasher>(&self, s: &mut H) {
// Borrow lets a bare `TypeVariants` be used as a lookup key for an
// Interned<TyS> entry without building a full TyS.
2026 impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>> {
2027 fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> {
2032 // NB: An Interned<Slice<T>> compares and hashes as its elements.
2033 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
2034 fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
2035 self.0[..] == other.0[..]
2039 impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}
2041 impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
2042 fn hash<H: Hasher>(&self, s: &mut H) {
// The remaining Borrow impls expose each interned payload as the plain
// key type its interner is probed with.
2047 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
2048 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
2053 impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, Slice<CanonicalVarInfo>> {
2054 fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
2059 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
2060 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
2065 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
2066 fn borrow<'a>(&'a self) -> &'a RegionKind {
2071 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
2072 for Interned<'tcx, Slice<ExistentialPredicate<'tcx>>> {
2073 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
2078 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
2079 for Interned<'tcx, Slice<Predicate<'tcx>>> {
2080 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
2085 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
2086 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
// Generates `TyCtxt::$method`, an interning constructor: probe the local
// interner, then the global one; on a miss, allocate in the appropriate
// arena and record the new entry. Values free of inference artifacts
// ($needs_infer false) are always interned globally so they can be shared.
2091 macro_rules! intern_method {
2092 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
2093 $alloc_method:ident,
2096 $needs_infer:expr) -> $ty:ty) => {
2097 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
2098 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
2100 let key = ($alloc_to_key)(&v);
// Fast paths: already interned locally or globally.
2101 if let Some(i) = self.interners.$name.borrow().get(key) {
2104 if !self.is_global() {
2105 if let Some(i) = self.global_interners.$name.borrow().get(key) {
2111 // HACK(eddyb) Depend on flags being accurate to
2112 // determine that all contents are in the global tcx.
2113 // See comments on Lift for why we can't use that.
2114 if !($needs_infer)(&v) {
2115 if !self.is_global() {
2119 let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
2120 self.global_interners.$name.borrow_mut().insert(Interned(i));
2124 // Make sure we don't end up with inference
2125 // types/regions in the global tcx.
2126 if self.is_global() {
2127 bug!("Attempted to intern `{:?}` which contains \
2128 inference types/regions in the global type context",
// Inference-carrying value in a local tcx: intern locally.
2133 let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
2134 self.interners.$name.borrow_mut().insert(Interned(i));
// For each listed type, derives `PartialEq`/`Eq`/`Hash` on its `Interned`
// wrapper (comparing/hashing by value rather than by pointer) and then
// generates the interning method via `intern_method!` with identity
// key/return conversions (`|x| x`).
2141 macro_rules! direct_interners {
2142 ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
2143 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
2144 fn eq(&self, other: &Self) -> bool {
2149 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
2151 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
2152 fn hash<H: Hasher>(&self, s: &mut H) {
2157 intern_method!($lt_tcx, $name: $method($ty, alloc, |x| x, |x| x, $needs_infer) -> $ty);)+
/// Returns `true` if `x` carries the `KEEP_IN_LOCAL_TCX` type flag,
/// i.e. it mentions inference types/regions and therefore must be
/// interned in the thread-local arena rather than the global one.
/// Used as the `$needs_infer` predicate of the interner macros below.
2161 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
2162 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Value-interned tables: regions and constants.
// A region stays local when it is an inference variable (`ReVar`) or a
// skolemized region; a constant stays local when its type or value does.
2165 direct_interners!('tcx,
2166 region: mk_region(|r| {
2168 &ty::ReVar(_) | &ty::ReSkolemized(..) => true,
2172 const_: mk_const(|c: &Const| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>
// Generates slice-interning methods: the input is a `&[$ty]`, allocated
// with `alloc_slice`, and the interned `&[T]` is reinterpreted as a
// `&Slice<T>` via `mem::transmute` (the two are layout-compatible by
// construction of `Slice`).  A slice stays local if *any* element does.
2175 macro_rules! slice_interners {
2176 ($($field:ident: $method:ident($ty:ident)),+) => (
2177 $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref,
2178 |xs: &[$ty]| -> &Slice<$ty> {
// SAFETY relies on `Slice<T>` being a transparent wrapper over `[T]`.
2179 unsafe { mem::transmute(xs) }
2180 }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
// Slice-interned tables; the generated methods are the private
// `_intern_*` entry points wrapped by the public `intern_*` methods
// further below (which add assertions such as sortedness).
2185 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
2186 predicates: _intern_predicates(Predicate),
2187 type_list: _intern_type_list(Ty),
2188 substs: _intern_substs(Kind)
2191 // This isn't a perfect fit: CanonicalVarInfo slices are always
2192 // allocated in the global arena, so this `intern_method!` macro is
2193 // overly general. But we just return false for the code that checks
2194 // whether they belong in the thread-local arena, so no harm done, and
2195 // seems better than open-coding the rest.
// The constant-`false` closure below is that "never keep local" check.
2198 canonical_var_infos: _intern_canonical_var_infos(
2199 &[CanonicalVarInfo],
2202 |xs: &[CanonicalVarInfo]| -> &Slice<CanonicalVarInfo> { unsafe { mem::transmute(xs) } },
2203 |_xs: &[CanonicalVarInfo]| -> bool { false }
2204 ) -> Slice<CanonicalVarInfo>
// Public type-construction API of `TyCtxt`.  Every `mk_*` method bottoms
// out in `mk_ty`, which deduplicates through the interners so that equal
// types share one allocation (and are pointer-comparable).
2207 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
2208 /// Given a `fn` type, returns an equivalent `unsafe fn` type;
2209 /// that is, a `fn` type that is equivalent in every way for being
2211 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
// Caller must pass a *safe* fn signature; only the unsafety is flipped.
2212 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
2213 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
2214 unsafety: hir::Unsafety::Unsafe,
2219 /// Given a closure signature `sig`, returns an equivalent `fn`
2220 /// type with the same signature. Detuples and so forth -- so
2221 /// e.g. if we have a sig with `Fn<(u32, i32)>` then you would get
2222 /// a `fn(u32, i32)`.
2223 pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2224 let converted_sig = sig.map_bound(|s| {
// Closure calling convention packs all params into `inputs()[0]` as a
// tuple; unpack it back into individual parameters here.
2225 let params_iter = match s.inputs()[0].sty {
2226 ty::TyTuple(params) => {
2227 params.into_iter().cloned()
2235 hir::Unsafety::Normal,
2240 self.mk_fn_ptr(converted_sig)
2243 // Interns a type/name combination, stores the resulting box in cx.interners,
2244 // and returns the box as cast to an unsafe ptr (see comments for Ty above).
2245 pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
// A local (inference) tcx also hands the global interners to `intern_ty`
// so globally-valid types can be promoted; the global tcx passes `None`.
2246 let global_interners = if !self.is_global() {
2247 Some(&self.global_interners)
2251 self.interners.intern_ty(st, global_interners)
/// Maps an AST integer type to the corresponding pre-interned primitive
/// type cached in `self.types` (no interner lookup needed).
2254 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
2256 ast::IntTy::Isize => self.types.isize,
2257 ast::IntTy::I8 => self.types.i8,
2258 ast::IntTy::I16 => self.types.i16,
2259 ast::IntTy::I32 => self.types.i32,
2260 ast::IntTy::I64 => self.types.i64,
2261 ast::IntTy::I128 => self.types.i128,
/// Same as `mk_mach_int`, for unsigned integer types.
2265 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
2267 ast::UintTy::Usize => self.types.usize,
2268 ast::UintTy::U8 => self.types.u8,
2269 ast::UintTy::U16 => self.types.u16,
2270 ast::UintTy::U32 => self.types.u32,
2271 ast::UintTy::U64 => self.types.u64,
2272 ast::UintTy::U128 => self.types.u128,
/// Same, for the two floating-point types.
2276 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
2278 ast::FloatTy::F32 => self.types.f32,
2279 ast::FloatTy::F64 => self.types.f64,
2283 pub fn mk_str(self) -> Ty<'tcx> {
/// `&'static str`, built from `mk_str` behind an immutable static ref.
2287 pub fn mk_static_str(self) -> Ty<'tcx> {
2288 self.mk_imm_ref(self.types.re_static, self.mk_str())
2291 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2292 // take a copy of substs so that we own the vectors inside
2293 self.mk_ty(TyAdt(def, substs))
2296 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
2297 self.mk_ty(TyForeign(def_id))
/// `Box<ty>`: resolved through the `owned_box` lang item rather than a
/// built-in type, so it is just an ADT applied to `ty`.
2300 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2301 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
2302 let adt_def = self.adt_def(def_id);
2303 let substs = self.mk_substs(iter::once(Kind::from(ty)));
2304 self.mk_ty(TyAdt(adt_def, substs))
2307 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2308 self.mk_ty(TyRawPtr(tm))
2311 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2312 self.mk_ty(TyRef(r, tm))
// The four convenience wrappers below fix the mutability for refs/ptrs.
2315 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2316 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2319 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2320 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2323 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2324 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2327 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2328 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2331 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
2332 self.mk_imm_ptr(self.mk_nil())
/// `[ty; n]` — the length is wrapped as an interned `usize` constant.
2335 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
2336 self.mk_ty(TyArray(ty, self.mk_const(ty::Const {
2337 val: ConstVal::Value(Value::ByVal(PrimVal::Bytes(n.into()))),
2338 ty: self.types.usize
2342 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2343 self.mk_ty(TySlice(ty))
/// Tuple type from an already-materialized slice of element types.
2346 pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
2347 self.mk_ty(TyTuple(self.intern_type_list(ts)))
/// Tuple type from an iterator; `InternAs` lets this also thread
/// through `Result`/reference element items (see impls at file end).
2350 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
2351 iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts))))
/// The unit type `()` is just the empty tuple.
2354 pub fn mk_nil(self) -> Ty<'tcx> {
2355 self.intern_tup(&[])
2358 pub fn mk_bool(self) -> Ty<'tcx> {
2362 pub fn mk_fn_def(self, def_id: DefId,
2363 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2364 self.mk_ty(TyFnDef(def_id, substs))
2367 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
2368 self.mk_ty(TyFnPtr(fty))
// Trait-object type: a (binder over a) sorted predicate list + region.
2373 obj: ty::Binder<&'tcx Slice<ExistentialPredicate<'tcx>>>,
2374 reg: ty::Region<'tcx>
2376 self.mk_ty(TyDynamic(obj, reg))
2379 pub fn mk_projection(self,
2381 substs: &'tcx Substs<'tcx>)
2383 self.mk_ty(TyProjection(ProjectionTy {
2389 pub fn mk_closure(self,
2391 substs: ClosureSubsts<'tcx>)
// Thin wrapper; all closure types go through the `_from_closure_substs`
// variant below.
2393 self.mk_closure_from_closure_substs(closure_id, substs)
2396 pub fn mk_closure_from_closure_substs(self,
2398 closure_substs: ClosureSubsts<'tcx>)
2400 self.mk_ty(TyClosure(closure_id, closure_substs))
2403 pub fn mk_generator(self,
2405 closure_substs: ClosureSubsts<'tcx>,
2406 interior: GeneratorInterior<'tcx>)
2408 self.mk_ty(TyGenerator(id, closure_substs, interior))
2411 pub fn mk_generator_witness(self, types: ty::Binder<&'tcx Slice<Ty<'tcx>>>) -> Ty<'tcx> {
2412 self.mk_ty(TyGeneratorWitness(types))
// Inference-variable constructors; these types carry KEEP_IN_LOCAL_TCX.
2415 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
2416 self.mk_infer(TyVar(v))
2419 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
2420 self.mk_infer(IntVar(v))
2423 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
2424 self.mk_infer(FloatVar(v))
2427 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
2428 self.mk_ty(TyInfer(it))
/// Generic type parameter, identified by index + name.
2431 pub fn mk_param(self,
2433 name: Name) -> Ty<'tcx> {
2434 self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
/// `Self` is by convention the parameter at index 0.
2437 pub fn mk_self_type(self) -> Ty<'tcx> {
2438 self.mk_param(0, keywords::SelfType.name())
2441 pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
2442 self.mk_param(def.index, def.name)
2445 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2446 self.mk_ty(TyAnon(def_id, substs))
// Public wrappers around the macro-generated `_intern_*` methods; they
// enforce the invariants the raw interners assume.
2449 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
2450 -> &'tcx Slice<ExistentialPredicate<'tcx>> {
// Trait-object predicate lists must be non-empty and sorted so that
// equal sets intern to the same pointer.
2451 assert!(!eps.is_empty());
2452 assert!(eps.windows(2).all(|w| w[0].cmp(self, &w[1]) != Ordering::Greater));
2453 self._intern_existential_predicates(eps)
2456 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
2457 -> &'tcx Slice<Predicate<'tcx>> {
2458 // FIXME consider asking the input slice to be sorted to avoid
2459 // re-interning permutations, in which case that would be asserted
2461 if preds.len() == 0 {
2462 // The macro-generated method below asserts we don't intern an empty slice.
2465 self._intern_predicates(preds)
2469 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice<Ty<'tcx>> {
2473 self._intern_type_list(ts)
2477 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice<Kind<'tcx>> {
2481 self._intern_substs(ts)
// Canonical var infos are always interned in the *global* tcx (see the
// comment on the intern_method invocation above).
2485 pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
2489 self.global_tcx()._intern_canonical_var_infos(ts)
/// Builds a `FnSig` from an iterator of input types plus the output
/// type; the output is appended as the last element of
/// `inputs_and_output` (the representation `FnSig` uses).
2493 pub fn mk_fn_sig<I>(self,
2497 unsafety: hir::Unsafety,
2499 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2501 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2503 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2504 inputs_and_output: self.intern_type_list(xs),
2505 variadic, unsafety, abi
// Iterator-based variants of the `intern_*` slice methods; `InternAs`
// collects the iterator into a slice and hands it to the closure.
2509 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2510 &'tcx Slice<ExistentialPredicate<'tcx>>>>(self, iter: I)
2512 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2515 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2516 &'tcx Slice<Predicate<'tcx>>>>(self, iter: I)
2518 iter.intern_with(|xs| self.intern_predicates(xs))
2521 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2522 &'tcx Slice<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2523 iter.intern_with(|xs| self.intern_type_list(xs))
2526 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2527 &'tcx Slice<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2528 iter.intern_with(|xs| self.intern_substs(xs))
/// Substs for a trait reference: `s` (the self type) followed by the
/// remaining type parameters `t`.
2531 pub fn mk_substs_trait(self,
2534 -> &'tcx Substs<'tcx>
2536 self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from))
/// Emits a lint at `id`/`span` immediately (builds the diagnostic via
/// `struct_span_lint_node` and emits it).
2539 pub fn lint_node<S: Into<MultiSpan>>(self,
2540 lint: &'static Lint,
2544 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
/// Like `lint_node`, but attaches an additional note before emitting.
2547 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2548 lint: &'static Lint,
2553 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
/// Walks up the HIR from `id` until a node with an explicit level for
/// `lint` is found; `bug!`s if the crate root is reached without one.
2558 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2559 -> (lint::Level, lint::LintSource)
2561 // Right now we insert a `with_ignore` node in the dep graph here to
2562 // ignore the fact that `lint_levels` below depends on the entire crate.
2563 // For now this'll prevent false positives of recompiling too much when
2564 // anything changes.
2566 // Once red/green incremental compilation lands we should be able to
2567 // remove this because while the crate changes often the lint level map
2568 // will change rarely.
2569 self.dep_graph.with_ignore(|| {
2570 let sets = self.lint_levels(LOCAL_CRATE);
// Loop body (partially elided here): check this node, else climb to
// the parent and repeat.
2572 let hir_id = self.hir.definitions().node_to_hir_id(id);
2573 if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
2576 let next = self.hir.get_parent_node(id);
2578 bug!("lint traversal reached the root of the crate");
/// Builds (but does not emit) a lint diagnostic with a span.
2585 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
2586 lint: &'static Lint,
2590 -> DiagnosticBuilder<'tcx>
2592 let (level, src) = self.lint_level_at_node(lint, id);
2593 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
/// Builds (but does not emit) a lint diagnostic with no span.
2596 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
2597 -> DiagnosticBuilder<'tcx>
2599 let (level, src) = self.lint_level_at_node(lint, id);
2600 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// Per-HirId lookups: each queries an owner-indexed map and then indexes
// by the item-local id within that owner.
2603 pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
2604 self.in_scope_traits_map(id.owner)
2605 .and_then(|map| map.get(&id.local_id).cloned())
2608 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
2609 self.named_region_map(id.owner)
2610 .and_then(|map| map.get(&id.local_id).cloned())
2613 pub fn is_late_bound(self, id: HirId) -> bool {
2614 self.is_late_bound_map(id.owner)
2615 .map(|set| set.contains(&id.local_id))
2619 pub fn object_lifetime_defaults(self, id: HirId)
2620 -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
2622 self.object_lifetime_defaults_map(id.owner)
2623 .and_then(|map| map.get(&id.local_id).cloned())
/// Abstraction for "collect this into a `T` and intern it": `intern_with`
/// materializes `self` as a `&T` slice and passes it to `f`, returning
/// `Self::Output` (which lets `Result` items produce a `Result` output).
2627 pub trait InternAs<T: ?Sized, R> {
2629 fn intern_with<F>(self, f: F) -> Self::Output
2630 where F: FnOnce(&T) -> R;
// Blanket impl: any iterator interns by delegating to its *element*
// type's `InternIteratorElement` impl, which decides how to collect
// (by value, by clone, or through `Result`).
2633 impl<I, T, R, E> InternAs<[T], R> for I
2634 where E: InternIteratorElement<T, R>,
2635 I: Iterator<Item=E> {
2636 type Output = E::Output;
2637 fn intern_with<F>(self, f: F) -> Self::Output
2638 where F: FnOnce(&[T]) -> R {
2639 E::intern_with(self, f)
/// Per-element strategy for `InternAs`: how to gather iterator items of
/// this element type into a slice for the interning closure.
2643 pub trait InternIteratorElement<T, R>: Sized {
2645 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
// Owned items: collect into a small-size-optimized vec (inline up to 8
// elements, avoiding a heap allocation for short lists) and intern.
2648 impl<T, R> InternIteratorElement<T, R> for T {
2650 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2651 f(&iter.collect::<AccumulateVec<[_; 8]>>())
// Borrowed items: same, but clone each element first.
2655 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
2659 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2660 f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
// Fallible items: short-circuit on the first `Err`, otherwise intern the
// `Ok` values and wrap the result in `Ok`.
2664 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
2665 type Output = Result<R, E>;
2666 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2667 Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))
/// Installs the query providers implemented by this module: mostly thin
/// closures that read tables already computed during resolution, plus a
/// few crate-level facts.  Providers keyed by `CrateNum` assert they are
/// only invoked for `LOCAL_CRATE`; cross-crate versions live in the
/// metadata decoder.
2671 pub fn provide(providers: &mut ty::maps::Providers) {
2672 // FIXME(#44234) - almost all of these queries have no sub-queries and
2673 // therefore no actual inputs, they're just reading tables calculated in
2674 // resolve! Does this work? Unsure! That's what the issue is about
2675 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
2676 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
2677 providers.crate_name = |tcx, id| {
2678 assert_eq!(id, LOCAL_CRATE);
2681 providers.get_lang_items = |tcx, id| {
2682 assert_eq!(id, LOCAL_CRATE);
2683 // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
2684 // graph here to ignore the fact that `get_lang_items` below depends on
2685 // the entire crate. For now this'll prevent false positives of
2686 // recompiling too much when anything changes.
2688 // Once red/green incremental compilation lands we should be able to
2689 // remove this because while the crate changes often the lint level map
2690 // will change rarely.
2691 tcx.dep_graph.with_ignore(|| Lrc::new(middle::lang_items::collect(tcx)))
2693 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
2694 providers.maybe_unused_trait_import = |tcx, id| {
2695 tcx.maybe_unused_trait_imports.contains(&id)
2697 providers.maybe_unused_extern_crates = |tcx, cnum| {
2698 assert_eq!(cnum, LOCAL_CRATE);
2699 Lrc::new(tcx.maybe_unused_extern_crates.clone())
2702 providers.stability_index = |tcx, cnum| {
2703 assert_eq!(cnum, LOCAL_CRATE);
2704 Lrc::new(stability::Index::new(tcx))
// Stability/deprecation lookups translate the DefId into a HirId first,
// then consult the locally-built stability index.
2706 providers.lookup_stability = |tcx, id| {
2707 assert_eq!(id.krate, LOCAL_CRATE);
2708 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2709 tcx.stability().local_stability(id)
2711 providers.lookup_deprecation_entry = |tcx, id| {
2712 assert_eq!(id.krate, LOCAL_CRATE);
2713 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2714 tcx.stability().local_deprecation_entry(id)
2716 providers.extern_mod_stmt_cnum = |tcx, id| {
2717 let id = tcx.hir.as_local_node_id(id).unwrap();
2718 tcx.cstore.extern_mod_stmt_cnum_untracked(id)
// The `_untracked` cstore calls below bypass dependency tracking; the
// surrounding query machinery is what records the dependency.
2720 providers.all_crate_nums = |tcx, cnum| {
2721 assert_eq!(cnum, LOCAL_CRATE);
2722 Lrc::new(tcx.cstore.crates_untracked())
2724 providers.postorder_cnums = |tcx, cnum| {
2725 assert_eq!(cnum, LOCAL_CRATE);
2726 Lrc::new(tcx.cstore.postorder_cnums_untracked())
2728 providers.output_filenames = |tcx, cnum| {
2729 assert_eq!(cnum, LOCAL_CRATE);
2730 tcx.output_filenames.clone()
2732 providers.features_query = |tcx, cnum| {
2733 assert_eq!(cnum, LOCAL_CRATE);
2734 Lrc::new(tcx.sess.features_untracked().clone())
// Crate-kind flags are read straight off the crate attributes.
2736 providers.is_panic_runtime = |tcx, cnum| {
2737 assert_eq!(cnum, LOCAL_CRATE);
2738 attr::contains_name(tcx.hir.krate_attrs(), "panic_runtime")
2740 providers.is_compiler_builtins = |tcx, cnum| {
2741 assert_eq!(cnum, LOCAL_CRATE);
2742 attr::contains_name(tcx.hir.krate_attrs(), "compiler_builtins")