1 //! type context book-keeping
3 use dep_graph::DepGraph;
4 use dep_graph::{DepNode, DepConstructor};
5 use errors::DiagnosticBuilder;
7 use session::config::{BorrowckMode, OutputFilenames};
8 use session::config::CrateType;
10 use hir::{TraitCandidate, HirId, ItemKind, ItemLocalId, Node};
11 use hir::def::{Def, Export};
12 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
13 use hir::map as hir_map;
14 use hir::map::DefPathHash;
15 use lint::{self, Lint};
16 use ich::{StableHashingContext, NodeIdHashingMode};
17 use infer::canonical::{Canonical, CanonicalVarInfo, CanonicalVarInfos};
18 use infer::outlives::free_region_map::FreeRegionMap;
19 use middle::cstore::CrateStoreDyn;
20 use middle::cstore::EncodedMetadata;
21 use middle::lang_items;
22 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
23 use middle::stability;
24 use mir::{self, Mir, interpret, ProjectionKind};
25 use mir::interpret::Allocation;
26 use ty::subst::{Kind, Substs, Subst};
29 use traits::{Clause, Clauses, GoalKind, Goal, Goals};
30 use ty::{self, Ty, TypeAndMut};
31 use ty::{TyS, TyKind, List};
32 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorSubsts, Region, Const, LazyConst};
33 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
35 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
37 use ty::GenericParamDefKind;
38 use ty::layout::{LayoutDetails, TargetDataLayout, VariantIdx};
41 use ty::subst::{UserSubsts, UnpackedKind};
42 use ty::{BoundVar, BindingMode};
43 use ty::CanonicalPolyFnSig;
44 use util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap};
45 use util::nodemap::{FxHashMap, FxHashSet};
46 use rustc_data_structures::interner::HashInterner;
47 use smallvec::SmallVec;
48 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
49 StableHasher, StableHasherResult,
51 use arena::{TypedArena, SyncDroplessArena};
52 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
53 use rustc_data_structures::sync::{self, Lrc, Lock, WorkerLocal};
55 use std::borrow::Borrow;
56 use std::cmp::Ordering;
57 use std::collections::hash_map::{self, Entry};
58 use std::hash::{Hash, Hasher};
61 use std::ops::{Deref, Bound};
65 use std::marker::PhantomData;
66 use rustc_target::spec::abi;
67 use syntax::ast::{self, NodeId};
69 use syntax::source_map::MultiSpan;
70 use syntax::edition::Edition;
71 use syntax::feature_gate;
72 use syntax::symbol::{Symbol, keywords, InternedString};
// Bundles every arena owned by one compilation session. `global` holds
// long-lived ('gcx) query results (one instance per worker thread via
// `WorkerLocal`); `interner` backs the type/region interners.
// NOTE(review): closing brace and some lines elided in this excerpt.
77 pub struct AllArenas<'tcx> {
78 pub global: WorkerLocal<GlobalArenas<'tcx>>,
79 pub interner: SyncDroplessArena,
// Private: populated once the `GlobalCtxt` is created so it lives as
// long as the arenas it borrows from.
80 global_ctxt: Option<GlobalCtxt<'tcx>>,
83 impl<'tcx> AllArenas<'tcx> {
// Constructs fresh, empty arenas. The struct-literal opener and the
// `global_ctxt: None` initializer are elided in this excerpt.
84 pub fn new() -> Self {
86 global: WorkerLocal::new(|_| GlobalArenas::default()),
87 interner: SyncDroplessArena::default(),
// Typed arenas for data that lives for the whole compilation ('gcx):
// layouts, generics, trait/ADT definitions, (stolen) MIR bodies, typeck
// results, and interned constant allocations. Each `TypedArena` hands out
// `&'gcx` references that never move or get freed until shutdown.
95 pub struct GlobalArenas<'tcx> {
97 layout: TypedArena<LayoutDetails>,
100 generics: TypedArena<ty::Generics>,
101 trait_def: TypedArena<ty::TraitDef>,
102 adt_def: TypedArena<ty::AdtDef>,
// `Steal` lets MIR passes take ownership of a body exactly once.
103 steal_mir: TypedArena<Steal<Mir<'tcx>>>,
104 mir: TypedArena<Mir<'tcx>>,
105 tables: TypedArena<ty::TypeckTables<'tcx>>,
107 const_allocs: TypedArena<interpret::Allocation>,
// An interner table: a locked hash map used as a set whose keys are
// references into the interner arena (`Interned<'tcx, T>`).
110 type InternedSet<'tcx, T> = Lock<FxHashMap<Interned<'tcx, T>, ()>>;
// One set of interner tables. There is a global instance for
// inference-free values and (during type inference) a local instance for
// values that mention inference variables; see `intern_ty` below.
112 pub struct CtxtInterners<'tcx> {
113 /// The arena that types, regions, etc are allocated from
114 arena: &'tcx SyncDroplessArena,
116 /// Specifically use a speedy hash algorithm for these hash sets,
117 /// they're accessed quite often.
118 type_: InternedSet<'tcx, TyS<'tcx>>,
119 type_list: InternedSet<'tcx, List<Ty<'tcx>>>,
120 substs: InternedSet<'tcx, Substs<'tcx>>,
121 canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo>>,
122 region: InternedSet<'tcx, RegionKind>,
123 existential_predicates: InternedSet<'tcx, List<ExistentialPredicate<'tcx>>>,
124 predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
125 const_: InternedSet<'tcx, Const<'tcx>>,
126 clauses: InternedSet<'tcx, List<Clause<'tcx>>>,
127 goal: InternedSet<'tcx, GoalKind<'tcx>>,
128 goal_list: InternedSet<'tcx, List<Goal<'tcx>>>,
129 projs: InternedSet<'tcx, List<ProjectionKind<'tcx>>>,
// Constructor plus the hot-path type interner.
// NOTE(review): this excerpt elides several lines, including the
// `intern_ty` signature (original lines ~150-157) and the TyS field
// initializers; comments below describe only what is visible.
132 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
// Builds an empty interner set backed by `arena`.
133 fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
136 type_: Default::default(),
137 type_list: Default::default(),
138 substs: Default::default(),
139 region: Default::default(),
140 existential_predicates: Default::default(),
141 canonical_var_infos: Default::default(),
142 predicates: Default::default(),
143 const_: Default::default(),
144 clauses: Default::default(),
145 goal: Default::default(),
146 goal_list: Default::default(),
147 projs: Default::default(),
// intern_ty: interns a `TyKind`, routing it to the local (inference)
// interner when its computed flags say it mentions inference
// variables/regions, and to the global interner otherwise.
154 local: &CtxtInterners<'tcx>,
155 global: &CtxtInterners<'gcx>,
158 let flags = super::flags::FlagComputation::for_sty(&st);
160 // HACK(eddyb) Depend on flags being accurate to
161 // determine that all contents are in the global tcx.
162 // See comments on Lift for why we can't use that.
163 if flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
164 local.type_.borrow_mut().intern(st, |st| {
165 let ty_struct = TyS {
168 outer_exclusive_binder: flags.outer_exclusive_binder,
171 // Make sure we don't end up with inference
172 // types/regions in the global interner
// Pointer equality of the two interner sets means "we ARE the global
// context"; interning an inference type there would be a bug.
173 if local as *const _ as usize == global as *const _ as usize {
174 bug!("Attempted to intern `{:?}` which contains \
175 inference types/regions in the global type context",
// Inference types live only as long as the local arena.
179 Interned(local.arena.alloc(ty_struct))
// Inference-free: intern globally so the value is shared and 'gcx.
182 global.type_.borrow_mut().intern(st, |st| {
183 let ty_struct = TyS {
186 outer_exclusive_binder: flags.outer_exclusive_binder,
189 // This is safe because all the types the ty_struct can point to
190 // already is in the global arena
191 let ty_struct: TyS<'gcx> = unsafe {
192 mem::transmute(ty_struct)
195 Interned(global.arena.alloc(ty_struct))
// Pre-interned types and regions that are used constantly (integer
// types, `bool`, the common regions, ...). NOTE(review): most fields
// (original lines ~202-221) are elided in this excerpt.
201 pub struct CommonTypes<'tcx> {
222 pub re_empty: Region<'tcx>,
223 pub re_static: Region<'tcx>,
224 pub re_erased: Region<'tcx>,
// Read-only view over one per-item table of a `TypeckTables`. Lookups
// take full `HirId`s but only the `local_id` part is used as the key;
// `local_id_root` records which `HirId::owner` the keys belong to so
// accesses can be validated (see `validate_hir_id_for_typeck_tables`).
227 pub struct LocalTableInContext<'a, V: 'a> {
228 local_id_root: Option<DefId>,
229 data: &'a ItemLocalMap<V>
232 /// Validate that the given HirId (respectively its `local_id` part) can be
233 /// safely used as a key in the tables of a TypeckTable. For that to be
234 /// the case, the HirId must have the same `owner` as all the other IDs in
235 /// this table (signified by `local_id_root`). Otherwise the HirId
236 /// would be in a different frame of reference and using its `local_id`
237 /// would result in lookup errors, or worse, in silently wrong data being
239 fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
// All checks below compile away in release builds.
242 if cfg!(debug_assertions) {
243 if let Some(local_id_root) = local_id_root {
// Owner mismatch: the HirId belongs to a different item than this table.
244 if hir_id.owner != local_id_root.index {
245 ty::tls::with(|tcx| {
246 let node_id = tcx.hir().hir_to_node_id(hir_id);
248 bug!("node {} with HirId::owner {:?} cannot be placed in \
249 TypeckTables with local_id_root {:?}",
250 tcx.hir().node_to_string(node_id),
251 DefId::local(hir_id.owner),
256 // We use "Null Object" TypeckTables in some of the analysis passes.
257 // These are just expected to be empty and their `local_id_root` is
258 // `None`. Therefore we cannot verify whether a given `HirId` would
259 // be a valid key for the given table. Instead we make sure that
260 // nobody tries to write to such a Null Object table.
// (reached only when `local_id_root` is None and the access is mutable)
262 bug!("access to invalid TypeckTables")
268 impl<'a, V> LocalTableInContext<'a, V> {
269 pub fn contains_key(&self, id: hir::HirId) -> bool {
270 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
271 self.data.contains_key(&id.local_id)
274 pub fn get(&self, id: hir::HirId) -> Option<&V> {
275 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
276 self.data.get(&id.local_id)
279 pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> {
284 impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
287 fn index(&self, key: hir::HirId) -> &V {
288 self.get(key).expect("LocalTableInContext: key not found")
// Mutable counterpart of `LocalTableInContext`: a validated, write-capable
// view over one per-item table of a `TypeckTables`.
292 pub struct LocalTableInContextMut<'a, V: 'a> {
293 local_id_root: Option<DefId>,
294 data: &'a mut ItemLocalMap<V>
297 impl<'a, V> LocalTableInContextMut<'a, V> {
298 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
299 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
300 self.data.get_mut(&id.local_id)
303 pub fn entry(&mut self, id: hir::HirId) -> Entry<'_, hir::ItemLocalId, V> {
304 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
305 self.data.entry(id.local_id)
308 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
309 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
310 self.data.insert(id.local_id, val)
313 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
314 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
315 self.data.remove(&id.local_id)
// The complete result of type-checking one body: every per-node fact the
// rest of the compiler needs (types, substs, adjustments, resolutions...).
// All `ItemLocalMap` fields are keyed by `ItemLocalId` relative to
// `local_id_root` and must be accessed through the validated accessors.
319 #[derive(RustcEncodable, RustcDecodable, Debug)]
320 pub struct TypeckTables<'tcx> {
321 /// The HirId::owner all ItemLocalIds in this table are relative to.
322 pub local_id_root: Option<DefId>,
324 /// Resolved definitions for `<T>::X` associated paths and
325 /// method calls, including those of overloaded operators.
326 type_dependent_defs: ItemLocalMap<Def>,
328 /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
329 /// or patterns (`S { field }`). The index is often useful by itself, but to learn more
330 /// about the field you also need definition of the variant to which the field
331 /// belongs, but it may not exist if it's a tuple field (`tuple.0`).
332 field_indices: ItemLocalMap<usize>,
334 /// Stores the types for various nodes in the AST. Note that this table
335 /// is not guaranteed to be populated until after typeck. See
336 /// typeck::check::fn_ctxt for details.
337 node_types: ItemLocalMap<Ty<'tcx>>,
339 /// Stores the type parameters which were substituted to obtain the type
340 /// of this node. This only applies to nodes that refer to entities
341 /// parameterized by type parameters, such as generic fns, types, or
343 node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,
345 /// This will either store the canonicalized types provided by the user
346 /// or the substitutions that the user explicitly gave (if any) attached
347 /// to `id`. These will not include any inferred values. The canonical form
348 /// is used to capture things like `_` or other unspecified values.
350 /// For example, if the user wrote `foo.collect::<Vec<_>>()`, then the
351 /// canonical substitutions would include only `for<X> { Vec<X> }`.
353 /// See also `AscribeUserType` statement in MIR.
354 user_provided_types: ItemLocalMap<CanonicalUserTypeAnnotation<'tcx>>,
356 /// Stores the canonicalized types provided by the user. See also
357 /// `AscribeUserType` statement in MIR.
358 pub user_provided_sigs: DefIdMap<CanonicalPolyFnSig<'tcx>>,
// Adjustments (auto-deref, auto-ref, coercions...) applied to an
// expression, in order of application.
360 adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
362 /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
363 pat_binding_modes: ItemLocalMap<BindingMode>,
365 /// Stores the types which were implicitly dereferenced in pattern binding modes
366 /// for later usage in HAIR lowering. For example,
369 /// match &&Some(5i32) {
374 /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
377 /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
378 pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
// How each upvar is captured (by value or by reference, with region).
381 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
383 /// Records the reasons that we picked the kind of each closure;
384 /// not all closures are present in the map.
385 closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,
387 /// For each fn, records the "liberated" types of its arguments
388 /// and return type. Liberated means that all bound regions
389 /// (including late-bound regions) are replaced with free
390 /// equivalents. This table is not used in codegen (since regions
391 /// are erased there) and hence is not serialized to metadata.
392 liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,
394 /// For each FRU expression, record the normalized types of the fields
395 /// of the struct - this is needed because it is non-trivial to
396 /// normalize while preserving regions. This table is used only in
397 /// MIR construction and hence is not serialized to metadata.
398 fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,
400 /// Maps a cast expression to its kind. This is keyed on the
401 /// *from* expression of the cast, not the cast itself.
402 cast_kinds: ItemLocalMap<ty::cast::CastKind>,
404 /// Set of trait imports actually used in the method resolution.
405 /// This is used for warning unused imports. During type
406 /// checking, this `Lrc` should not be cloned: it must have a ref-count
407 /// of 1 so that we can insert things into the set mutably.
408 pub used_trait_imports: Lrc<DefIdSet>,
410 /// If any errors occurred while type-checking this body,
411 /// this field will be set to `true`.
412 pub tainted_by_errors: bool,
414 /// Stores the free-region relationships that were deduced from
415 /// its where clauses and parameter types. These are then
416 /// read-again by borrowck.
417 pub free_region_map: FreeRegionMap<'tcx>,
419 /// All the existential types that are restricted to concrete types
421 pub concrete_existential_types: FxHashMap<DefId, Ty<'tcx>>,
424 impl<'tcx> TypeckTables<'tcx> {
425 pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
428 type_dependent_defs: Default::default(),
429 field_indices: Default::default(),
430 user_provided_types: Default::default(),
431 user_provided_sigs: Default::default(),
432 node_types: Default::default(),
433 node_substs: Default::default(),
434 adjustments: Default::default(),
435 pat_binding_modes: Default::default(),
436 pat_adjustments: Default::default(),
437 upvar_capture_map: Default::default(),
438 closure_kind_origins: Default::default(),
439 liberated_fn_sigs: Default::default(),
440 fru_field_types: Default::default(),
441 cast_kinds: Default::default(),
442 used_trait_imports: Lrc::new(Default::default()),
443 tainted_by_errors: false,
444 free_region_map: Default::default(),
445 concrete_existential_types: Default::default(),
449 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
450 pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
452 hir::QPath::Resolved(_, ref path) => path.def,
453 hir::QPath::TypeRelative(..) => {
454 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
455 self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
460 pub fn type_dependent_defs(&self) -> LocalTableInContext<'_, Def> {
461 LocalTableInContext {
462 local_id_root: self.local_id_root,
463 data: &self.type_dependent_defs
467 pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<'_, Def> {
468 LocalTableInContextMut {
469 local_id_root: self.local_id_root,
470 data: &mut self.type_dependent_defs
474 pub fn field_indices(&self) -> LocalTableInContext<'_, usize> {
475 LocalTableInContext {
476 local_id_root: self.local_id_root,
477 data: &self.field_indices
481 pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> {
482 LocalTableInContextMut {
483 local_id_root: self.local_id_root,
484 data: &mut self.field_indices
488 pub fn user_provided_types(
490 ) -> LocalTableInContext<'_, CanonicalUserTypeAnnotation<'tcx>> {
491 LocalTableInContext {
492 local_id_root: self.local_id_root,
493 data: &self.user_provided_types
497 pub fn user_provided_types_mut(
499 ) -> LocalTableInContextMut<'_, CanonicalUserTypeAnnotation<'tcx>> {
500 LocalTableInContextMut {
501 local_id_root: self.local_id_root,
502 data: &mut self.user_provided_types
506 pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> {
507 LocalTableInContext {
508 local_id_root: self.local_id_root,
509 data: &self.node_types
513 pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> {
514 LocalTableInContextMut {
515 local_id_root: self.local_id_root,
516 data: &mut self.node_types
520 pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
521 self.node_id_to_type_opt(id).unwrap_or_else(||
522 bug!("node_id_to_type: no type for node `{}`",
524 let id = tcx.hir().hir_to_node_id(id);
525 tcx.hir().node_to_string(id)
530 pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
531 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
532 self.node_types.get(&id.local_id).cloned()
535 pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, &'tcx Substs<'tcx>> {
536 LocalTableInContextMut {
537 local_id_root: self.local_id_root,
538 data: &mut self.node_substs
542 pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
543 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
544 self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| Substs::empty())
547 pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
548 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
549 self.node_substs.get(&id.local_id).cloned()
552 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
553 // doesn't provide type parameter substitutions.
554 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
555 self.node_id_to_type(pat.hir_id)
558 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
559 self.node_id_to_type_opt(pat.hir_id)
562 // Returns the type of an expression as a monotype.
564 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
565 // some cases, we insert `Adjustment` annotations such as auto-deref or
566 // auto-ref. The type returned by this function does not consider such
567 // adjustments. See `expr_ty_adjusted()` instead.
569 // NB (2): This type doesn't provide type parameter substitutions; e.g., if you
570 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
571 // instead of "fn(ty) -> T with T = isize".
572 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
573 self.node_id_to_type(expr.hir_id)
576 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
577 self.node_id_to_type_opt(expr.hir_id)
580 pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
581 LocalTableInContext {
582 local_id_root: self.local_id_root,
583 data: &self.adjustments
587 pub fn adjustments_mut(&mut self)
588 -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
589 LocalTableInContextMut {
590 local_id_root: self.local_id_root,
591 data: &mut self.adjustments
595 pub fn expr_adjustments(&self, expr: &hir::Expr)
596 -> &[ty::adjustment::Adjustment<'tcx>] {
597 validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
598 self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
601 /// Returns the type of `expr`, considering any `Adjustment`
602 /// entry recorded for that expression.
603 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
604 self.expr_adjustments(expr)
606 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
609 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
610 self.expr_adjustments(expr)
612 .map(|adj| adj.target)
613 .or_else(|| self.expr_ty_opt(expr))
616 pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
617 // Only paths and method calls/overloaded operators have
618 // entries in type_dependent_defs, ignore the former here.
619 if let hir::ExprKind::Path(_) = expr.node {
623 match self.type_dependent_defs().get(expr.hir_id) {
624 Some(&Def::Method(_)) => true,
629 pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> {
630 LocalTableInContext {
631 local_id_root: self.local_id_root,
632 data: &self.pat_binding_modes
636 pub fn pat_binding_modes_mut(&mut self)
637 -> LocalTableInContextMut<'_, BindingMode> {
638 LocalTableInContextMut {
639 local_id_root: self.local_id_root,
640 data: &mut self.pat_binding_modes
644 pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
645 LocalTableInContext {
646 local_id_root: self.local_id_root,
647 data: &self.pat_adjustments,
651 pub fn pat_adjustments_mut(&mut self)
652 -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
653 LocalTableInContextMut {
654 local_id_root: self.local_id_root,
655 data: &mut self.pat_adjustments,
659 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
660 self.upvar_capture_map[&upvar_id]
663 pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, ast::Name)> {
664 LocalTableInContext {
665 local_id_root: self.local_id_root,
666 data: &self.closure_kind_origins
670 pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<'_, (Span, ast::Name)> {
671 LocalTableInContextMut {
672 local_id_root: self.local_id_root,
673 data: &mut self.closure_kind_origins
677 pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> {
678 LocalTableInContext {
679 local_id_root: self.local_id_root,
680 data: &self.liberated_fn_sigs
684 pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> {
685 LocalTableInContextMut {
686 local_id_root: self.local_id_root,
687 data: &mut self.liberated_fn_sigs
691 pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
692 LocalTableInContext {
693 local_id_root: self.local_id_root,
694 data: &self.fru_field_types
698 pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
699 LocalTableInContextMut {
700 local_id_root: self.local_id_root,
701 data: &mut self.fru_field_types
705 pub fn cast_kinds(&self) -> LocalTableInContext<'_, ty::cast::CastKind> {
706 LocalTableInContext {
707 local_id_root: self.local_id_root,
708 data: &self.cast_kinds
712 pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<'_, ty::cast::CastKind> {
713 LocalTableInContextMut {
714 local_id_root: self.local_id_root,
715 data: &mut self.cast_kinds
// Incremental-compilation fingerprinting for `TypeckTables`. The
// exhaustive destructuring ensures a compile error if a field is added
// without updating the hash. NOTE(review): several `ref` lines of the
// destructuring and the upvar closure header are elided in this excerpt;
// field order below is hash-relevant, so the code must not be reordered.
720 impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
721 fn hash_stable<W: StableHasherResult>(&self,
722 hcx: &mut StableHashingContext<'a>,
723 hasher: &mut StableHasher<W>) {
724 let ty::TypeckTables {
726 ref type_dependent_defs,
728 ref user_provided_types,
729 ref user_provided_sigs,
733 ref pat_binding_modes,
735 ref upvar_capture_map,
736 ref closure_kind_origins,
737 ref liberated_fn_sigs,
742 ref used_trait_imports,
745 ref concrete_existential_types,
// Hash NodeIds via their stable DefPath so results survive re-numbering.
748 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
749 type_dependent_defs.hash_stable(hcx, hasher);
750 field_indices.hash_stable(hcx, hasher);
751 user_provided_types.hash_stable(hcx, hasher);
752 user_provided_sigs.hash_stable(hcx, hasher);
753 node_types.hash_stable(hcx, hasher);
754 node_substs.hash_stable(hcx, hasher);
755 adjustments.hash_stable(hcx, hasher);
756 pat_binding_modes.hash_stable(hcx, hasher);
757 pat_adjustments.hash_stable(hcx, hasher);
// Upvar keys contain raw HirIds; map them to stable DefPath hashes first.
758 hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
765 local_id_root.expect("trying to hash invalid TypeckTables");
767 let var_owner_def_id = DefId {
768 krate: local_id_root.krate,
769 index: var_path.hir_id.owner,
771 let closure_def_id = DefId {
772 krate: local_id_root.krate,
773 index: closure_expr_id.to_def_id().index,
775 (hcx.def_path_hash(var_owner_def_id),
776 var_path.hir_id.local_id,
777 hcx.def_path_hash(closure_def_id))
780 closure_kind_origins.hash_stable(hcx, hasher);
781 liberated_fn_sigs.hash_stable(hcx, hasher);
782 fru_field_types.hash_stable(hcx, hasher);
783 cast_kinds.hash_stable(hcx, hasher);
784 used_trait_imports.hash_stable(hcx, hasher);
785 tainted_by_errors.hash_stable(hcx, hasher);
786 free_region_map.hash_stable(hcx, hasher);
787 concrete_existential_types.hash_stable(hcx, hasher);
// Index type for user type annotations. NOTE(review): this is the interior
// of a `newtype_index!` macro invocation; the macro's opening line is
// elided in this excerpt.
793 pub struct UserTypeAnnotationIndex {
794 DEBUG_FORMAT = "UserTypeAnnotation({})",
795 const START_INDEX = 0,
799 /// Mapping of type annotation indices to canonical user type annotations.
800 pub type CanonicalUserTypeAnnotations<'tcx> =
801 IndexVec<UserTypeAnnotationIndex, (Span, CanonicalUserTypeAnnotation<'tcx>)>;
803 /// Canonicalized user type annotation.
804 pub type CanonicalUserTypeAnnotation<'gcx> = Canonical<'gcx, UserTypeAnnotation<'gcx>>;
// NOTE(review): several match arms and the closing of this method are
// elided in this excerpt (original lines ~810, ~814-816, ~823-827, ~833+).
806 impl CanonicalUserTypeAnnotation<'gcx> {
807 /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`,
808 /// i.e. each thing is mapped to a canonical variable with the same index.
809 pub fn is_identity(&self) -> bool {
// A plain type annotation is never the identity substitution.
811 UserTypeAnnotation::Ty(_) => false,
812 UserTypeAnnotation::TypeOf(_, user_substs) => {
// A user-given self type rules out the identity form.
813 if user_substs.user_self_ty.is_some() {
// Check position i maps to canonical variable ?i, for every i.
817 user_substs.substs.iter().zip(BoundVar::new(0)..).all(|(kind, cvar)| {
818 match kind.unpack() {
819 UnpackedKind::Type(ty) => match ty.sty {
820 ty::Bound(debruijn, b) => {
821 // We only allow a `ty::INNERMOST` index in substitutions.
822 assert_eq!(debruijn, ty::INNERMOST);
828 UnpackedKind::Lifetime(r) => match r {
829 ty::ReLateBound(debruijn, br) => {
830 // We only allow a `ty::INNERMOST` index in substitutions.
831 assert_eq!(*debruijn, ty::INNERMOST);
832 cvar == br.assert_bound_var()
843 /// A user-given type annotation attached to a constant. These arise
844 /// from constants that are named via paths, like `Foo::<A>::new` and
846 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
// NOTE(review): the `Ty(..)` variant's declaration (original lines
// ~848-849) is elided in this excerpt; it is matched in `is_identity`.
847 pub enum UserTypeAnnotation<'tcx> {
850 /// The canonical type is the result of `type_of(def_id)` with the
851 /// given substitutions applied.
852 TypeOf(DefId, UserSubsts<'tcx>),
// Boilerplate `TypeFoldable` and `Lift` impls generated by rustc's macros;
// each line names a variant and its fields to fold/lift. NOTE(review):
// the macro closing braces and the `Lift` macro header line are elided.
855 EnumTypeFoldableImpl! {
856 impl<'tcx> TypeFoldable<'tcx> for UserTypeAnnotation<'tcx> {
857 (UserTypeAnnotation::Ty)(ty),
858 (UserTypeAnnotation::TypeOf)(def, substs),
863 impl<'a, 'tcx> Lift<'tcx> for UserTypeAnnotation<'a> {
864 type Lifted = UserTypeAnnotation<'tcx>;
865 (UserTypeAnnotation::Ty)(ty),
866 (UserTypeAnnotation::TypeOf)(def, substs),
870 impl<'tcx> CommonTypes<'tcx> {
// Pre-interns the frequently used primitive types and regions once, so
// later lookups are pointer copies. NOTE(review): the `CommonTypes { .. }`
// opener and some initializers (e.g. bool/char/str, original ~879-884)
// are elided in this excerpt.
871 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
// `mk` interns a TyKind using the same interner set for local and global.
872 let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty);
873 let mk_region = |r| {
874 interners.region.borrow_mut().intern(r, |r| {
875 Interned(interners.arena.alloc(r))
880 unit: mk(Tuple(List::empty())),
885 isize: mk(Int(ast::IntTy::Isize)),
886 i8: mk(Int(ast::IntTy::I8)),
887 i16: mk(Int(ast::IntTy::I16)),
888 i32: mk(Int(ast::IntTy::I32)),
889 i64: mk(Int(ast::IntTy::I64)),
890 i128: mk(Int(ast::IntTy::I128)),
891 usize: mk(Uint(ast::UintTy::Usize)),
892 u8: mk(Uint(ast::UintTy::U8)),
893 u16: mk(Uint(ast::UintTy::U16)),
894 u32: mk(Uint(ast::UintTy::U32)),
895 u64: mk(Uint(ast::UintTy::U64)),
896 u128: mk(Uint(ast::UintTy::U128)),
897 f32: mk(Float(ast::FloatTy::F32)),
898 f64: mk(Float(ast::FloatTy::F64)),
900 re_empty: mk_region(RegionKind::ReEmpty),
901 re_static: mk_region(RegionKind::ReStatic),
902 re_erased: mk_region(RegionKind::ReErased),
907 // This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime
// NOTE(review): the derive line and the `def_id` field (original ~912)
// are elided in this excerpt.
910 pub struct FreeRegionInfo {
911 // def id corresponding to FreeRegion
913 // the bound region corresponding to FreeRegion
914 pub boundregion: ty::BoundRegion,
915 // checks if bound region is in Impl Item
916 pub is_impl_item: bool,
919 /// The central data structure of the compiler. It stores references
920 /// to the various **arenas** and also houses the results of the
921 /// various **compiler queries** that have been performed. See the
922 /// [rustc guide] for more details.
924 /// [rustc guide]: https://rust-lang.github.io/rustc-guide/ty.html
// `TyCtxt` is `Copy`: it is just two references, so it is passed by value
// everywhere. `'gcx` is the global context lifetime, `'tcx` the (possibly
// shorter) inference-context lifetime.
925 #[derive(Copy, Clone)]
926 pub struct TyCtxt<'a, 'gcx: 'tcx, 'tcx: 'a> {
927 gcx: &'gcx GlobalCtxt<'gcx>,
// Points at the local interners during inference, or the global ones.
928 interners: &'tcx CtxtInterners<'tcx>,
// Carries the otherwise-unused 'a parameter.
929 dummy: PhantomData<&'a ()>,
// Auto-deref from `TyCtxt` to the underlying `&GlobalCtxt`, so global
// fields/methods can be reached directly through a `TyCtxt` value.
// NOTE(review): the body line (presumably `&self.gcx`) is elided here.
932 impl<'gcx> Deref for TyCtxt<'_, 'gcx, '_> {
933 type Target = &'gcx GlobalCtxt<'gcx>;
935 fn deref(&self) -> &Self::Target {
// The global, session-wide context that `TyCtxt` wraps: arenas,
// interners, query engine, resolver outputs, and assorted caches.
940 pub struct GlobalCtxt<'tcx> {
941 global_arenas: &'tcx WorkerLocal<GlobalArenas<'tcx>>,
942 global_interners: CtxtInterners<'tcx>,
944 cstore: &'tcx CrateStoreDyn,
946 pub sess: &'tcx Session,
948 pub dep_graph: DepGraph,
950 /// Common types, pre-interned for your convenience.
951 pub types: CommonTypes<'tcx>,
953 /// Map indicating what traits are in scope for places where this
954 /// is relevant; generated by resolve.
955 trait_map: FxHashMap<DefIndex,
956 Lrc<FxHashMap<ItemLocalId,
957 Lrc<StableVec<TraitCandidate>>>>>,
959 /// Export map produced by name resolution.
960 export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,
962 hir_map: hir_map::Map<'tcx>,
964 /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
965 /// as well as all upstream crates. Only populated in incremental mode.
966 pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
968 pub queries: query::Queries<'tcx>,
970 // Records the free variables referenced by every closure
971 // expression. Do not track deps for this, just recompute it from
972 // scratch every time.
973 freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,
975 maybe_unused_trait_imports: FxHashSet<DefId>,
976 maybe_unused_extern_crates: Vec<(DefId, Span)>,
977 /// Extern prelude entries. The value is `true` if the entry was introduced
978 /// via `extern crate` item and not `--extern` option or compiler built-in.
979 pub extern_prelude: FxHashMap<ast::Name, bool>,
981 // Internal cache for metadata decoding. No need to track deps on this.
982 pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
984 /// Caches the results of trait selection. This cache is used
985 /// for things that do not have to do with the parameters in scope.
986 pub selection_cache: traits::SelectionCache<'tcx>,
988 /// Caches the results of trait evaluation. This cache is used
989 /// for things that do not have to do with the parameters in scope.
990 /// Merge this with `selection_cache`?
991 pub evaluation_cache: traits::EvaluationCache<'tcx>,
993 /// The definite name of the current crate after taking into account
994 /// attributes, commandline parameters, etc.
995 pub crate_name: Symbol,
997 /// Data layout specification for the current target.
998 pub data_layout: TargetDataLayout,
// Deduplication tables for stability attrs, const allocations, layouts.
1000 stability_interner: Lock<FxHashMap<&'tcx attr::Stability, ()>>,
1002 /// Stores the value of constants (and deduplicates the actual memory)
1003 allocation_interner: Lock<FxHashMap<&'tcx Allocation, ()>>,
1005 pub alloc_map: Lock<interpret::AllocMap<'tcx>>,
1007 layout_interner: Lock<FxHashMap<&'tcx LayoutDetails, ()>>,
1009 /// A general purpose channel to throw data out the back towards LLVM worker
1012 /// This is intended to only get used during the codegen phase of the compiler
1013 /// when satisfying the query for a particular codegen unit. Internally in
1014 /// the query it'll send data along this channel to get processed later.
1015 pub tx_to_llvm_workers: Lock<mpsc::Sender<Box<dyn Any + Send>>>,
1017 output_filenames: Arc<OutputFilenames>,
// Allocation/interning helpers on `TyCtxt`. NOTE(review): this excerpt is
// truncated — several signature lines, closing braces, and the tail of
// `layout_scalar_valid_range` are elided; code lines are kept verbatim.
1020 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
1021 /// Get the global TyCtxt.
1023 pub fn global_tcx(self) -> TyCtxt<'gcx, 'gcx, 'gcx> {
1026 interners: &self.gcx.global_interners,
// Accessor for the HIR map.
1032 pub fn hir(self) -> &'a hir_map::Map<'gcx> {
// The following `alloc_*` methods place query results in the 'gcx arenas.
1036 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
1037 self.global_arenas.generics.alloc(generics)
1040 pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
1041 self.global_arenas.steal_mir.alloc(Steal::new(mir))
1044 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
1045 self.global_arenas.mir.alloc(mir)
1048 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
1049 self.global_arenas.tables.alloc(tables)
1052 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
1053 self.global_arenas.trait_def.alloc(def)
1056 pub fn alloc_adt_def(self,
1059 variants: IndexVec<VariantIdx, ty::VariantDef>,
1061 -> &'gcx ty::AdtDef {
1062 let def = ty::AdtDef::new(self, did, kind, variants, repr);
1063 self.global_arenas.adt_def.alloc(def)
// Empty slices are special-cased (elided here) to avoid arena traffic.
1066 pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
1067 if bytes.is_empty() {
1070 self.global_interners.arena.alloc_slice(bytes)
1074 pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
1075 -> &'tcx [&'tcx ty::Const<'tcx>] {
1076 if values.is_empty() {
1079 self.interners.arena.alloc_slice(values)
1083 pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
1084 -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
1085 if values.is_empty() {
1088 self.interners.arena.alloc_slice(values)
// Interns a constant allocation, deduplicating identical memory.
1092 pub fn intern_const_alloc(
1095 ) -> &'gcx Allocation {
1096 self.allocation_interner.borrow_mut().intern(alloc, |alloc| {
1097 self.global_arenas.const_allocs.alloc(alloc)
1101 /// Allocates a byte or string literal for `mir::interpret`, read-only
1102 pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
1103 // create an allocation that just contains these bytes
1104 let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes, ());
1105 let alloc = self.intern_const_alloc(alloc);
1106 self.alloc_map.lock().allocate(alloc)
1109 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
1110 self.stability_interner.borrow_mut().intern(stab, |stab| {
1111 self.global_interners.arena.alloc(stab)
1115 pub fn intern_lazy_const(self, c: ty::LazyConst<'tcx>) -> &'tcx ty::LazyConst<'tcx> {
1116 self.global_interners.arena.alloc(c)
1119 pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
1120 self.layout_interner.borrow_mut().intern(layout, |layout| {
1121 self.global_arenas.layout.alloc(layout)
1125 /// Returns a range of the start/end indices specified with the
1126 /// `rustc_layout_scalar_valid_range` attribute.
1127 pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound<u128>, Bound<u128>) {
1128 let attrs = self.get_attrs(def_id);
1130 let attr = match attrs.iter().find(|a| a.check_name(name)) {
1132 None => return Bound::Unbounded,
1134 for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") {
1135 match meta.literal().expect("attribute takes lit").node {
1136 ast::LitKind::Int(a, _) => return Bound::Included(a),
1137 _ => span_bug!(attr.span, "rustc_layout_scalar_valid_range expects int arg"),
1140 span_bug!(attr.span, "no arguments to `rustc_layout_scalar_valid_range` attribute");
1142 (get("rustc_layout_scalar_valid_range_start"), get("rustc_layout_scalar_valid_range_end"))
1145 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
1146 value.lift_to_tcx(self)
1149 /// Like lift, but only tries in the global tcx.
1150 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
1151 value.lift_to_tcx(self.global_tcx())
1154 /// Returns true if self is the same as self.global_tcx().
1155 fn is_global(self) -> bool {
1156 let local = self.interners as *const _;
1157 let global = &self.global_interners as *const _;
1158 local as usize == global as usize
1161 /// Create a type context and call the closure with a `TyCtxt` reference
1162 /// to the context. The closure enforces that the type context and any interned
1163 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
1164 /// reference to the context, to allow formatting values that need it.
1165 pub fn create_and_enter<F, R>(s: &'tcx Session,
1166 cstore: &'tcx CrateStoreDyn,
1167 local_providers: ty::query::Providers<'tcx>,
1168 extern_providers: ty::query::Providers<'tcx>,
1169 arenas: &'tcx mut AllArenas<'tcx>,
1170 resolutions: ty::Resolutions,
1171 hir: hir_map::Map<'tcx>,
1172 on_disk_query_result_cache: query::OnDiskCache<'tcx>,
1174 tx: mpsc::Sender<Box<dyn Any + Send>>,
1175 output_filenames: &OutputFilenames,
1177 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
1179 let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
1182 let interners = CtxtInterners::new(&arenas.interner);
1183 let common_types = CommonTypes::new(&interners);
1184 let dep_graph = hir.dep_graph.clone();
1185 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1186 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1187 providers[LOCAL_CRATE] = local_providers;
1189 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1190 let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
1193 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
1196 let def_path_tables = || {
1197 upstream_def_path_tables
1199 .map(|&(cnum, ref rc)| (cnum, &**rc))
1200 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1203 // Precompute the capacity of the hashmap so we don't have to
1204 // re-allocate when populating it.
1205 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();
1207 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1209 ::std::default::Default::default()
1212 for (cnum, def_path_table) in def_path_tables() {
1213 def_path_table.add_def_path_hashes_to(cnum, &mut map);
1221 let mut trait_map: FxHashMap<_, Lrc<FxHashMap<_, _>>> = FxHashMap::default();
1222 for (k, v) in resolutions.trait_map {
1223 let hir_id = hir.node_to_hir_id(k);
1224 let map = trait_map.entry(hir_id.owner).or_default();
1225 Lrc::get_mut(map).unwrap()
1226 .insert(hir_id.local_id,
1227 Lrc::new(StableVec::new(v)));
1230 arenas.global_ctxt = Some(GlobalCtxt {
1233 global_arenas: &arenas.global,
1234 global_interners: interners,
1236 types: common_types,
1238 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1241 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1242 (hir.local_def_id(k), Lrc::new(v))
1244 maybe_unused_trait_imports:
1245 resolutions.maybe_unused_trait_imports
1247 .map(|id| hir.local_def_id(id))
1249 maybe_unused_extern_crates:
1250 resolutions.maybe_unused_extern_crates
1252 .map(|(id, sp)| (hir.local_def_id(id), sp))
1254 extern_prelude: resolutions.extern_prelude,
1256 def_path_hash_to_def_id,
1257 queries: query::Queries::new(
1260 on_disk_query_result_cache,
1262 rcache: Default::default(),
1263 selection_cache: Default::default(),
1264 evaluation_cache: Default::default(),
1265 crate_name: Symbol::intern(crate_name),
1267 layout_interner: Default::default(),
1268 stability_interner: Default::default(),
1269 allocation_interner: Default::default(),
1270 alloc_map: Lock::new(interpret::AllocMap::new()),
1271 tx_to_llvm_workers: Lock::new(tx),
1272 output_filenames: Arc::new(output_filenames.clone()),
1275 let gcx = arenas.global_ctxt.as_ref().unwrap();
1277 sync::assert_send_val(&gcx);
1279 tls::enter_global(gcx, f)
1282 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1283 let cname = self.crate_name(LOCAL_CRATE).as_str();
1284 self.sess.consider_optimizing(&cname, msg)
1287 pub fn lib_features(self) -> Lrc<middle::lib_features::LibFeatures> {
1288 self.get_lib_features(LOCAL_CRATE)
1291 pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
1292 self.get_lang_items(LOCAL_CRATE)
1295 /// Due to missing llvm support for lowering 128 bit math to software emulation
1296 /// (on some targets), the lowering can be done in MIR.
1298 /// This function only exists until said support is implemented.
1299 pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
1300 let items = self.lang_items();
1301 let def_id = Some(def_id);
1302 if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1303 else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1304 else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1305 else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1306 else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1307 else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1308 else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1309 else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1310 else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1311 else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1312 else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1313 else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1314 else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1315 else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1316 else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1317 else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1318 else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1319 else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1320 else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1321 else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1322 else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1323 else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1324 else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1325 else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1329 pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
1330 self.stability_index(LOCAL_CRATE)
1333 pub fn crates(self) -> Lrc<Vec<CrateNum>> {
1334 self.all_crate_nums(LOCAL_CRATE)
1337 pub fn features(self) -> Lrc<feature_gate::Features> {
1338 self.features_query(LOCAL_CRATE)
1341 pub fn def_key(self, id: DefId) -> hir_map::DefKey {
1343 self.hir().def_key(id)
1345 self.cstore.def_key(id)
1349 /// Convert a `DefId` into its fully expanded `DefPath` (every
1350 /// `DefId` is really just an interned def-path).
1352 /// Note that if `id` is not local to this crate, the result will
1353 /// be a non-local `DefPath`.
1354 pub fn def_path(self, id: DefId) -> hir_map::DefPath {
1356 self.hir().def_path(id)
1358 self.cstore.def_path(id)
1363 pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
1364 if def_id.is_local() {
1365 self.hir().definitions().def_path_hash(def_id.index)
1367 self.cstore.def_path_hash(def_id)
1371 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1372 // We are explicitly not going through queries here in order to get
1373 // crate name and disambiguator since this code is called from debug!()
1374 // statements within the query system and we'd run into endless
1375 // recursion otherwise.
1376 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1377 (self.crate_name.clone(),
1378 self.sess.local_crate_disambiguator())
1380 (self.cstore.crate_name_untracked(def_id.krate),
1381 self.cstore.crate_disambiguator_untracked(def_id.krate))
1386 // Don't print the whole crate disambiguator. That's just
1387 // annoying in debug output.
1388 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1389 self.def_path(def_id).to_string_no_crate())
1392 pub fn metadata_encoding_version(self) -> Vec<u8> {
1393 self.cstore.metadata_encoding_version().to_vec()
1396 // Note that this is *untracked* and should only be used within the query
1397 // system if the result is otherwise tracked through queries
1398 pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
1399 self.cstore.crate_data_as_rc_any(cnum)
1403 pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
1404 let krate = self.gcx.hir_map.forest.untracked_krate();
1406 StableHashingContext::new(self.sess,
1408 self.hir().definitions(),
1412 // This method makes sure that we have a DepNode and a Fingerprint for
1413 // every upstream crate. It needs to be called once right after the tcx is
1415 // With full-fledged red/green, the method will probably become unnecessary
1416 // as this will be done on-demand.
1417 pub fn allocate_metadata_dep_nodes(self) {
1418 // We cannot use the query versions of crates() and crate_hash(), since
1419 // those would need the DepNodes that we are allocating here.
1420 for cnum in self.cstore.crates_untracked() {
1421 let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
1422 let crate_hash = self.cstore.crate_hash_untracked(cnum);
1423 self.dep_graph.with_task(dep_node,
1426 |_, x| x // No transformation needed
1431 // This method exercises the `in_scope_traits_map` query for all possible
1432 // values so that we have their fingerprints available in the DepGraph.
1433 // This is only required as long as we still use the old dependency tracking
1434 // which needs to have the fingerprints of all input nodes beforehand.
1435 pub fn precompute_in_scope_traits_hashes(self) {
1436 for &def_index in self.trait_map.keys() {
1437 self.in_scope_traits_map(def_index);
1441 pub fn serialize_query_result_cache<E>(self,
1443 -> Result<(), E::Error>
1444 where E: ty::codec::TyEncoder
1446 self.queries.on_disk_cache.serialize(self.global_tcx(), encoder)
1449 /// This checks whether one is allowed to have pattern bindings
1450 /// that bind-by-move on a match arm that has a guard, e.g.:
1453 /// match foo { A(inner) if { /* something */ } => ..., ... }
1456 /// It is separate from check_for_mutation_in_guard_via_ast_walk,
1457 /// because that method has a narrower effect that can be toggled
1458 /// off via a separate `-Z` flag, at least for the short term.
1459 pub fn allow_bind_by_move_patterns_with_guards(self) -> bool {
1460 self.features().bind_by_move_pattern_guards && self.use_mir_borrowck()
1463 /// If true, we should use a naive AST walk to determine if match
1464 /// guard could perform bad mutations (or mutable-borrows).
1465 pub fn check_for_mutation_in_guard_via_ast_walk(self) -> bool {
1466 // If someone requests the feature, then be a little more
1467 // careful and ensure that MIR-borrowck is enabled (which can
1468 // happen via edition selection, via `feature(nll)`, or via an
1469 // appropriate `-Z` flag) before disabling the mutation check.
1470 if self.allow_bind_by_move_patterns_with_guards() {
1477 /// If true, we should use the AST-based borrowck (we may *also* use
1478 /// the MIR-based borrowck).
1479 pub fn use_ast_borrowck(self) -> bool {
1480 self.borrowck_mode().use_ast()
1483 /// If true, we should use the MIR-based borrowck (we may *also* use
1484 /// the AST-based borrowck).
1485 pub fn use_mir_borrowck(self) -> bool {
1486 self.borrowck_mode().use_mir()
1489 /// If true, we should use the MIR-based borrow check, but also
1490 /// fall back on the AST borrow check if the MIR-based one errors.
1491 pub fn migrate_borrowck(self) -> bool {
1492 self.borrowck_mode().migrate()
1495 /// If true, make MIR codegen for `match` emit a temp that holds a
1496 /// borrow of the input to the match expression.
1497 pub fn generate_borrow_of_any_match_input(&self) -> bool {
1498 self.emit_read_for_match()
1501 /// If true, make MIR codegen for `match` emit FakeRead
1502 /// statements (which simulate the maximal effect of executing the
1503 /// patterns in a match arm).
1504 pub fn emit_read_for_match(&self) -> bool {
1505 self.use_mir_borrowck() && !self.sess.opts.debugging_opts.nll_dont_emit_read_for_match
1508 /// If true, pattern variables for use in guards on match arms
1509 /// will be bound as references to the data, and occurrences of
1510 /// those variables in the guard expression will implicitly
1511 /// dereference those bindings. (See rust-lang/rust#27282.)
1512 pub fn all_pat_vars_are_implicit_refs_within_guards(self) -> bool {
1513 self.borrowck_mode().use_mir()
1516 /// If true, we should enable two-phase borrows checks. This is
1517 /// done with either: `-Ztwo-phase-borrows`, `#![feature(nll)]`,
1518 /// or by opting into an edition after 2015.
1519 pub fn two_phase_borrows(self) -> bool {
1520 self.sess.rust_2018() || self.features().nll ||
1521 self.sess.opts.debugging_opts.two_phase_borrows
1524 /// What mode(s) of borrowck should we run? AST? MIR? both?
1525 /// (Also considers the `#![feature(nll)]` setting.)
1526 pub fn borrowck_mode(&self) -> BorrowckMode {
1527 // Here are the main constraints we need to deal with:
1529 // 1. An opts.borrowck_mode of `BorrowckMode::Ast` is
1530 // synonymous with no `-Z borrowck=...` flag at all.
1531 // (This is arguably a historical accident.)
1533 // 2. `BorrowckMode::Migrate` is the limited migration to
1534 // NLL that we are deploying with the 2018 edition.
1536 // 3. We want to allow developers on the Nightly channel
1537 // to opt back into the "hard error" mode for NLL,
1538 // (which they can do via specifying `#![feature(nll)]`
1539 // explicitly in their crate).
1541 // So, this precedence list is how pnkfelix chose to work with
1542 // the above constraints:
1544 // * `#![feature(nll)]` *always* means use NLL with hard
1545 // errors. (To simplify the code here, it now even overrides
1546 // a user's attempt to specify `-Z borrowck=compare`, which
1547 // we arguably do not need anymore and should remove.)
1549 // * Otherwise, if no `-Z borrowck=...` flag was given (or
1550 // if `borrowck=ast` was specified), then use the default
1551 // as required by the edition.
1553 // * Otherwise, use the behavior requested via `-Z borrowck=...`
1555 if self.features().nll { return BorrowckMode::Mir; }
1557 match self.sess.opts.borrowck_mode {
1558 mode @ BorrowckMode::Mir |
1559 mode @ BorrowckMode::Compare |
1560 mode @ BorrowckMode::Migrate => mode,
1562 BorrowckMode::Ast => match self.sess.edition() {
1563 Edition::Edition2015 => BorrowckMode::Ast,
1564 Edition::Edition2018 => BorrowckMode::Migrate,
1570 pub fn local_crate_exports_generics(self) -> bool {
1571 debug_assert!(self.sess.opts.share_generics());
1573 self.sess.crate_types.borrow().iter().any(|crate_type| {
1575 CrateType::Executable |
1576 CrateType::Staticlib |
1577 CrateType::ProcMacro |
1578 CrateType::Cdylib => false,
1580 CrateType::Dylib => true,
1585 // This method returns the DefId and the BoundRegion corresponding to the given region.
1586 pub fn is_suitable_region(&self, region: Region<'tcx>) -> Option<FreeRegionInfo> {
1587 let (suitable_region_binding_scope, bound_region) = match *region {
1588 ty::ReFree(ref free_region) => (free_region.scope, free_region.bound_region),
1589 ty::ReEarlyBound(ref ebr) => (
1590 self.parent_def_id(ebr.def_id).unwrap(),
1591 ty::BoundRegion::BrNamed(ebr.def_id, ebr.name),
1593 _ => return None, // not a free region
1596 let node_id = self.hir()
1597 .as_local_node_id(suitable_region_binding_scope)
1599 let is_impl_item = match self.hir().find(node_id) {
1600 Some(Node::Item(..)) | Some(Node::TraitItem(..)) => false,
1601 Some(Node::ImplItem(..)) => {
1602 self.is_bound_region_in_impl_item(suitable_region_binding_scope)
1607 return Some(FreeRegionInfo {
1608 def_id: suitable_region_binding_scope,
1609 boundregion: bound_region,
1610 is_impl_item: is_impl_item,
1614 pub fn return_type_impl_trait(
1616 scope_def_id: DefId,
1617 ) -> Option<Ty<'tcx>> {
1618 // HACK: `type_of_def_id()` will fail on these (#55796), so return None
1619 let node_id = self.hir().as_local_node_id(scope_def_id).unwrap();
1620 match self.hir().get(node_id) {
1621 Node::Item(item) => {
1623 ItemKind::Fn(..) => { /* type_of_def_id() will work */ }
1629 _ => { /* type_of_def_id() will work or panic */ }
1632 let ret_ty = self.type_of(scope_def_id);
1634 ty::FnDef(_, _) => {
1635 let sig = ret_ty.fn_sig(*self);
1636 let output = self.erase_late_bound_regions(&sig.output());
1637 if output.is_impl_trait() {
1647 // Here we check if the bound region is in Impl Item.
1648 pub fn is_bound_region_in_impl_item(
1650 suitable_region_binding_scope: DefId,
1652 let container_id = self.associated_item(suitable_region_binding_scope)
1655 if self.impl_trait_ref(container_id).is_some() {
1656 // For now, we do not try to target impls of traits. This is
1657 // because this message is going to suggest that the user
1658 // change the fn signature, but they may not be free to do so,
1659 // since the signature must match the trait.
1661 // FIXME(#42706) -- in some cases, we could do better here.
1668 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
1669 pub fn encode_metadata(self)
1672 self.cstore.encode_metadata(self)
1676 impl<'gcx> GlobalCtxt<'gcx> {
1677 /// Call the closure with a local `TyCtxt` using the given arena.
1678 /// `interners` is a slot passed so we can create a CtxtInterners
1679 /// with the same lifetime as `arena`.
1680 pub fn enter_local<'tcx, F, R>(
1682 arena: &'tcx SyncDroplessArena,
1683 interners: &'tcx mut Option<CtxtInterners<'tcx>>,
1687 F: FnOnce(TyCtxt<'tcx, 'gcx, 'tcx>) -> R,
1690 *interners = Some(CtxtInterners::new(&arena));
1693 interners: interners.as_ref().unwrap(),
1696 ty::tls::with_related_context(tcx.global_tcx(), |icx| {
1697 let new_icx = ty::tls::ImplicitCtxt {
1699 query: icx.query.clone(),
1700 layout_depth: icx.layout_depth,
1703 ty::tls::enter_context(&new_icx, |_| {
1710 /// A trait implemented for all X<'a> types which can be safely and
1711 /// efficiently converted to X<'tcx> as long as they are part of the
1712 /// provided TyCtxt<'tcx>.
1713 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1714 /// by looking them up in their respective interners.
1716 /// However, this is still not the best implementation as it does
1717 /// need to compare the components, even for interned values.
1718 /// It would be more efficient if TypedArena provided a way to
1719 /// determine whether the address is in the allocated range.
1721 /// None is returned if the value or one of the components is not part
1722 /// of the provided context.
1723 /// For Ty, None can be returned if either the type interner doesn't
1724 /// contain the TyKind key or if the address of the interned
1725 /// pointer differs. The latter case is possible if a primitive type,
1726 /// e.g., `()` or `u8`, was interned in a different context.
1727 pub trait Lift<'tcx>: fmt::Debug {
1728 type Lifted: fmt::Debug + 'tcx;
1729 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
1732 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1733 type Lifted = Ty<'tcx>;
1734 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1735 if tcx.interners.arena.in_arena(*self as *const _) {
1736 return Some(unsafe { mem::transmute(*self) });
1738 // Also try in the global tcx if we're not that.
1739 if !tcx.is_global() {
1740 self.lift_to_tcx(tcx.global_tcx())
1747 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1748 type Lifted = Region<'tcx>;
1749 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1750 if tcx.interners.arena.in_arena(*self as *const _) {
1751 return Some(unsafe { mem::transmute(*self) });
1753 // Also try in the global tcx if we're not that.
1754 if !tcx.is_global() {
1755 self.lift_to_tcx(tcx.global_tcx())
1762 impl<'a, 'tcx> Lift<'tcx> for Goal<'a> {
1763 type Lifted = Goal<'tcx>;
1764 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Goal<'tcx>> {
1765 if tcx.interners.arena.in_arena(*self as *const _) {
1766 return Some(unsafe { mem::transmute(*self) });
1768 // Also try in the global tcx if we're not that.
1769 if !tcx.is_global() {
1770 self.lift_to_tcx(tcx.global_tcx())
1777 impl<'a, 'tcx> Lift<'tcx> for &'a List<Goal<'a>> {
1778 type Lifted = &'tcx List<Goal<'tcx>>;
1779 fn lift_to_tcx<'b, 'gcx>(
1781 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1782 ) -> Option<&'tcx List<Goal<'tcx>>> {
1783 if self.is_empty() {
1784 return Some(List::empty());
1787 if tcx.interners.arena.in_arena(*self as *const _) {
1788 return Some(unsafe { mem::transmute(*self) });
1790 // Also try in the global tcx if we're not that.
1791 if !tcx.is_global() {
1792 self.lift_to_tcx(tcx.global_tcx())
1799 impl<'a, 'tcx> Lift<'tcx> for &'a List<Clause<'a>> {
1800 type Lifted = &'tcx List<Clause<'tcx>>;
1801 fn lift_to_tcx<'b, 'gcx>(
1803 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1804 ) -> Option<&'tcx List<Clause<'tcx>>> {
1805 if self.is_empty() {
1806 return Some(List::empty());
1809 if tcx.interners.arena.in_arena(*self as *const _) {
1810 return Some(unsafe { mem::transmute(*self) });
1812 // Also try in the global tcx if we're not that.
1813 if !tcx.is_global() {
1814 self.lift_to_tcx(tcx.global_tcx())
1821 impl<'a, 'tcx> Lift<'tcx> for &'a LazyConst<'a> {
1822 type Lifted = &'tcx LazyConst<'tcx>;
1823 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx LazyConst<'tcx>> {
1824 if tcx.interners.arena.in_arena(*self as *const _) {
1825 return Some(unsafe { mem::transmute(*self) });
1827 // Also try in the global tcx if we're not that.
1828 if !tcx.is_global() {
1829 self.lift_to_tcx(tcx.global_tcx())
1836 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1837 type Lifted = &'tcx Const<'tcx>;
1838 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1839 if tcx.interners.arena.in_arena(*self as *const _) {
1840 return Some(unsafe { mem::transmute(*self) });
1842 // Also try in the global tcx if we're not that.
1843 if !tcx.is_global() {
1844 self.lift_to_tcx(tcx.global_tcx())
1851 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1852 type Lifted = &'tcx Substs<'tcx>;
1853 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1854 if self.len() == 0 {
1855 return Some(List::empty());
1857 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1858 return Some(unsafe { mem::transmute(*self) });
1860 // Also try in the global tcx if we're not that.
1861 if !tcx.is_global() {
1862 self.lift_to_tcx(tcx.global_tcx())
1869 impl<'a, 'tcx> Lift<'tcx> for &'a List<Ty<'a>> {
1870 type Lifted = &'tcx List<Ty<'tcx>>;
1871 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1872 -> Option<&'tcx List<Ty<'tcx>>> {
1873 if self.len() == 0 {
1874 return Some(List::empty());
1876 if tcx.interners.arena.in_arena(*self as *const _) {
1877 return Some(unsafe { mem::transmute(*self) });
1879 // Also try in the global tcx if we're not that.
1880 if !tcx.is_global() {
1881 self.lift_to_tcx(tcx.global_tcx())
1888 impl<'a, 'tcx> Lift<'tcx> for &'a List<ExistentialPredicate<'a>> {
1889 type Lifted = &'tcx List<ExistentialPredicate<'tcx>>;
1890 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1891 -> Option<&'tcx List<ExistentialPredicate<'tcx>>> {
1892 if self.is_empty() {
1893 return Some(List::empty());
1895 if tcx.interners.arena.in_arena(*self as *const _) {
1896 return Some(unsafe { mem::transmute(*self) });
1898 // Also try in the global tcx if we're not that.
1899 if !tcx.is_global() {
1900 self.lift_to_tcx(tcx.global_tcx())
1907 impl<'a, 'tcx> Lift<'tcx> for &'a List<Predicate<'a>> {
1908 type Lifted = &'tcx List<Predicate<'tcx>>;
1909 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1910 -> Option<&'tcx List<Predicate<'tcx>>> {
1911 if self.is_empty() {
1912 return Some(List::empty());
1914 if tcx.interners.arena.in_arena(*self as *const _) {
1915 return Some(unsafe { mem::transmute(*self) });
1917 // Also try in the global tcx if we're not that.
1918 if !tcx.is_global() {
1919 self.lift_to_tcx(tcx.global_tcx())
1926 impl<'a, 'tcx> Lift<'tcx> for &'a List<CanonicalVarInfo> {
1927 type Lifted = &'tcx List<CanonicalVarInfo>;
1928 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1929 if self.len() == 0 {
1930 return Some(List::empty());
1932 if tcx.interners.arena.in_arena(*self as *const _) {
1933 return Some(unsafe { mem::transmute(*self) });
1935 // Also try in the global tcx if we're not that.
1936 if !tcx.is_global() {
1937 self.lift_to_tcx(tcx.global_tcx())
1944 impl<'a, 'tcx> Lift<'tcx> for &'a List<ProjectionKind<'a>> {
1945 type Lifted = &'tcx List<ProjectionKind<'tcx>>;
1946 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1947 if self.len() == 0 {
1948 return Some(List::empty());
1950 if tcx.interners.arena.in_arena(*self as *const _) {
1951 return Some(unsafe { mem::transmute(*self) });
1953 // Also try in the global tcx if we're not that.
1954 if !tcx.is_global() {
1955 self.lift_to_tcx(tcx.global_tcx())
1963 use super::{GlobalCtxt, TyCtxt};
1967 use std::marker::PhantomData;
1970 use errors::{Diagnostic, TRACK_DIAGNOSTICS};
1971 use rustc_data_structures::OnDrop;
1972 use rustc_data_structures::sync::{self, Lrc, Lock};
1973 use dep_graph::OpenTask;
1975 #[cfg(not(parallel_queries))]
1976 use std::cell::Cell;
1978 #[cfg(parallel_queries)]
1981 /// This is the implicit state of rustc. It contains the current
1982 /// TyCtxt and query. It is updated when creating a local interner or
1983 /// executing a new query. Whenever there's a TyCtxt value available
1984 /// you should also have access to an ImplicitCtxt through the functions
1987 pub struct ImplicitCtxt<'a, 'gcx: 'tcx, 'tcx> {
1988 /// The current TyCtxt. Initially created by `enter_global` and updated
1989 /// by `enter_local` with a new local interner
1990 pub tcx: TyCtxt<'tcx, 'gcx, 'tcx>,
1992 /// The current query job, if any. This is updated by start_job in
1993 /// ty::query::plumbing when executing a query
1994 pub query: Option<Lrc<query::QueryJob<'gcx>>>,
1996 /// Used to prevent layout from recursing too deeply.
1997 pub layout_depth: usize,
1999 /// The current dep graph task. This is used to add dependencies to queries
2000 /// when executing them
2001 pub task: &'a OpenTask,
/// Sets Rayon's thread local variable which is preserved for Rayon jobs
/// to `value` during the call to `f`. It is restored to its previous value after.
/// This is used to set the pointer to the new ImplicitCtxt.
#[cfg(parallel_queries)]
#[inline]
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
    rayon_core::tlv::with(value, f)
}
/// Gets Rayon's thread local variable which is preserved for Rayon jobs.
/// This is used to get the pointer to the current ImplicitCtxt.
#[cfg(parallel_queries)]
#[inline]
fn get_tlv() -> usize {
    rayon_core::tlv::get()
}
/// A thread local variable that stores a pointer to the current
/// ImplicitCtxt (0 means "none set").
#[cfg(not(parallel_queries))]
thread_local!(static TLV: Cell<usize> = Cell::new(0));
2025 /// Sets TLV to `value` during the call to `f`.
2026 /// It is restored to its previous value after.
2027 /// This is used to set the pointer to the new ImplicitCtxt.
2028 #[cfg(not(parallel_queries))]
2030 fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
2031 let old = get_tlv();
2032 let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
2033 TLV.with(|tlv| tlv.set(value));
2037 /// This is used to get the pointer to the current ImplicitCtxt.
2038 #[cfg(not(parallel_queries))]
2039 fn get_tlv() -> usize {
2040 TLV.with(|tlv| tlv.get())
2043 /// This is a callback from libsyntax as it cannot access the implicit state
2044 /// in librustc otherwise
2045 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2047 if let Some(tcx) = tcx {
2048 write!(f, "{}", tcx.sess.source_map().span_to_string(span))
2050 syntax_pos::default_span_debug(span, f)
2055 /// This is a callback from libsyntax as it cannot access the implicit state
2056 /// in librustc otherwise. It is used to when diagnostic messages are
2057 /// emitted and stores them in the current query, if there is one.
2058 fn track_diagnostic(diagnostic: &Diagnostic) {
2059 with_context_opt(|icx| {
2060 if let Some(icx) = icx {
2061 if let Some(ref query) = icx.query {
2062 query.diagnostics.lock().push(diagnostic.clone());
2068 /// Sets up the callbacks from libsyntax on the current thread
2069 pub fn with_thread_locals<F, R>(f: F) -> R
2070 where F: FnOnce() -> R
2072 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
// Swap in our span formatter, remembering the previous one so it can be
// restored afterwards.
2073 let original_span_debug = span_dbg.get();
2074 span_dbg.set(span_debug);
// OnDrop restores the original formatter on scope exit (even on unwind).
2076 let _on_drop = OnDrop(move || {
2077 span_dbg.set(original_span_debug);
// Same swap-and-restore pattern for the diagnostics-tracking callback.
2080 TRACK_DIAGNOSTICS.with(|current| {
2081 let original = current.get();
2082 current.set(track_diagnostic);
2084 let _on_drop = OnDrop(move || {
2085 current.set(original);
// NOTE(review): the invocation of `f()` and the closing braces are elided
// from this excerpt.
2093 /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
2095 pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
2097 where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
// Publish the context's address through the TLV slot for the duration of `f`.
2099 set_tlv(context as *const _ as usize, || {
2104 /// Enters GlobalCtxt by setting up libsyntax callbacks and
2105 /// creating a initial TyCtxt and ImplicitCtxt.
2106 /// This happens once per rustc session and TyCtxts only exists
2107 /// inside the `f` function.
2108 pub fn enter_global<'gcx, F, R>(gcx: &'gcx GlobalCtxt<'gcx>, f: F) -> R
2109 where F: FnOnce(TyCtxt<'gcx, 'gcx, 'gcx>) -> R
2111 with_thread_locals(|| {
2112 // Update GCX_PTR to indicate there's a GlobalCtxt available
2113 GCX_PTR.with(|lock| {
2114 *lock.lock() = gcx as *const _ as usize;
2116 // Set GCX_PTR back to 0 when we exit
2117 let _on_drop = OnDrop(move || {
2118 GCX_PTR.with(|lock| *lock.lock() = 0);
// NOTE(review): the construction of the TyCtxt (`gcx` field and others) is
// elided from this excerpt; only the `interners` field line is visible.
2123 interners: &gcx.global_interners,
2126 let icx = ImplicitCtxt {
2130 task: &OpenTask::Ignore,
2132 enter_context(&icx, |_| {
2138 /// Stores a pointer to the GlobalCtxt if one is available.
2139 /// This is used to access the GlobalCtxt in the deadlock handler
2141 scoped_thread_local!(pub static GCX_PTR: Lock<usize>);
2143 /// Creates a TyCtxt and ImplicitCtxt based on the GCX_PTR thread local.
2144 /// This is used in the deadlock handler.
2145 pub unsafe fn with_global<F, R>(f: F) -> R
2146 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
2148 let gcx = GCX_PTR.with(|lock| *lock.lock());
// SAFETY: relies on GCX_PTR holding the address of a live GlobalCtxt; callers
// must only invoke this while `enter_global` is active on some thread.
2150 let gcx = &*(gcx as *const GlobalCtxt<'_>);
2153 interners: &gcx.global_interners,
2156 let icx = ImplicitCtxt {
2160 task: &OpenTask::Ignore,
2162 enter_context(&icx, |_| f(tcx))
2165 /// Allows access to the current ImplicitCtxt in a closure if one is available
2167 pub fn with_context_opt<F, R>(f: F) -> R
2168 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
2170 let context = get_tlv();
// NOTE(review): the `context == 0` branch (calling `f(None)`) is elided from
// this excerpt.
2174 // We could get a ImplicitCtxt pointer from another thread.
2175 // Ensure that ImplicitCtxt is Sync
2176 sync::assert_sync::<ImplicitCtxt<'_, '_, '_>>();
// SAFETY: TLV only ever holds 0 or the address of a live ImplicitCtxt set by
// `enter_context` higher on the call stack.
2178 unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_, '_>))) }
2182 /// Allows access to the current ImplicitCtxt.
2183 /// Panics if there is no ImplicitCtxt available
2185 pub fn with_context<F, R>(f: F) -> R
2186 where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
2188 with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
2191 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
2192 /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
2193 /// with the same 'gcx lifetime as the TyCtxt passed in.
2194 /// This will panic if you pass it a TyCtxt which has a different global interner from
2195 /// the current ImplicitCtxt's tcx field.
2197 pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
2198 where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
2200 with_context(|context| {
// Compare GlobalCtxt addresses to establish that both tcx values share the
// same 'gcx before transmuting the context's lifetimes.
2202 let gcx = tcx.gcx as *const _ as usize;
2203 assert!(context.tcx.gcx as *const _ as usize == gcx);
2204 let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context);
2210 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
2211 /// interner and local interner as the tcx argument passed in. This means the closure
2212 /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
2213 /// This will panic if you pass it a TyCtxt which has a different global interner or
2214 /// a different local interner from the current ImplicitCtxt's tcx field.
2216 pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
2217 where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
2219 with_context(|context| {
// Both the global interner and the thread-local interner must match before
// the transmute below is sound.
2221 let gcx = tcx.gcx as *const _ as usize;
2222 let interners = tcx.interners as *const _ as usize;
2223 assert!(context.tcx.gcx as *const _ as usize == gcx);
2224 assert!(context.tcx.interners as *const _ as usize == interners);
2225 let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context);
2231 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2232 /// Panics if there is no ImplicitCtxt available
2234 pub fn with<F, R>(f: F) -> R
2235 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
2237 with_context(|context| f(context.tcx))
2240 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2241 /// The closure is passed None if there is no ImplicitCtxt available
2243 pub fn with_opt<F, R>(f: F) -> R
2244 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
2246 with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
// Debug-only macro: walks the type interner and prints, per listed TyKind
// variant, how many interned types exist and what fraction contain region/type
// inference variables. Invoked from `print_debug_stats` below.
2250 macro_rules! sty_debug_print {
2251 ($ctxt: expr, $($variant: ident),*) => {{
2252 // curious inner module to allow variant names to be used as
2254 #[allow(non_snake_case)]
2256 use ty::{self, TyCtxt};
2257 use ty::context::Interned;
2259 #[derive(Copy, Clone)]
2262 region_infer: usize,
2267 pub fn go(tcx: TyCtxt<'_, '_, '_>) {
2268 let mut total = DebugStat {
2270 region_infer: 0, ty_infer: 0, both_infer: 0,
// One per-variant accumulator, plus the running `total`.
2272 $(let mut $variant = total;)*
2274 for &Interned(t) in tcx.interners.type_.borrow().keys() {
2275 let variant = match t.sty {
// Primitive types are uninteresting for these statistics; skip them.
2276 ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
2277 ty::Float(..) | ty::Str | ty::Never => continue,
2278 ty::Error => /* unimportant */ continue,
2279 $(ty::$variant(..) => &mut $variant,)*
// Classify by whether the type mentions region and/or type inference vars.
2281 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
2282 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
2286 if region { total.region_infer += 1; variant.region_infer += 1 }
2287 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
2288 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
2290 println!("Ty interner total ty region both");
2291 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
2292 {ty:4.1}% {region:5.1}% {both:4.1}%",
2293 stringify!($variant),
2294 uses = $variant.total,
2295 usespc = $variant.total as f64 * 100.0 / total.total as f64,
2296 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
2297 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
2298 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
2300 println!(" total {uses:6} \
2301 {ty:4.1}% {region:5.1}% {both:4.1}%",
2303 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
2304 region = total.region_infer as f64 * 100.0 / total.total as f64,
2305 both = total.both_infer as f64 * 100.0 / total.total as f64)
2313 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Prints interner statistics for debugging: per-TyKind-variant counts via
// `sty_debug_print!`, then raw sizes of the other interner tables.
2314 pub fn print_debug_stats(self) {
2317 Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr, Placeholder,
2318 Generator, GeneratorWitness, Dynamic, Closure, Tuple, Bound,
2319 Param, Infer, UnnormalizedProjection, Projection, Opaque, Foreign);
2321 println!("Substs interner: #{}", self.interners.substs.borrow().len());
2322 println!("Region interner: #{}", self.interners.region.borrow().len());
2323 println!("Stability interner: #{}", self.stability_interner.borrow().len());
2324 println!("Allocation interner: #{}", self.allocation_interner.borrow().len());
2325 println!("Layout interner: #{}", self.layout_interner.borrow().len());
2330 /// An entry in an interner.
// Thin wrapper around an arena reference. Equality/hashing is defined per
// payload type below (structural, not by address), which is what lets the
// interner deduplicate values.
2331 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
// Manual Clone/Copy: the wrapper is just a reference, so it is always Copy
// regardless of whether `T` itself is.
2333 impl<'tcx, T: 'tcx+?Sized> Clone for Interned<'tcx, T> {
2334 fn clone(&self) -> Self {
2338 impl<'tcx, T: 'tcx+?Sized> Copy for Interned<'tcx, T> {}
2340 // N.B., an `Interned<Ty>` compares and hashes as a sty.
2341 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
2342 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
2343 self.0.sty == other.0.sty
2347 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
2349 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
2350 fn hash<H: Hasher>(&self, s: &mut H) {
// Borrow impls let interner maps be queried by the *key* type (e.g. TyKind)
// without first allocating an interned wrapper.
2355 impl<'tcx: 'lcx, 'lcx> Borrow<TyKind<'lcx>> for Interned<'tcx, TyS<'tcx>> {
2356 fn borrow<'a>(&'a self) -> &'a TyKind<'lcx> {
2361 // N.B., an `Interned<List<T>>` compares and hashes as its elements.
2362 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List<T>> {
2363 fn eq(&self, other: &Interned<'tcx, List<T>>) -> bool {
2364 self.0[..] == other.0[..]
2368 impl<'tcx, T: Eq> Eq for Interned<'tcx, List<T>> {}
2370 impl<'tcx, T: Hash> Hash for Interned<'tcx, List<T>> {
2371 fn hash<H: Hasher>(&self, s: &mut H) {
2376 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, List<Ty<'tcx>>> {
2377 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
2382 impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, List<CanonicalVarInfo>> {
2383 fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
2388 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
2389 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
2394 impl<'tcx: 'lcx, 'lcx> Borrow<[ProjectionKind<'lcx>]>
2395 for Interned<'tcx, List<ProjectionKind<'tcx>>> {
2396 fn borrow<'a>(&'a self) -> &'a [ProjectionKind<'lcx>] {
2401 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
2402 fn borrow<'a>(&'a self) -> &'a RegionKind {
2407 impl<'tcx: 'lcx, 'lcx> Borrow<GoalKind<'lcx>> for Interned<'tcx, GoalKind<'tcx>> {
2408 fn borrow<'a>(&'a self) -> &'a GoalKind<'lcx> {
2413 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
2414 for Interned<'tcx, List<ExistentialPredicate<'tcx>>> {
2415 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
2420 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
2421 for Interned<'tcx, List<Predicate<'tcx>>> {
2422 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
2427 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
2428 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
2433 impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]>
2434 for Interned<'tcx, List<Clause<'tcx>>> {
2435 fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] {
2440 impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]>
2441 for Interned<'tcx, List<Goal<'tcx>>> {
2442 fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] {
// Generates a `TyCtxt` interning method: look the value up in the appropriate
// interner map and, on a miss, allocate it in the matching arena. Values that
// contain inference types/regions go in the thread-local interner; everything
// else goes in (and is transmuted to) the global 'gcx interner.
2447 macro_rules! intern_method {
2448 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
2451 $keep_in_local_tcx:expr) -> $ty:ty) => {
2452 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
2453 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
2454 let key = ($alloc_to_key)(&v);
2456 // HACK(eddyb) Depend on flags being accurate to
2457 // determine that all contents are in the global tcx.
2458 // See comments on Lift for why we can't use that.
2459 if ($keep_in_local_tcx)(&v) {
2460 self.interners.$name.borrow_mut().intern_ref(key, || {
2461 // Make sure we don't end up with inference
2462 // types/regions in the global tcx.
2463 if self.is_global() {
2464 bug!("Attempted to intern `{:?}` which contains \
2465 inference types/regions in the global type context",
2469 Interned($alloc_method(&self.interners.arena, v))
2472 self.global_interners.$name.borrow_mut().intern_ref(key, || {
2473 // This transmutes $alloc<'tcx> to $alloc<'gcx>
// Sound because the keep_in_local_tcx check above proved the value contains
// nothing tied to the shorter local lifetime.
2477 let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
2479 let i = unsafe { mem::transmute(i) };
// For types interned "directly" (not as slices): derives structural
// PartialEq/Eq/Hash for the `Interned` wrapper and then delegates to
// `intern_method!` with a plain arena `alloc`.
2488 macro_rules! direct_interners {
2489 ($lt_tcx:tt, $($name:ident: $method:ident($keep_in_local_tcx:expr) -> $ty:ty),+) => {
2490 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
2491 fn eq(&self, other: &Self) -> bool {
2496 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
2498 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
2499 fn hash<H: Hasher>(&self, s: &mut H) {
2507 |a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) },
2509 $keep_in_local_tcx) -> $ty);)+
// True if `x` mentions anything (inference vars etc.) that must stay in the
// thread-local tcx rather than the global one.
2513 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
2514 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Instantiate the direct interners: regions, constants and goals.
2517 direct_interners!('tcx,
2518 region: mk_region(|r: &RegionKind| r.keep_in_local_tcx()) -> RegionKind,
2519 const_: mk_const(|c: &Const<'_>| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>,
2520 goal: mk_goal(|c: &GoalKind<'_>| keep_local(c)) -> GoalKind<'tcx>
// For slice-valued interners: each `_intern_*` method copies the input slice
// into an arena-allocated `List<T>` (kept local if any element must stay in
// the thread-local tcx).
2523 macro_rules! slice_interners {
2524 ($($field:ident: $method:ident($ty:ident)),+) => (
2525 $(intern_method!( 'tcx, $field: $method(
2527 |a, v| List::from_arena(a, v),
2529 |xs: &[$ty<'_>]| xs.iter().any(keep_local)) -> List<$ty<'tcx>>);)+
// Instantiate slice interners for each list-shaped payload.
2534 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
2535 predicates: _intern_predicates(Predicate),
2536 type_list: _intern_type_list(Ty),
2537 substs: _intern_substs(Kind),
2538 clauses: _intern_clauses(Clause),
2539 goal_list: _intern_goals(Goal),
2540 projs: _intern_projs(ProjectionKind)
2543 // This isn't a perfect fit: CanonicalVarInfo slices are always
2544 // allocated in the global arena, so this `intern_method!` macro is
2545 // overly general. But we just return false for the code that checks
2546 // whether they belong in the thread-local arena, so no harm done, and
2547 // seems better than open-coding the rest.
2550 canonical_var_infos: _intern_canonical_var_infos(
2551 &[CanonicalVarInfo],
2552 |a, v| List::from_arena(a, v),
2554 |_xs: &[CanonicalVarInfo]| -> bool { false }
2555 ) -> List<CanonicalVarInfo>
2558 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
2559 /// Given a `fn` type, returns an equivalent `unsafe fn` type;
2560 /// that is, a `fn` type that is equivalent in every way for being
// (doc continues on an elided line — presumably "...safe/unsafe".)
2562 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
// Input must actually be a safe fn; flipping unsafety on an already-unsafe
// sig would indicate a caller bug.
2563 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
2564 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
2565 unsafety: hir::Unsafety::Unsafe,
2570 /// Given a closure signature `sig`, returns an equivalent `fn`
2571 /// type with the same signature. Detuples and so forth -- so
2572 /// e.g., if we have a sig with `Fn<(u32, i32)>` then you would get
2573 /// a `fn(u32, i32)`.
2574 pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2575 let converted_sig = sig.map_bound(|s| {
// Closure sigs carry their arguments as a single tuple; flatten it into
// individual fn-pointer parameters.
2576 let params_iter = match s.inputs()[0].sty {
2577 ty::Tuple(params) => {
2578 params.into_iter().cloned()
2586 hir::Unsafety::Normal,
2591 self.mk_fn_ptr(converted_sig)
// --- Basic type constructors: each interns a TyKind and returns the
// canonical Ty<'tcx> for it. ---
2595 pub fn mk_ty(&self, st: TyKind<'tcx>) -> Ty<'tcx> {
2596 CtxtInterners::intern_ty(&self.interners, &self.global_interners, st)
// Maps an AST integer type to the corresponding pre-interned primitive Ty.
2599 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
2601 ast::IntTy::Isize => self.types.isize,
2602 ast::IntTy::I8 => self.types.i8,
2603 ast::IntTy::I16 => self.types.i16,
2604 ast::IntTy::I32 => self.types.i32,
2605 ast::IntTy::I64 => self.types.i64,
2606 ast::IntTy::I128 => self.types.i128,
// Same mapping for unsigned integer types.
2610 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
2612 ast::UintTy::Usize => self.types.usize,
2613 ast::UintTy::U8 => self.types.u8,
2614 ast::UintTy::U16 => self.types.u16,
2615 ast::UintTy::U32 => self.types.u32,
2616 ast::UintTy::U64 => self.types.u64,
2617 ast::UintTy::U128 => self.types.u128,
// And for floating-point types.
2621 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
2623 ast::FloatTy::F32 => self.types.f32,
2624 ast::FloatTy::F64 => self.types.f64,
2629 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str`.
2634 pub fn mk_static_str(self) -> Ty<'tcx> {
2635 self.mk_imm_ref(self.types.re_static, self.mk_str())
2639 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2640 // take a copy of substs so that we own the vectors inside
2641 self.mk_ty(Adt(def, substs))
2645 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
2646 self.mk_ty(Foreign(def_id))
// Builds `Box<ty>`: resolves the owned-box lang item and substitutes `ty`
// for Box's first type parameter (defaulting any further params, e.g. the
// allocator).
2649 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2650 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
2651 let adt_def = self.adt_def(def_id);
2652 let substs = Substs::for_item(self, def_id, |param, substs| {
2654 GenericParamDefKind::Lifetime => bug!(),
2655 GenericParamDefKind::Type { has_default, .. } => {
2656 if param.index == 0 {
2659 assert!(has_default);
2660 self.type_of(param.def_id).subst(self, substs).into()
2665 self.mk_ty(Adt(adt_def, substs))
2669 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2670 self.mk_ty(RawPtr(tm))
2674 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2675 self.mk_ty(Ref(r, tm.ty, tm.mutbl))
2679 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2680 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2684 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2685 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2689 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2690 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2694 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2695 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
// `*const ()`.
2699 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
2700 self.mk_imm_ptr(self.mk_unit())
// `[ty; n]` — the length is wrapped as an already-evaluated lazy const.
2704 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
2705 self.mk_ty(Array(ty, self.intern_lazy_const(
2706 ty::LazyConst::Evaluated(ty::Const::from_usize(self.global_tcx(), n))
2711 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2712 self.mk_ty(Slice(ty))
2716 pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
2717 self.mk_ty(Tuple(self.intern_type_list(ts)))
2720 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
2721 iter.intern_with(|ts| self.mk_ty(Tuple(self.intern_type_list(ts))))
2725 pub fn mk_unit(self) -> Ty<'tcx> {
// The type an unconstrained diverging expression defaults to: `!` when the
// never_type feature is enabled, otherwise `()` (the elided branch returns
// the never type).
2730 pub fn mk_diverging_default(self) -> Ty<'tcx> {
2731 if self.features().never_type {
2734 self.intern_tup(&[])
2739 pub fn mk_bool(self) -> Ty<'tcx> {
2744 pub fn mk_fn_def(self, def_id: DefId,
2745 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2746 self.mk_ty(FnDef(def_id, substs))
2750 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
2751 self.mk_ty(FnPtr(fty))
// Trait-object type `dyn Trait + 'reg`.
2757 obj: ty::Binder<&'tcx List<ExistentialPredicate<'tcx>>>,
2758 reg: ty::Region<'tcx>
2760 self.mk_ty(Dynamic(obj, reg))
2764 pub fn mk_projection(self,
2766 substs: &'tcx Substs<'tcx>)
2768 self.mk_ty(Projection(ProjectionTy {
2775 pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>)
2777 self.mk_ty(Closure(closure_id, closure_substs))
2781 pub fn mk_generator(self,
2783 generator_substs: GeneratorSubsts<'tcx>,
2784 movability: hir::GeneratorMovability)
2786 self.mk_ty(Generator(id, generator_substs, movability))
2790 pub fn mk_generator_witness(self, types: ty::Binder<&'tcx List<Ty<'tcx>>>) -> Ty<'tcx> {
2791 self.mk_ty(GeneratorWitness(types))
// --- Inference-variable type constructors. ---
2795 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
2796 self.mk_infer(TyVar(v))
2800 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
2801 self.mk_infer(IntVar(v))
2805 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
2806 self.mk_infer(FloatVar(v))
2810 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
2811 self.mk_ty(Infer(it))
2815 pub fn mk_ty_param(self,
2817 name: InternedString) -> Ty<'tcx> {
2818 self.mk_ty(Param(ParamTy { idx: index, name: name }))
// `Self` is represented as the type parameter with index 0.
2822 pub fn mk_self_type(self) -> Ty<'tcx> {
2823 self.mk_ty_param(0, keywords::SelfUpper.name().as_interned_str())
// Turns a generic parameter definition into the corresponding Kind
// (an early-bound region or a param type).
2826 pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> {
2828 GenericParamDefKind::Lifetime => {
2829 self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into()
2831 GenericParamDefKind::Type {..} => self.mk_ty_param(param.index, param.name).into(),
2836 pub fn mk_opaque(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2837 self.mk_ty(Opaque(def_id, substs))
// --- Slice interning wrappers: validate invariants, special-case empty
// slices (elided lines), then delegate to the macro-generated `_intern_*`. ---
2840 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
2841 -> &'tcx List<ExistentialPredicate<'tcx>> {
2842 assert!(!eps.is_empty());
// Existential predicates must be in stable sorted order for deduplication.
2843 assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater));
2844 self._intern_existential_predicates(eps)
2847 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
2848 -> &'tcx List<Predicate<'tcx>> {
2849 // FIXME consider asking the input slice to be sorted to avoid
2850 // re-interning permutations, in which case that would be asserted
2852 if preds.len() == 0 {
2853 // The macro-generated method below asserts we don't intern an empty slice.
2856 self._intern_predicates(preds)
2860 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
2864 self._intern_type_list(ts)
2868 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx List<Kind<'tcx>> {
2872 self._intern_substs(ts)
2876 pub fn intern_projs(self, ps: &[ProjectionKind<'tcx>]) -> &'tcx List<ProjectionKind<'tcx>> {
2880 self._intern_projs(ps)
// CanonicalVarInfo lists always live in the global tcx (see comment above
// the canonical_var_infos interner), hence the `global_tcx()` hop.
2884 pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
2888 self.global_tcx()._intern_canonical_var_infos(ts)
2892 pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> {
2896 self._intern_clauses(ts)
2900 pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> {
2904 self._intern_goals(ts)
// Builds a FnSig from an iterator of input types plus the output type; the
// output is appended as the final element of `inputs_and_output`.
2908 pub fn mk_fn_sig<I>(self,
2912 unsafety: hir::Unsafety,
2914 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2916 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2918 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2919 inputs_and_output: self.intern_type_list(xs),
2920 variadic, unsafety, abi
// --- `mk_*` iterator adapters: collect an iterator and intern the result
// via the matching `intern_*` method. ---
2924 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2925 &'tcx List<ExistentialPredicate<'tcx>>>>(self, iter: I)
2927 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2930 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2931 &'tcx List<Predicate<'tcx>>>>(self, iter: I)
2933 iter.intern_with(|xs| self.intern_predicates(xs))
2936 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2937 &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2938 iter.intern_with(|xs| self.intern_type_list(xs))
2941 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2942 &'tcx List<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2943 iter.intern_with(|xs| self.intern_substs(xs))
// Substs for a trait reference: `self_ty` first, then the remaining args.
2946 pub fn mk_substs_trait(self,
2948 rest: &[Kind<'tcx>])
2949 -> &'tcx Substs<'tcx>
2951 self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned()))
2954 pub fn mk_clauses<I: InternAs<[Clause<'tcx>], Clauses<'tcx>>>(self, iter: I) -> I::Output {
2955 iter.intern_with(|xs| self.intern_clauses(xs))
2958 pub fn mk_goals<I: InternAs<[Goal<'tcx>], Goals<'tcx>>>(self, iter: I) -> I::Output {
2959 iter.intern_with(|xs| self.intern_goals(xs))
// --- Lint emission helpers: build the appropriately-leveled diagnostic and
// emit it immediately (the `*_note` variants attach a note first, on lines
// elided from this excerpt). ---
2962 pub fn lint_hir<S: Into<MultiSpan>>(self,
2963 lint: &'static Lint,
2967 self.struct_span_lint_hir(lint, hir_id, span.into(), msg).emit()
2970 pub fn lint_node<S: Into<MultiSpan>>(self,
2971 lint: &'static Lint,
2975 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
2978 pub fn lint_hir_note<S: Into<MultiSpan>>(self,
2979 lint: &'static Lint,
2984 let mut err = self.struct_span_lint_hir(lint, hir_id, span.into(), msg);
2989 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2990 lint: &'static Lint,
2995 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
// Resolves the effective lint level at `id` by walking up the HIR parents
// until a node with an explicit level/source is found.
3000 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
3001 -> (lint::Level, lint::LintSource)
3003 // Right now we insert a `with_ignore` node in the dep graph here to
3004 // ignore the fact that `lint_levels` below depends on the entire crate.
3005 // For now this'll prevent false positives of recompiling too much when
3006 // anything changes.
3008 // Once red/green incremental compilation lands we should be able to
3009 // remove this because while the crate changes often the lint level map
3010 // will change rarely.
3011 self.dep_graph.with_ignore(|| {
3012 let sets = self.lint_levels(LOCAL_CRATE);
3014 let hir_id = self.hir().definitions().node_to_hir_id(id);
3015 if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
3018 let next = self.hir().get_parent_node(id);
// Reaching the root without an answer is a compiler invariant violation:
// the crate root always has a lint level.
3020 bug!("lint traversal reached the root of the crate");
// Builds (without emitting) a lint diagnostic at the level configured for
// the given HIR node.
3027 pub fn struct_span_lint_hir<S: Into<MultiSpan>>(self,
3028 lint: &'static Lint,
3032 -> DiagnosticBuilder<'tcx>
3034 let node_id = self.hir().hir_to_node_id(hir_id);
3035 let (level, src) = self.lint_level_at_node(lint, node_id);
3036 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
// Same as above but keyed by NodeId directly.
3039 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
3040 lint: &'static Lint,
3044 -> DiagnosticBuilder<'tcx>
3046 let (level, src) = self.lint_level_at_node(lint, id);
3047 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
// Span-less variant.
3050 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
3051 -> DiagnosticBuilder<'tcx>
3053 let (level, src) = self.lint_level_at_node(lint, id);
3054 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// --- Per-HirId query lookups: each consults the owner-indexed map and then
// the item-local entry within it. ---
3057 pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
3058 self.in_scope_traits_map(id.owner)
3059 .and_then(|map| map.get(&id.local_id).cloned())
3062 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
3063 self.named_region_map(id.owner)
3064 .and_then(|map| map.get(&id.local_id).cloned())
3067 pub fn is_late_bound(self, id: HirId) -> bool {
3068 self.is_late_bound_map(id.owner)
3069 .map(|set| set.contains(&id.local_id))
3073 pub fn object_lifetime_defaults(self, id: HirId)
3074 -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
3076 self.object_lifetime_defaults_map(id.owner)
3077 .and_then(|map| map.get(&id.local_id).cloned())
// Abstraction over "collect this iterator, then intern the resulting slice".
// `R` is the interned result (e.g. `&'tcx List<Ty<'tcx>>`).
3081 pub trait InternAs<T: ?Sized, R> {
3083 fn intern_with<F>(self, f: F) -> Self::Output
3084 where F: FnOnce(&T) -> R;
// Blanket impl: any iterator whose items know how to be collected-and-interned
// (via InternIteratorElement) gets InternAs for free.
3087 impl<I, T, R, E> InternAs<[T], R> for I
3088 where E: InternIteratorElement<T, R>,
3089 I: Iterator<Item=E> {
3090 type Output = E::Output;
3091 fn intern_with<F>(self, f: F) -> Self::Output
3092 where F: FnOnce(&[T]) -> R {
3093 E::intern_with(self, f)
3097 pub trait InternIteratorElement<T, R>: Sized {
3099 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
// Owned items: collect into a stack-friendly SmallVec and hand the slice to `f`.
3102 impl<T, R> InternIteratorElement<T, R> for T {
3104 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
3105 f(&iter.collect::<SmallVec<[_; 8]>>())
// Borrowed items: clone each element while collecting.
3109 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
3113 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
3114 f(&iter.cloned().collect::<SmallVec<[_; 8]>>())
// Fallible items: short-circuit on the first Err, otherwise intern the Ok values.
3118 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
3119 type Output = Result<R, E>;
3120 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
3121 Ok(f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?))
// Registers provider closures for queries whose answers come straight out of
// pre-computed tables (resolver outputs, session state, crate store).
// NOTE(review): this function continues past the end of this excerpt.
3125 pub fn provide(providers: &mut ty::query::Providers<'_>) {
3126 // FIXME(#44234): almost all of these queries have no sub-queries and
3127 // therefore no actual inputs, they're just reading tables calculated in
3128 // resolve! Does this work? Unsure! That's what the issue is about.
3129 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
3130 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
3131 providers.crate_name = |tcx, id| {
// These `assert_eq!(…, LOCAL_CRATE)` checks guard that the local-only
// providers are never invoked for an upstream crate.
3132 assert_eq!(id, LOCAL_CRATE);
3135 providers.get_lib_features = |tcx, id| {
3136 assert_eq!(id, LOCAL_CRATE);
3137 Lrc::new(middle::lib_features::collect(tcx))
3139 providers.get_lang_items = |tcx, id| {
3140 assert_eq!(id, LOCAL_CRATE);
3141 Lrc::new(middle::lang_items::collect(tcx))
3143 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
3144 providers.maybe_unused_trait_import = |tcx, id| {
3145 tcx.maybe_unused_trait_imports.contains(&id)
3147 providers.maybe_unused_extern_crates = |tcx, cnum| {
3148 assert_eq!(cnum, LOCAL_CRATE);
3149 Lrc::new(tcx.maybe_unused_extern_crates.clone())
3152 providers.stability_index = |tcx, cnum| {
3153 assert_eq!(cnum, LOCAL_CRATE);
3154 Lrc::new(stability::Index::new(tcx))
3156 providers.lookup_stability = |tcx, id| {
3157 assert_eq!(id.krate, LOCAL_CRATE);
3158 let id = tcx.hir().definitions().def_index_to_hir_id(id.index);
3159 tcx.stability().local_stability(id)
3161 providers.lookup_deprecation_entry = |tcx, id| {
3162 assert_eq!(id.krate, LOCAL_CRATE);
3163 let id = tcx.hir().definitions().def_index_to_hir_id(id.index);
3164 tcx.stability().local_deprecation_entry(id)
3166 providers.extern_mod_stmt_cnum = |tcx, id| {
3167 let id = tcx.hir().as_local_node_id(id).unwrap();
3168 tcx.cstore.extern_mod_stmt_cnum_untracked(id)
3170 providers.all_crate_nums = |tcx, cnum| {
3171 assert_eq!(cnum, LOCAL_CRATE);
3172 Lrc::new(tcx.cstore.crates_untracked())
3174 providers.postorder_cnums = |tcx, cnum| {
3175 assert_eq!(cnum, LOCAL_CRATE);
3176 Lrc::new(tcx.cstore.postorder_cnums_untracked())
3178 providers.output_filenames = |tcx, cnum| {
3179 assert_eq!(cnum, LOCAL_CRATE);
3180 tcx.output_filenames.clone()
3182 providers.features_query = |tcx, cnum| {
3183 assert_eq!(cnum, LOCAL_CRATE);
3184 Lrc::new(tcx.sess.features_untracked().clone())
3186 providers.is_panic_runtime = |tcx, cnum| {
3187 assert_eq!(cnum, LOCAL_CRATE);
3188 attr::contains_name(tcx.hir().krate_attrs(), "panic_runtime")
3190 providers.is_compiler_builtins = |tcx, cnum| {
3191 assert_eq!(cnum, LOCAL_CRATE);
3192 attr::contains_name(tcx.hir().krate_attrs(), "compiler_builtins")