1 //! type context book-keeping
3 use dep_graph::DepGraph;
4 use dep_graph::{DepNode, DepConstructor};
5 use errors::DiagnosticBuilder;
7 use session::config::{BorrowckMode, OutputFilenames};
8 use session::config::CrateType;
10 use hir::{TraitCandidate, HirId, ItemKind, ItemLocalId, Node};
11 use hir::def::{Def, Export};
12 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
13 use hir::map as hir_map;
14 use hir::map::DefPathHash;
15 use lint::{self, Lint};
16 use ich::{StableHashingContext, NodeIdHashingMode};
17 use infer::canonical::{Canonical, CanonicalVarInfo, CanonicalVarInfos};
18 use infer::outlives::free_region_map::FreeRegionMap;
19 use middle::cstore::CrateStoreDyn;
20 use middle::cstore::EncodedMetadata;
21 use middle::lang_items;
22 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
23 use middle::stability;
24 use mir::{self, Mir, interpret, ProjectionKind};
25 use mir::interpret::Allocation;
26 use ty::subst::{Kind, Substs, Subst};
29 use traits::{Clause, Clauses, GoalKind, Goal, Goals};
30 use ty::{self, Ty, TypeAndMut};
31 use ty::{TyS, TyKind, List};
32 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorSubsts, Region, Const};
33 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
35 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
37 use ty::GenericParamDefKind;
38 use ty::layout::{LayoutDetails, TargetDataLayout, VariantIdx};
41 use ty::subst::{UserSubsts, UnpackedKind};
42 use ty::{BoundVar, BindingMode};
43 use ty::CanonicalPolyFnSig;
44 use util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap};
45 use util::nodemap::{FxHashMap, FxHashSet};
46 use rustc_data_structures::interner::HashInterner;
47 use smallvec::SmallVec;
48 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
49 StableHasher, StableHasherResult,
51 use arena::{TypedArena, SyncDroplessArena};
52 use rustc_data_structures::indexed_vec::{Idx, IndexVec};
53 use rustc_data_structures::sync::{self, Lrc, Lock, WorkerLocal};
55 use std::borrow::Borrow;
56 use std::cmp::Ordering;
57 use std::collections::hash_map::{self, Entry};
58 use std::hash::{Hash, Hasher};
61 use std::ops::{Deref, Bound};
65 use std::marker::PhantomData;
66 use rustc_target::spec::abi;
67 use syntax::ast::{self, NodeId};
69 use syntax::source_map::MultiSpan;
70 use syntax::edition::Edition;
71 use syntax::feature_gate;
72 use syntax::symbol::{Symbol, keywords, InternedString};
// Bundles every arena used for one compilation session: `global` holds the
// long-lived typed arenas (`GlobalArenas`), `interner` backs interned values.
77 pub struct AllArenas<'tcx> {
78 pub global: WorkerLocal<GlobalArenas<'tcx>>,
79 pub interner: SyncDroplessArena,
// NOTE(review): field appears between elided lines; presumably it lazily
// stores the global context built over these arenas — confirm in full file.
80 global_ctxt: Option<GlobalCtxt<'tcx>>,
83 impl<'tcx> AllArenas<'tcx> {
// Constructs a fresh set of arenas with default-initialized members.
84 pub fn new() -> Self {
86 global: WorkerLocal::new(|_| GlobalArenas::default()),
87 interner: SyncDroplessArena::default(),
// Typed arenas owning long-lived compiler data, filled via the
// `TyCtxt::alloc_*` / `intern_*` methods further down in this file
// (layouts, generics, trait/ADT definitions, MIR, typeck tables,
// constant allocations). Everything allocated here lives for 'tcx.
95 pub struct GlobalArenas<'tcx> {
97 layout: TypedArena<LayoutDetails>,
100 generics: TypedArena<ty::Generics>,
101 trait_def: TypedArena<ty::TraitDef>,
102 adt_def: TypedArena<ty::AdtDef>,
// MIR wrapped in `Steal` so a later pass can take ownership without copying.
103 steal_mir: TypedArena<Steal<Mir<'tcx>>>,
104 mir: TypedArena<Mir<'tcx>>,
105 tables: TypedArena<ty::TypeckTables<'tcx>>,
107 const_allocs: TypedArena<interpret::Allocation>,
// An interning table: a locked hash-set of interned references (the map
// value is `()`), using the fast `FxHashMap` hasher.
110 type InternedSet<'tcx, T> = Lock<FxHashMap<Interned<'tcx, T>, ()>>;
// One interning table per kind of interned value (types, substs, regions,
// predicates, goals, ...). All tables allocate their backing storage out of
// `arena`; deduplication happens through the `InternedSet` lookups.
112 pub struct CtxtInterners<'tcx> {
113 /// The arena that types, regions, etc are allocated from
114 arena: &'tcx SyncDroplessArena,
116 /// Specifically use a speedy hash algorithm for these hash sets,
117 /// they're accessed quite often.
118 type_: InternedSet<'tcx, TyS<'tcx>>,
119 type_list: InternedSet<'tcx, List<Ty<'tcx>>>,
120 substs: InternedSet<'tcx, Substs<'tcx>>,
121 canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo>>,
122 region: InternedSet<'tcx, RegionKind>,
123 existential_predicates: InternedSet<'tcx, List<ExistentialPredicate<'tcx>>>,
124 predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
125 const_: InternedSet<'tcx, Const<'tcx>>,
126 clauses: InternedSet<'tcx, List<Clause<'tcx>>>,
127 goal: InternedSet<'tcx, GoalKind<'tcx>>,
128 goal_list: InternedSet<'tcx, List<Goal<'tcx>>>,
129 projs: InternedSet<'tcx, List<ProjectionKind<'tcx>>>,
132 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
// Creates a set of empty interning tables backed by the given arena.
133 fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
136 type_: Default::default(),
137 type_list: Default::default(),
138 substs: Default::default(),
139 region: Default::default(),
140 existential_predicates: Default::default(),
141 canonical_var_infos: Default::default(),
142 predicates: Default::default(),
143 const_: Default::default(),
144 clauses: Default::default(),
145 goal: Default::default(),
146 goal_list: Default::default(),
147 projs: Default::default(),
// Parameters of the type-interning function whose signature line is elided
// here: `local` is the inference-local interner, `global` the crate-global
// one; `st` (used below) is the `TyKind` being interned.
154 local: &CtxtInterners<'tcx>,
155 global: &CtxtInterners<'gcx>,
// Compute the type's flags up front; they decide which interner may hold it.
158 let flags = super::flags::FlagComputation::for_sty(&st);
160 // HACK(eddyb) Depend on flags being accurate to
161 // determine that all contents are in the global tcx.
162 // See comments on Lift for why we can't use that.
// Types containing inference vars must stay in the short-lived local interner.
163 if flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
164 local.type_.borrow_mut().intern(st, |st| {
165 let ty_struct = TyS {
168 outer_exclusive_binder: flags.outer_exclusive_binder,
171 // Make sure we don't end up with inference
172 // types/regions in the global interner
// Pointer-equality of the two interners means "we ARE the global context",
// so an inference-flagged type reaching here is a compiler bug.
173 if local as *const _ as usize == global as *const _ as usize {
174 bug!("Attempted to intern `{:?}` which contains \
175 inference types/regions in the global type context",
179 Interned(local.arena.alloc(ty_struct))
182 global.type_.borrow_mut().intern(st, |st| {
183 let ty_struct = TyS {
186 outer_exclusive_binder: flags.outer_exclusive_binder,
189 // This is safe because all the types the ty_struct can point to
190 // already is in the global arena
// SAFETY: the KEEP_IN_LOCAL_TCX branch above has excluded local-only
// contents, so widening the lifetime from 'tcx to 'gcx is sound here.
191 let ty_struct: TyS<'gcx> = unsafe {
192 mem::transmute(ty_struct)
195 Interned(global.arena.alloc(ty_struct))
// Pre-interned types and regions built once in `CommonTypes::new` below.
// Most type fields (bool, ints, floats, ...) are elided in this excerpt;
// the visible fields are the pre-interned common regions.
201 pub struct CommonTypes<'tcx> {
222 pub re_empty: Region<'tcx>,
223 pub re_static: Region<'tcx>,
224 pub re_erased: Region<'tcx>,
// Read-only view over an `ItemLocalMap`, paired with the `local_id_root`
// its keys are relative to so each access can be validated (see
// `validate_hir_id_for_typeck_tables` below).
227 pub struct LocalTableInContext<'a, V: 'a> {
228 local_id_root: Option<DefId>,
229 data: &'a ItemLocalMap<V>
232 /// Validate that the given HirId (respectively its `local_id` part) can be
233 /// safely used as a key in the tables of a TypeckTable. For that to be
234 /// the case, the HirId must have the same `owner` as all the other IDs in
235 /// this table (signified by `local_id_root`). Otherwise the HirId
236 /// would be in a different frame of reference and using its `local_id`
237 /// would result in lookup errors, or worse, in silently wrong data being
239 fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
// (remaining parameters elided in this excerpt; callers pass the `hir_id`
// under scrutiny plus a boolean that is true for mutating accesses)
// Validation only runs in debug builds; release builds skip it entirely.
242 if cfg!(debug_assertions) {
243 if let Some(local_id_root) = local_id_root {
// A mismatched owner means the id belongs to a different item's table.
244 if hir_id.owner != local_id_root.index {
245 ty::tls::with(|tcx| {
246 let node_id = tcx.hir().hir_to_node_id(hir_id);
248 bug!("node {} with HirId::owner {:?} cannot be placed in \
249 TypeckTables with local_id_root {:?}",
250 tcx.hir().node_to_string(node_id),
251 DefId::local(hir_id.owner),
256 // We use "Null Object" TypeckTables in some of the analysis passes.
257 // These are just expected to be empty and their `local_id_root` is
258 // `None`. Therefore we cannot verify whether a given `HirId` would
259 // be a valid key for the given table. Instead we make sure that
260 // nobody tries to write to such a Null Object table.
262 bug!("access to invalid TypeckTables")
268 impl<'a, V> LocalTableInContext<'a, V> {
// Validated `contains_key`: checks the id's owner, then looks up by local id.
269 pub fn contains_key(&self, id: hir::HirId) -> bool {
270 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
271 self.data.contains_key(&id.local_id)
// Validated, non-panicking lookup; returns `None` when the key is absent.
274 pub fn get(&self, id: hir::HirId) -> Option<&V> {
275 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
276 self.data.get(&id.local_id)
// Raw iterator over the underlying map (no per-entry validation).
279 pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> {
// `table[hir_id]` sugar; panics (via `expect`) when the key is missing.
284 impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
287 fn index(&self, key: hir::HirId) -> &V {
288 self.get(key).expect("LocalTableInContext: key not found")
// Mutable counterpart of `LocalTableInContext`; writes are validated with
// `mut_access = true` so "Null Object" tables reject them.
292 pub struct LocalTableInContextMut<'a, V: 'a> {
293 local_id_root: Option<DefId>,
294 data: &'a mut ItemLocalMap<V>
297 impl<'a, V> LocalTableInContextMut<'a, V> {
// Validated mutable lookup.
298 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
299 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
300 self.data.get_mut(&id.local_id)
// Validated entry API (insert-or-update in one lookup).
303 pub fn entry(&mut self, id: hir::HirId) -> Entry<'_, hir::ItemLocalId, V> {
304 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
305 self.data.entry(id.local_id)
// Validated insert; returns the previous value, if any.
308 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
309 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
310 self.data.insert(id.local_id, val)
// Validated removal; returns the removed value, if any.
313 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
314 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
315 self.data.remove(&id.local_id)
319 #[derive(RustcEncodable, RustcDecodable, Debug)]
// Per-body results of type checking; keys are `ItemLocalId`s relative to
// `local_id_root`. Accessed through the validated wrapper views above.
320 pub struct TypeckTables<'tcx> {
321 /// The HirId::owner all ItemLocalIds in this table are relative to.
322 pub local_id_root: Option<DefId>,
324 /// Resolved definitions for `<T>::X` associated paths and
325 /// method calls, including those of overloaded operators.
326 type_dependent_defs: ItemLocalMap<Def>,
328 /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
329 /// or patterns (`S { field }`). The index is often useful by itself, but to learn more
330 /// about the field you also need definition of the variant to which the field
331 /// belongs, but it may not exist if it's a tuple field (`tuple.0`).
332 field_indices: ItemLocalMap<usize>,
334 /// Stores the types for various nodes in the AST. Note that this table
335 /// is not guaranteed to be populated until after typeck. See
336 /// typeck::check::fn_ctxt for details.
337 node_types: ItemLocalMap<Ty<'tcx>>,
339 /// Stores the type parameters which were substituted to obtain the type
340 /// of this node. This only applies to nodes that refer to entities
341 /// parameterized by type parameters, such as generic fns, types, or
343 node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,
345 /// This will either store the canonicalized types provided by the user
346 /// or the substitutions that the user explicitly gave (if any) attached
347 /// to `id`. These will not include any inferred values. The canonical form
348 /// is used to capture things like `_` or other unspecified values.
350 /// For example, if the user wrote `foo.collect::<Vec<_>>()`, then the
351 /// canonical substitutions would include only `for<X> { Vec<X> }`.
353 /// See also `AscribeUserType` statement in MIR.
354 user_provided_types: ItemLocalMap<CanonicalUserTypeAnnotation<'tcx>>,
356 /// Stores the canonicalized types provided by the user. See also
357 /// `AscribeUserType` statement in MIR.
358 pub user_provided_sigs: DefIdMap<CanonicalPolyFnSig<'tcx>>,
// Adjustments (auto-deref, auto-ref, coercions) applied to each expression.
360 adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
362 /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
363 pat_binding_modes: ItemLocalMap<BindingMode>,
365 /// Stores the types which were implicitly dereferenced in pattern binding modes
366 /// for later usage in HAIR lowering. For example,
369 /// match &&Some(5i32) {
374 /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
377 /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
378 pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
// How each closure upvar is captured (by value or by reference).
381 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
383 /// Records the reasons that we picked the kind of each closure;
384 /// not all closures are present in the map.
385 closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,
387 /// For each fn, records the "liberated" types of its arguments
388 /// and return type. Liberated means that all bound regions
389 /// (including late-bound regions) are replaced with free
390 /// equivalents. This table is not used in codegen (since regions
391 /// are erased there) and hence is not serialized to metadata.
392 liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,
394 /// For each FRU expression, record the normalized types of the fields
395 /// of the struct - this is needed because it is non-trivial to
396 /// normalize while preserving regions. This table is used only in
397 /// MIR construction and hence is not serialized to metadata.
398 fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,
400 /// Maps a cast expression to its kind. This is keyed on the
401 /// *from* expression of the cast, not the cast itself.
402 cast_kinds: ItemLocalMap<ty::cast::CastKind>,
404 /// Set of trait imports actually used in the method resolution.
405 /// This is used for warning unused imports. During type
406 /// checking, this `Lrc` should not be cloned: it must have a ref-count
407 /// of 1 so that we can insert things into the set mutably.
408 pub used_trait_imports: Lrc<DefIdSet>,
410 /// If any errors occurred while type-checking this body,
411 /// this field will be set to `true`.
412 pub tainted_by_errors: bool,
414 /// Stores the free-region relationships that were deduced from
415 /// its where clauses and parameter types. These are then
416 /// read-again by borrowck.
417 pub free_region_map: FreeRegionMap<'tcx>,
419 /// All the existential types that are restricted to concrete types
421 pub concrete_existential_types: FxHashMap<DefId, Ty<'tcx>>,
424 impl<'tcx> TypeckTables<'tcx> {
// Creates an empty table set for the given owner (a "Null Object" table
// when `local_id_root` is `None`).
425 pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
428 type_dependent_defs: Default::default(),
429 field_indices: Default::default(),
430 user_provided_types: Default::default(),
431 user_provided_sigs: Default::default(),
432 node_types: Default::default(),
433 node_substs: Default::default(),
434 adjustments: Default::default(),
435 pat_binding_modes: Default::default(),
436 pat_adjustments: Default::default(),
437 upvar_capture_map: Default::default(),
438 closure_kind_origins: Default::default(),
439 liberated_fn_sigs: Default::default(),
440 fru_field_types: Default::default(),
441 cast_kinds: Default::default(),
442 used_trait_imports: Lrc::new(Default::default()),
443 tainted_by_errors: false,
444 free_region_map: Default::default(),
445 concrete_existential_types: Default::default(),
449 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
450 pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
452 hir::QPath::Resolved(_, ref path) => path.def,
// Type-relative paths were resolved during typeck; fall back to `Def::Err`
// if no resolution was recorded (e.g. after errors).
453 hir::QPath::TypeRelative(..) => {
454 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
455 self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
// The following pairs of methods expose each private table through the
// validated read-only / mutable views defined above.
460 pub fn type_dependent_defs(&self) -> LocalTableInContext<'_, Def> {
461 LocalTableInContext {
462 local_id_root: self.local_id_root,
463 data: &self.type_dependent_defs
467 pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<'_, Def> {
468 LocalTableInContextMut {
469 local_id_root: self.local_id_root,
470 data: &mut self.type_dependent_defs
474 pub fn field_indices(&self) -> LocalTableInContext<'_, usize> {
475 LocalTableInContext {
476 local_id_root: self.local_id_root,
477 data: &self.field_indices
481 pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> {
482 LocalTableInContextMut {
483 local_id_root: self.local_id_root,
484 data: &mut self.field_indices
488 pub fn user_provided_types(
490 ) -> LocalTableInContext<'_, CanonicalUserTypeAnnotation<'tcx>> {
491 LocalTableInContext {
492 local_id_root: self.local_id_root,
493 data: &self.user_provided_types
497 pub fn user_provided_types_mut(
499 ) -> LocalTableInContextMut<'_, CanonicalUserTypeAnnotation<'tcx>> {
500 LocalTableInContextMut {
501 local_id_root: self.local_id_root,
502 data: &mut self.user_provided_types
506 pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> {
507 LocalTableInContext {
508 local_id_root: self.local_id_root,
509 data: &self.node_types
513 pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> {
514 LocalTableInContextMut {
515 local_id_root: self.local_id_root,
516 data: &mut self.node_types
// Panicking lookup of a node's type; use `node_id_to_type_opt` to probe.
520 pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
521 self.node_id_to_type_opt(id).unwrap_or_else(||
522 bug!("node_id_to_type: no type for node `{}`",
524 let id = tcx.hir().hir_to_node_id(id);
525 tcx.hir().node_to_string(id)
530 pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
531 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
532 self.node_types.get(&id.local_id).cloned()
535 pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, &'tcx Substs<'tcx>> {
536 LocalTableInContextMut {
537 local_id_root: self.local_id_root,
538 data: &mut self.node_substs
// Substitutions recorded for a node; missing entries mean "no substs",
// hence the fallback to `Substs::empty()`.
542 pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
543 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
544 self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| Substs::empty())
547 pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
548 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
549 self.node_substs.get(&id.local_id).cloned()
552 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
553 // doesn't provide type parameter substitutions.
554 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
555 self.node_id_to_type(pat.hir_id)
558 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
559 self.node_id_to_type_opt(pat.hir_id)
562 // Returns the type of an expression as a monotype.
564 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
565 // some cases, we insert `Adjustment` annotations such as auto-deref or
566 // auto-ref. The type returned by this function does not consider such
567 // adjustments. See `expr_ty_adjusted()` instead.
569 // NB (2): This type doesn't provide type parameter substitutions; e.g., if you
570 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
571 // instead of "fn(ty) -> T with T = isize".
572 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
573 self.node_id_to_type(expr.hir_id)
576 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
577 self.node_id_to_type_opt(expr.hir_id)
580 pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
581 LocalTableInContext {
582 local_id_root: self.local_id_root,
583 data: &self.adjustments
587 pub fn adjustments_mut(&mut self)
588 -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
589 LocalTableInContextMut {
590 local_id_root: self.local_id_root,
591 data: &mut self.adjustments
// Adjustments applied to `expr`; an empty slice when none were recorded.
595 pub fn expr_adjustments(&self, expr: &hir::Expr)
596 -> &[ty::adjustment::Adjustment<'tcx>] {
597 validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
598 self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
601 /// Returns the type of `expr`, considering any `Adjustment`
602 /// entry recorded for that expression.
603 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
604 self.expr_adjustments(expr)
606 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
609 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
610 self.expr_adjustments(expr)
612 .map(|adj| adj.target)
613 .or_else(|| self.expr_ty_opt(expr))
616 pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
617 // Only paths and method calls/overloaded operators have
618 // entries in type_dependent_defs, ignore the former here.
619 if let hir::ExprKind::Path(_) = expr.node {
623 match self.type_dependent_defs().get(expr.hir_id) {
624 Some(&Def::Method(_)) => true,
629 pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> {
630 LocalTableInContext {
631 local_id_root: self.local_id_root,
632 data: &self.pat_binding_modes
636 pub fn pat_binding_modes_mut(&mut self)
637 -> LocalTableInContextMut<'_, BindingMode> {
638 LocalTableInContextMut {
639 local_id_root: self.local_id_root,
640 data: &mut self.pat_binding_modes
644 pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
645 LocalTableInContext {
646 local_id_root: self.local_id_root,
647 data: &self.pat_adjustments,
651 pub fn pat_adjustments_mut(&mut self)
652 -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
653 LocalTableInContextMut {
654 local_id_root: self.local_id_root,
655 data: &mut self.pat_adjustments,
// Direct indexing into the upvar map; panics if `upvar_id` is absent.
659 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
660 self.upvar_capture_map[&upvar_id]
663 pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, ast::Name)> {
664 LocalTableInContext {
665 local_id_root: self.local_id_root,
666 data: &self.closure_kind_origins
670 pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<'_, (Span, ast::Name)> {
671 LocalTableInContextMut {
672 local_id_root: self.local_id_root,
673 data: &mut self.closure_kind_origins
677 pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> {
678 LocalTableInContext {
679 local_id_root: self.local_id_root,
680 data: &self.liberated_fn_sigs
684 pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> {
685 LocalTableInContextMut {
686 local_id_root: self.local_id_root,
687 data: &mut self.liberated_fn_sigs
691 pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
692 LocalTableInContext {
693 local_id_root: self.local_id_root,
694 data: &self.fru_field_types
698 pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
699 LocalTableInContextMut {
700 local_id_root: self.local_id_root,
701 data: &mut self.fru_field_types
705 pub fn cast_kinds(&self) -> LocalTableInContext<'_, ty::cast::CastKind> {
706 LocalTableInContext {
707 local_id_root: self.local_id_root,
708 data: &self.cast_kinds
712 pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<'_, ty::cast::CastKind> {
713 LocalTableInContextMut {
714 local_id_root: self.local_id_root,
715 data: &mut self.cast_kinds
// Incremental-compilation fingerprinting: hashes every field of
// `TypeckTables` in a stable, crate-independent way.
720 impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
721 fn hash_stable<W: StableHasherResult>(&self,
722 hcx: &mut StableHashingContext<'a>,
723 hasher: &mut StableHasher<W>) {
// Exhaustive destructuring: adding a field to `TypeckTables` without
// updating this impl becomes a compile error.
724 let ty::TypeckTables {
726 ref type_dependent_defs,
728 ref user_provided_types,
729 ref user_provided_sigs,
733 ref pat_binding_modes,
735 ref upvar_capture_map,
736 ref closure_kind_origins,
737 ref liberated_fn_sigs,
742 ref used_trait_imports,
745 ref concrete_existential_types,
// HirIds are hashed via their owner's DefPath so the hash is stable
// across unrelated changes elsewhere in the crate.
748 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
749 type_dependent_defs.hash_stable(hcx, hasher);
750 field_indices.hash_stable(hcx, hasher);
751 user_provided_types.hash_stable(hcx, hasher);
752 user_provided_sigs.hash_stable(hcx, hasher);
753 node_types.hash_stable(hcx, hasher);
754 node_substs.hash_stable(hcx, hasher);
755 adjustments.hash_stable(hcx, hasher);
756 pat_binding_modes.hash_stable(hcx, hasher);
757 pat_adjustments.hash_stable(hcx, hasher);
// Upvar keys contain raw ids; project them to DefPath hashes first so the
// resulting fingerprint is position-independent.
758 hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
765 local_id_root.expect("trying to hash invalid TypeckTables");
767 let var_owner_def_id = DefId {
768 krate: local_id_root.krate,
769 index: var_path.hir_id.owner,
771 let closure_def_id = DefId {
772 krate: local_id_root.krate,
773 index: closure_expr_id.to_def_id().index,
775 (hcx.def_path_hash(var_owner_def_id),
776 var_path.hir_id.local_id,
777 hcx.def_path_hash(closure_def_id))
780 closure_kind_origins.hash_stable(hcx, hasher);
781 liberated_fn_sigs.hash_stable(hcx, hasher);
782 fru_field_types.hash_stable(hcx, hasher);
783 cast_kinds.hash_stable(hcx, hasher);
784 used_trait_imports.hash_stable(hcx, hasher);
785 tainted_by_errors.hash_stable(hcx, hasher);
786 free_region_map.hash_stable(hcx, hasher);
787 concrete_existential_types.hash_stable(hcx, hasher);
// Body of a `newtype_index!` macro invocation (macro name elided in this
// excerpt): a dense index type for user type annotations, starting at 0.
793 pub struct UserTypeAnnotationIndex {
794 DEBUG_FORMAT = "UserTypeAnnotation({})",
795 const START_INDEX = 0,
799 /// Mapping of type annotation indices to canonical user type annotations.
800 pub type CanonicalUserTypeAnnotations<'tcx> =
801 IndexVec<UserTypeAnnotationIndex, (Span, CanonicalUserTypeAnnotation<'tcx>)>;
803 /// Canonicalized user type annotation.
804 pub type CanonicalUserTypeAnnotation<'gcx> = Canonical<'gcx, UserTypeAnnotation<'gcx>>;
806 impl CanonicalUserTypeAnnotation<'gcx> {
807 /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`,
808 /// i.e. each thing is mapped to a canonical variable with the same index.
809 pub fn is_identity(&self) -> bool {
811 UserTypeAnnotation::Ty(_) => false,
812 UserTypeAnnotation::TypeOf(_, user_substs) => {
// A user-written self type rules out the identity form.
813 if user_substs.user_self_ty.is_some() {
// Pair each subst entry with the canonical variable it should equal
// (0, 1, 2, ...) and verify each kind points at exactly that variable.
817 user_substs.substs.iter().zip(BoundVar::new(0)..).all(|(kind, cvar)| {
818 match kind.unpack() {
819 UnpackedKind::Type(ty) => match ty.sty {
820 ty::Bound(debruijn, b) => {
821 // We only allow a `ty::INNERMOST` index in substitutions.
822 assert_eq!(debruijn, ty::INNERMOST);
828 UnpackedKind::Lifetime(r) => match r {
829 ty::ReLateBound(debruijn, br) => {
830 // We only allow a `ty::INNERMOST` index in substitutions.
831 assert_eq!(*debruijn, ty::INNERMOST);
832 cvar == br.assert_bound_var()
843 /// A user-given type annotation attached to a constant. These arise
844 /// from constants that are named via paths, like `Foo::<A>::new` and
846 #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
847 pub enum UserTypeAnnotation<'tcx> {
850 /// The canonical type is the result of `type_of(def_id)` with the
851 /// given substitutions applied.
852 TypeOf(DefId, UserSubsts<'tcx>),
// Derives `TypeFoldable` for both variants via the project's helper macro.
855 EnumTypeFoldableImpl! {
856 impl<'tcx> TypeFoldable<'tcx> for UserTypeAnnotation<'tcx> {
857 (UserTypeAnnotation::Ty)(ty),
858 (UserTypeAnnotation::TypeOf)(def, substs),
// Lifts annotations from an inference context into the global 'tcx
// (macro-generated body; the macro invocation line is elided here).
863 impl<'a, 'tcx> Lift<'tcx> for UserTypeAnnotation<'a> {
864 type Lifted = UserTypeAnnotation<'tcx>;
865 (UserTypeAnnotation::Ty)(ty),
866 (UserTypeAnnotation::TypeOf)(def, substs),
870 impl<'tcx> CommonTypes<'tcx> {
// Pre-interns the primitive types and common regions once, at context
// creation, so later lookups are pointer comparisons.
871 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
// For common types, local and global interner are the same (global tcx).
872 let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty);
873 let mk_region = |r| {
874 interners.region.borrow_mut().intern(r, |r| {
875 Interned(interners.arena.alloc(r))
880 unit: mk(Tuple(List::empty())),
885 isize: mk(Int(ast::IntTy::Isize)),
886 i8: mk(Int(ast::IntTy::I8)),
887 i16: mk(Int(ast::IntTy::I16)),
888 i32: mk(Int(ast::IntTy::I32)),
889 i64: mk(Int(ast::IntTy::I64)),
890 i128: mk(Int(ast::IntTy::I128)),
891 usize: mk(Uint(ast::UintTy::Usize)),
892 u8: mk(Uint(ast::UintTy::U8)),
893 u16: mk(Uint(ast::UintTy::U16)),
894 u32: mk(Uint(ast::UintTy::U32)),
895 u64: mk(Uint(ast::UintTy::U64)),
896 u128: mk(Uint(ast::UintTy::U128)),
897 f32: mk(Float(ast::FloatTy::F32)),
898 f64: mk(Float(ast::FloatTy::F64)),
900 re_empty: mk_region(RegionKind::ReEmpty),
901 re_static: mk_region(RegionKind::ReStatic),
902 re_erased: mk_region(RegionKind::ReErased),
907 // This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime
910 pub struct FreeRegionInfo {
911 // def id corresponding to FreeRegion
913 // the bound region corresponding to FreeRegion
914 pub boundregion: ty::BoundRegion,
915 // checks if bound region is in Impl Item
916 pub is_impl_item: bool,
919 /// The central data structure of the compiler. It stores references
920 /// to the various **arenas** and also houses the results of the
921 /// various **compiler queries** that have been performed. See the
922 /// [rustc guide] for more details.
924 /// [rustc guide]: https://rust-lang.github.io/rustc-guide/ty.html
925 #[derive(Copy, Clone)]
// A cheap, copyable handle: two references plus a phantom lifetime. 'gcx is
// the global context's lifetime, 'tcx the (possibly shorter) local one.
926 pub struct TyCtxt<'a, 'gcx: 'tcx, 'tcx: 'a> {
927 gcx: &'gcx GlobalCtxt<'gcx>,
928 interners: &'tcx CtxtInterners<'tcx>,
929 dummy: PhantomData<&'a ()>,
// Auto-derefs `TyCtxt` to its `GlobalCtxt`, so `tcx.field` reaches the
// global context's fields directly.
932 impl<'gcx> Deref for TyCtxt<'_, 'gcx, '_> {
933 type Target = &'gcx GlobalCtxt<'gcx>;
935 fn deref(&self) -> &Self::Target {
// The actual storage behind `TyCtxt`: arenas, interners, session, dep-graph,
// resolver outputs, and the various caches shared across queries.
940 pub struct GlobalCtxt<'tcx> {
941 global_arenas: &'tcx WorkerLocal<GlobalArenas<'tcx>>,
942 global_interners: CtxtInterners<'tcx>,
944 cstore: &'tcx CrateStoreDyn,
946 pub sess: &'tcx Session,
948 pub dep_graph: DepGraph,
950 /// Common types, pre-interned for your convenience.
951 pub types: CommonTypes<'tcx>,
953 /// Map indicating what traits are in scope for places where this
954 /// is relevant; generated by resolve.
955 trait_map: FxHashMap<DefIndex,
956 Lrc<FxHashMap<ItemLocalId,
957 Lrc<StableVec<TraitCandidate>>>>>,
959 /// Export map produced by name resolution.
960 export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,
962 hir_map: hir_map::Map<'tcx>,
964 /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
965 /// as well as all upstream crates. Only populated in incremental mode.
966 pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
968 pub queries: query::Queries<'tcx>,
970 // Records the free variables referenced by every closure
971 // expression. Do not track deps for this, just recompute it from
972 // scratch every time.
973 freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,
975 maybe_unused_trait_imports: FxHashSet<DefId>,
976 maybe_unused_extern_crates: Vec<(DefId, Span)>,
977 /// Extern prelude entries. The value is `true` if the entry was introduced
978 /// via `extern crate` item and not `--extern` option or compiler built-in.
979 pub extern_prelude: FxHashMap<ast::Name, bool>,
981 // Internal cache for metadata decoding. No need to track deps on this.
982 pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
984 /// Caches the results of trait selection. This cache is used
985 /// for things that do not have to do with the parameters in scope.
986 pub selection_cache: traits::SelectionCache<'tcx>,
988 /// Caches the results of trait evaluation. This cache is used
989 /// for things that do not have to do with the parameters in scope.
990 /// Merge this with `selection_cache`?
991 pub evaluation_cache: traits::EvaluationCache<'tcx>,
993 /// The definite name of the current crate after taking into account
994 /// attributes, commandline parameters, etc.
995 pub crate_name: Symbol,
997 /// Data layout specification for the current target.
998 pub data_layout: TargetDataLayout,
// Deduplication tables for stability attrs, const allocations and layouts.
1000 stability_interner: Lock<FxHashMap<&'tcx attr::Stability, ()>>,
1002 /// Stores the value of constants (and deduplicates the actual memory)
1003 allocation_interner: Lock<FxHashMap<&'tcx Allocation, ()>>,
1005 pub alloc_map: Lock<interpret::AllocMap<'tcx>>,
1007 layout_interner: Lock<FxHashMap<&'tcx LayoutDetails, ()>>,
1009 /// A general purpose channel to throw data out the back towards LLVM worker
1012 /// This is intended to only get used during the codegen phase of the compiler
1013 /// when satisfying the query for a particular codegen unit. Internally in
1014 /// the query it'll send data along this channel to get processed later.
1015 pub tx_to_llvm_workers: Lock<mpsc::Sender<Box<dyn Any + Send>>>,
1017 output_filenames: Arc<OutputFilenames>,
1020 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
1021 /// Get the global TyCtxt.
1023 pub fn global_tcx(self) -> TyCtxt<'gcx, 'gcx, 'gcx> {
1026 interners: &self.gcx.global_interners,
1032 pub fn hir(self) -> &'a hir_map::Map<'gcx> {
1036 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
1037 self.global_arenas.generics.alloc(generics)
1040 pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
1041 self.global_arenas.steal_mir.alloc(Steal::new(mir))
1044 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
1045 self.global_arenas.mir.alloc(mir)
1048 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
1049 self.global_arenas.tables.alloc(tables)
1052 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
1053 self.global_arenas.trait_def.alloc(def)
1056 pub fn alloc_adt_def(self,
1059 variants: IndexVec<VariantIdx, ty::VariantDef>,
1061 -> &'gcx ty::AdtDef {
1062 let def = ty::AdtDef::new(self, did, kind, variants, repr);
1063 self.global_arenas.adt_def.alloc(def)
1066 pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
1067 if bytes.is_empty() {
1070 self.global_interners.arena.alloc_slice(bytes)
1074 pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
1075 -> &'tcx [&'tcx ty::Const<'tcx>] {
1076 if values.is_empty() {
1079 self.interners.arena.alloc_slice(values)
1083 pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
1084 -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
1085 if values.is_empty() {
1088 self.interners.arena.alloc_slice(values)
// The four methods below share one dedup-interning pattern: look the value up
// in a `Lock<FxHashMap<&'gcx T, ()>>` interner and, only on a miss, move it
// into the corresponding global arena via the supplied closure. The returned
// reference is therefore unique per value for the lifetime of the context.
// (Closing braces between methods are elided in this excerpt.)
1092 pub fn intern_const_alloc(
1095 ) -> &'gcx Allocation {
1096 self.allocation_interner.borrow_mut().intern(alloc, |alloc| {
1097 self.global_arenas.const_allocs.alloc(alloc)
1101 /// Allocates a byte or string literal for `mir::interpret`, read-only
1102 pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
1103 // create an allocation that just contains these bytes
1104 let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes, ());
// Deduplicate the backing memory first, then hand out a fresh AllocId for it.
1105 let alloc = self.intern_const_alloc(alloc);
1106 self.alloc_map.lock().allocate(alloc)
1109 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
1110 self.stability_interner.borrow_mut().intern(stab, |stab| {
1111 self.global_interners.arena.alloc(stab)
1115 pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
1116 self.layout_interner.borrow_mut().intern(layout, |layout| {
1117 self.global_arenas.layout.alloc(layout)
1121 /// Returns a range of the start/end indices specified with the
1122 /// `rustc_layout_scalar_valid_range` attribute.
// Returns `(start_bound, end_bound)` as `std::ops::Bound<u128>` values:
// `Unbounded` when the corresponding attribute is absent, `Included(n)` for
// the attribute's integer argument. Malformed attributes are compiler bugs
// and abort via `span_bug!`.
1123 pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound<u128>, Bound<u128>) {
1124 let attrs = self.get_attrs(def_id);
// NOTE(review): the declaration of the local `get` closure (taking the
// attribute `name`) is elided from this excerpt; the lines below are its body.
1126 let attr = match attrs.iter().find(|a| a.check_name(name)) {
1128 None => return Bound::Unbounded,
1130 for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") {
1131 match meta.literal().expect("attribute takes lit").node {
// First integer literal wins; the bound is inclusive.
1132 ast::LitKind::Int(a, _) => return Bound::Included(a),
1133 _ => span_bug!(attr.span, "rustc_layout_scalar_valid_range expects int arg"),
// Reached only if the attribute had an (empty) argument list with no literal.
1136 span_bug!(attr.span, "no arguments to `rustc_layout_scalar_valid_range` attribute");
1138 (get("rustc_layout_scalar_valid_range_start"), get("rustc_layout_scalar_valid_range_end"))
// Attempt to convert `value` into this context's lifetime via the `Lift`
// trait (see the `Lift` trait definition below); returns `None` when the
// value was not interned in (a parent of) this context.
1141 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
1142 value.lift_to_tcx(self)
1145 /// Like lift, but only tries in the global tcx.
1146 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
1147 value.lift_to_tcx(self.global_tcx())
1150 /// Returns true if self is the same as self.global_tcx().
1151 fn is_global(self) -> bool {
// Pointer identity of the interner sets decides globality: a local tcx has
// its own `CtxtInterners`, so the two addresses differ.
1152 let local = self.interners as *const _;
1153 let global = &self.global_interners as *const _;
1154 local as usize == global as usize
1157 /// Create a type context and call the closure with a `TyCtxt` reference
1158 /// to the context. The closure enforces that the type context and any interned
1159 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
1160 /// reference to the context, to allow formatting values that need it.
1161 pub fn create_and_enter<F, R>(s: &'tcx Session,
1162 cstore: &'tcx CrateStoreDyn,
1163 local_providers: ty::query::Providers<'tcx>,
1164 extern_providers: ty::query::Providers<'tcx>,
1165 arenas: &'tcx mut AllArenas<'tcx>,
1166 resolutions: ty::Resolutions,
1167 hir: hir_map::Map<'tcx>,
1168 on_disk_query_result_cache: query::OnDiskCache<'tcx>,
1170 tx: mpsc::Sender<Box<dyn Any + Send>>,
1171 output_filenames: &OutputFilenames,
1173 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
1175 let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
1178 let interners = CtxtInterners::new(&arenas.interner);
1179 let common_types = CommonTypes::new(&interners);
1180 let dep_graph = hir.dep_graph.clone();
1181 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1182 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1183 providers[LOCAL_CRATE] = local_providers;
1185 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1186 let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
1189 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
1192 let def_path_tables = || {
1193 upstream_def_path_tables
1195 .map(|&(cnum, ref rc)| (cnum, &**rc))
1196 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1199 // Precompute the capacity of the hashmap so we don't have to
1200 // re-allocate when populating it.
1201 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();
1203 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1205 ::std::default::Default::default()
1208 for (cnum, def_path_table) in def_path_tables() {
1209 def_path_table.add_def_path_hashes_to(cnum, &mut map);
1217 let mut trait_map: FxHashMap<_, Lrc<FxHashMap<_, _>>> = FxHashMap::default();
1218 for (k, v) in resolutions.trait_map {
1219 let hir_id = hir.node_to_hir_id(k);
1220 let map = trait_map.entry(hir_id.owner).or_default();
1221 Lrc::get_mut(map).unwrap()
1222 .insert(hir_id.local_id,
1223 Lrc::new(StableVec::new(v)));
1226 arenas.global_ctxt = Some(GlobalCtxt {
1229 global_arenas: &arenas.global,
1230 global_interners: interners,
1232 types: common_types,
1234 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1237 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1238 (hir.local_def_id(k), Lrc::new(v))
1240 maybe_unused_trait_imports:
1241 resolutions.maybe_unused_trait_imports
1243 .map(|id| hir.local_def_id(id))
1245 maybe_unused_extern_crates:
1246 resolutions.maybe_unused_extern_crates
1248 .map(|(id, sp)| (hir.local_def_id(id), sp))
1250 extern_prelude: resolutions.extern_prelude,
1252 def_path_hash_to_def_id,
1253 queries: query::Queries::new(
1256 on_disk_query_result_cache,
1258 rcache: Default::default(),
1259 selection_cache: Default::default(),
1260 evaluation_cache: Default::default(),
1261 crate_name: Symbol::intern(crate_name),
1263 layout_interner: Default::default(),
1264 stability_interner: Default::default(),
1265 allocation_interner: Default::default(),
1266 alloc_map: Lock::new(interpret::AllocMap::new()),
1267 tx_to_llvm_workers: Lock::new(tx),
1268 output_filenames: Arc::new(output_filenames.clone()),
1271 let gcx = arenas.global_ctxt.as_ref().unwrap();
1273 sync::assert_send_val(&gcx);
1275 tls::enter_global(gcx, f)
1278 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1279 let cname = self.crate_name(LOCAL_CRATE).as_str();
1280 self.sess.consider_optimizing(&cname, msg)
1283 pub fn lib_features(self) -> Lrc<middle::lib_features::LibFeatures> {
1284 self.get_lib_features(LOCAL_CRATE)
1287 pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
1288 self.get_lang_items(LOCAL_CRATE)
1291 /// Due to missing llvm support for lowering 128 bit math to software emulation
1292 /// (on some targets), the lowering can be done in MIR.
1294 /// This function only exists until said support is implemented.
// Maps a `DefId` of one of the 128-bit integer arithmetic lang items to the
// MIR binary operator it implements. The `bool` in the result distinguishes
// the overflow-checking variants (`*o_fn`, true) from the plain ones (false).
// Signed (`i128_*`) and unsigned (`u128_*`) items map to the same `BinOp`.
// NOTE(review): the trailing `else { None }` arm is elided from this excerpt.
1295 pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
1296 let items = self.lang_items();
// Wrap in `Some` once so each comparison below is `Option == Option`.
1297 let def_id = Some(def_id);
1298 if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1299 else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1300 else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1301 else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1302 else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1303 else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1304 else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1305 else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1306 else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1307 else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1308 else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1309 else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1310 else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1311 else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1312 else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1313 else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1314 else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1315 else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1316 else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1317 else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1318 else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1319 else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1320 else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1321 else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1325 pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
1326 self.stability_index(LOCAL_CRATE)
1329 pub fn crates(self) -> Lrc<Vec<CrateNum>> {
1330 self.all_crate_nums(LOCAL_CRATE)
1333 pub fn features(self) -> Lrc<feature_gate::Features> {
1334 self.features_query(LOCAL_CRATE)
// The three lookups below all dispatch on whether the `DefId` belongs to the
// local crate: local ids are resolved through the HIR map, foreign ids
// through the crate store. NOTE(review): for `def_key` and `def_path` the
// `if id.is_local()` / `else` lines are elided from this excerpt; only
// `def_path_hash` shows the dispatch explicitly.
1337 pub fn def_key(self, id: DefId) -> hir_map::DefKey {
1339 self.hir().def_key(id)
1341 self.cstore.def_key(id)
1345 /// Convert a `DefId` into its fully expanded `DefPath` (every
1346 /// `DefId` is really just an interned def-path).
1348 /// Note that if `id` is not local to this crate, the result will
1349 /// be a non-local `DefPath`.
1350 pub fn def_path(self, id: DefId) -> hir_map::DefPath {
1352 self.hir().def_path(id)
1354 self.cstore.def_path(id)
1359 pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
1360 if def_id.is_local() {
1361 self.hir().definitions().def_path_hash(def_id.index)
1363 self.cstore.def_path_hash(def_id)
1367 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1368 // We are explicitly not going through queries here in order to get
1369 // crate name and disambiguator since this code is called from debug!()
1370 // statements within the query system and we'd run into endless
1371 // recursion otherwise.
1372 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1373 (self.crate_name.clone(),
1374 self.sess.local_crate_disambiguator())
1376 (self.cstore.crate_name_untracked(def_id.krate),
1377 self.cstore.crate_disambiguator_untracked(def_id.krate))
1382 // Don't print the whole crate disambiguator. That's just
1383 // annoying in debug output.
1384 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1385 self.def_path(def_id).to_string_no_crate())
1388 pub fn metadata_encoding_version(self) -> Vec<u8> {
1389 self.cstore.metadata_encoding_version().to_vec()
1392 // Note that this is *untracked* and should only be used within the query
1393 // system if the result is otherwise tracked through queries
1394 pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
1395 self.cstore.crate_data_as_rc_any(cnum)
1399 pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
1400 let krate = self.gcx.hir_map.forest.untracked_krate();
1402 StableHashingContext::new(self.sess,
1404 self.hir().definitions(),
1408 // This method makes sure that we have a DepNode and a Fingerprint for
1409 // every upstream crate. It needs to be called once right after the tcx is
1411 // With full-fledged red/green, the method will probably become unnecessary
1412 // as this will be done on-demand.
1413 pub fn allocate_metadata_dep_nodes(self) {
1414 // We cannot use the query versions of crates() and crate_hash(), since
1415 // those would need the DepNodes that we are allocating here.
1416 for cnum in self.cstore.crates_untracked() {
1417 let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
1418 let crate_hash = self.cstore.crate_hash_untracked(cnum);
1419 self.dep_graph.with_task(dep_node,
1422 |_, x| x // No transformation needed
1427 // This method exercises the `in_scope_traits_map` query for all possible
1428 // values so that we have their fingerprints available in the DepGraph.
1429 // This is only required as long as we still use the old dependency tracking
1430 // which needs to have the fingerprints of all input nodes beforehand.
1431 pub fn precompute_in_scope_traits_hashes(self) {
1432 for &def_index in self.trait_map.keys() {
1433 self.in_scope_traits_map(def_index);
1437 pub fn serialize_query_result_cache<E>(self,
1439 -> Result<(), E::Error>
1440 where E: ty::codec::TyEncoder
1442 self.queries.on_disk_cache.serialize(self.global_tcx(), encoder)
// The group of predicate helpers below derives match/borrowck behavior flags
// from `borrowck_mode()`, feature gates, and `-Z` debugging options. Each is
// a thin boolean query; closing braces between methods are elided in this
// excerpt.
1445 /// This checks whether one is allowed to have pattern bindings
1446 /// that bind-by-move on a match arm that has a guard, e.g.:
1449 /// match foo { A(inner) if { /* something */ } => ..., ... }
1452 /// It is separate from check_for_mutation_in_guard_via_ast_walk,
1453 /// because that method has a narrower effect that can be toggled
1454 /// off via a separate `-Z` flag, at least for the short term.
1455 pub fn allow_bind_by_move_patterns_with_guards(self) -> bool {
1456 self.features().bind_by_move_pattern_guards && self.use_mir_borrowck()
1459 /// If true, we should use a naive AST walk to determine if match
1460 /// guard could perform bad mutations (or mutable-borrows).
1461 pub fn check_for_mutation_in_guard_via_ast_walk(self) -> bool {
1462 // If someone requests the feature, then be a little more
1463 // careful and ensure that MIR-borrowck is enabled (which can
1464 // happen via edition selection, via `feature(nll)`, or via an
1465 // appropriate `-Z` flag) before disabling the mutation check.
1466 if self.allow_bind_by_move_patterns_with_guards() {
1473 /// If true, we should use the AST-based borrowck (we may *also* use
1474 /// the MIR-based borrowck).
1475 pub fn use_ast_borrowck(self) -> bool {
1476 self.borrowck_mode().use_ast()
1479 /// If true, we should use the MIR-based borrowck (we may *also* use
1480 /// the AST-based borrowck).
1481 pub fn use_mir_borrowck(self) -> bool {
1482 self.borrowck_mode().use_mir()
1485 /// If true, we should use the MIR-based borrow check, but also
1486 /// fall back on the AST borrow check if the MIR-based one errors.
1487 pub fn migrate_borrowck(self) -> bool {
1488 self.borrowck_mode().migrate()
1491 /// If true, make MIR codegen for `match` emit a temp that holds a
1492 /// borrow of the input to the match expression.
1493 pub fn generate_borrow_of_any_match_input(&self) -> bool {
1494 self.emit_read_for_match()
1497 /// If true, make MIR codegen for `match` emit FakeRead
1498 /// statements (which simulate the maximal effect of executing the
1499 /// patterns in a match arm).
1500 pub fn emit_read_for_match(&self) -> bool {
1501 self.use_mir_borrowck() && !self.sess.opts.debugging_opts.nll_dont_emit_read_for_match
1504 /// If true, pattern variables for use in guards on match arms
1505 /// will be bound as references to the data, and occurrences of
1506 /// those variables in the guard expression will implicitly
1507 /// dereference those bindings. (See rust-lang/rust#27282.)
1508 pub fn all_pat_vars_are_implicit_refs_within_guards(self) -> bool {
1509 self.borrowck_mode().use_mir()
1512 /// If true, we should enable two-phase borrows checks. This is
1513 /// done with either: `-Ztwo-phase-borrows`, `#![feature(nll)]`,
1514 /// or by opting into an edition after 2015.
1515 pub fn two_phase_borrows(self) -> bool {
1516 self.sess.rust_2018() || self.features().nll ||
1517 self.sess.opts.debugging_opts.two_phase_borrows
1520 /// What mode(s) of borrowck should we run? AST? MIR? both?
1521 /// (Also considers the `#![feature(nll)]` setting.)
// Resolution order, per the comment block below: `#![feature(nll)]` forces
// `Mir`; an explicit `-Z borrowck={mir,compare,migrate}` is honored; the
// default/`ast` case falls back to the edition (2015 -> Ast, 2018 -> Migrate).
1522 pub fn borrowck_mode(&self) -> BorrowckMode {
1523 // Here are the main constraints we need to deal with:
1525 // 1. An opts.borrowck_mode of `BorrowckMode::Ast` is
1526 // synonymous with no `-Z borrowck=...` flag at all.
1527 // (This is arguably a historical accident.)
1529 // 2. `BorrowckMode::Migrate` is the limited migration to
1530 // NLL that we are deploying with the 2018 edition.
1532 // 3. We want to allow developers on the Nightly channel
1533 // to opt back into the "hard error" mode for NLL,
1534 // (which they can do via specifying `#![feature(nll)]`
1535 // explicitly in their crate).
1537 // So, this precedence list is how pnkfelix chose to work with
1538 // the above constraints:
1540 // * `#![feature(nll)]` *always* means use NLL with hard
1541 // errors. (To simplify the code here, it now even overrides
1542 // a user's attempt to specify `-Z borrowck=compare`, which
1543 // we arguably do not need anymore and should remove.)
1545 // * Otherwise, if no `-Z borrowck=...` flag was given (or
1546 // if `borrowck=ast` was specified), then use the default
1547 // as required by the edition.
1549 // * Otherwise, use the behavior requested via `-Z borrowck=...`
1551 if self.features().nll { return BorrowckMode::Mir; }
1553 match self.sess.opts.borrowck_mode {
1554 mode @ BorrowckMode::Mir |
1555 mode @ BorrowckMode::Compare |
1556 mode @ BorrowckMode::Migrate => mode,
1558 BorrowckMode::Ast => match self.sess.edition() {
1559 Edition::Edition2015 => BorrowckMode::Ast,
1560 Edition::Edition2018 => BorrowckMode::Migrate,
1566 pub fn local_crate_exports_generics(self) -> bool {
1567 debug_assert!(self.sess.opts.share_generics());
1569 self.sess.crate_types.borrow().iter().any(|crate_type| {
1571 CrateType::Executable |
1572 CrateType::Staticlib |
1573 CrateType::ProcMacro |
1574 CrateType::Cdylib => false,
1576 CrateType::Dylib => true,
1581 // This method returns the DefId and the BoundRegion corresponding to the given region.
1582 pub fn is_suitable_region(&self, region: Region<'tcx>) -> Option<FreeRegionInfo> {
1583 let (suitable_region_binding_scope, bound_region) = match *region {
1584 ty::ReFree(ref free_region) => (free_region.scope, free_region.bound_region),
1585 ty::ReEarlyBound(ref ebr) => (
1586 self.parent_def_id(ebr.def_id).unwrap(),
1587 ty::BoundRegion::BrNamed(ebr.def_id, ebr.name),
1589 _ => return None, // not a free region
1592 let node_id = self.hir()
1593 .as_local_node_id(suitable_region_binding_scope)
1595 let is_impl_item = match self.hir().find(node_id) {
1596 Some(Node::Item(..)) | Some(Node::TraitItem(..)) => false,
1597 Some(Node::ImplItem(..)) => {
1598 self.is_bound_region_in_impl_item(suitable_region_binding_scope)
1603 return Some(FreeRegionInfo {
1604 def_id: suitable_region_binding_scope,
1605 boundregion: bound_region,
1606 is_impl_item: is_impl_item,
1610 pub fn return_type_impl_trait(
1612 scope_def_id: DefId,
1613 ) -> Option<Ty<'tcx>> {
1614 // HACK: `type_of_def_id()` will fail on these (#55796), so return None
1615 let node_id = self.hir().as_local_node_id(scope_def_id).unwrap();
1616 match self.hir().get(node_id) {
1617 Node::Item(item) => {
1619 ItemKind::Fn(..) => { /* type_of_def_id() will work */ }
1625 _ => { /* type_of_def_id() will work or panic */ }
1628 let ret_ty = self.type_of(scope_def_id);
1630 ty::FnDef(_, _) => {
1631 let sig = ret_ty.fn_sig(*self);
1632 let output = self.erase_late_bound_regions(&sig.output());
1633 if output.is_impl_trait() {
1643 // Here we check if the bound region is in Impl Item.
1644 pub fn is_bound_region_in_impl_item(
1646 suitable_region_binding_scope: DefId,
1648 let container_id = self.associated_item(suitable_region_binding_scope)
1651 if self.impl_trait_ref(container_id).is_some() {
1652 // For now, we do not try to target impls of traits. This is
1653 // because this message is going to suggest that the user
1654 // change the fn signature, but they may not be free to do so,
1655 // since the signature must match the trait.
1657 // FIXME(#42706) -- in some cases, we could do better here.
1664 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
1665 pub fn encode_metadata(self)
1668 self.cstore.encode_metadata(self)
1672 impl<'gcx> GlobalCtxt<'gcx> {
1673 /// Call the closure with a local `TyCtxt` using the given arena.
1674 /// `interners` is a slot passed so we can create a CtxtInterners
1675 /// with the same lifetime as `arena`.
1676 pub fn enter_local<'tcx, F, R>(
1678 arena: &'tcx SyncDroplessArena,
1679 interners: &'tcx mut Option<CtxtInterners<'tcx>>,
1683 F: FnOnce(TyCtxt<'tcx, 'gcx, 'tcx>) -> R,
1686 *interners = Some(CtxtInterners::new(&arena));
1689 interners: interners.as_ref().unwrap(),
1692 ty::tls::with_related_context(tcx.global_tcx(), |icx| {
1693 let new_icx = ty::tls::ImplicitCtxt {
1695 query: icx.query.clone(),
1696 layout_depth: icx.layout_depth,
1699 ty::tls::enter_context(&new_icx, |_| {
1706 /// A trait implemented for all X<'a> types which can be safely and
1707 /// efficiently converted to X<'tcx> as long as they are part of the
1708 /// provided TyCtxt<'tcx>.
1709 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1710 /// by looking them up in their respective interners.
1712 /// However, this is still not the best implementation as it does
1713 /// need to compare the components, even for interned values.
1714 /// It would be more efficient if TypedArena provided a way to
1715 /// determine whether the address is in the allocated range.
1717 /// None is returned if the value or one of the components is not part
1718 /// of the provided context.
1719 /// For Ty, None can be returned if either the type interner doesn't
1720 /// contain the TyKind key or if the address of the interned
1721 /// pointer differs. The latter case is possible if a primitive type,
1722 /// e.g., `()` or `u8`, was interned in a different context.
1723 pub trait Lift<'tcx>: fmt::Debug {
// The lifted counterpart of `Self`, with every internal lifetime being 'tcx.
1724 type Lifted: fmt::Debug + 'tcx;
// Returns `Some` only when `self` (and all components) are interned in `tcx`
// or its global parent; implementations below check arena membership.
1725 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
// Representative `Lift` impl (the impls that follow for Region, Goal, Const,
// Substs, the various `List<_>`s, etc. use the same recipe): if the pointer
// lives in this context's interning arena, the lifetime change is sound and
// is performed with a transmute; otherwise retry against the global tcx.
1728 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1729 type Lifted = Ty<'tcx>;
1730 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1731 if tcx.interners.arena.in_arena(*self as *const _) {
// SAFETY (as per the arena check above): the value is owned by `tcx`'s
// arena, so extending its lifetime to 'tcx cannot dangle.
1732 return Some(unsafe { mem::transmute(*self) });
1734 // Also try in the global tcx if we're not that.
1735 if !tcx.is_global() {
1736 self.lift_to_tcx(tcx.global_tcx())
1743 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1744 type Lifted = Region<'tcx>;
1745 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1746 if tcx.interners.arena.in_arena(*self as *const _) {
1747 return Some(unsafe { mem::transmute(*self) });
1749 // Also try in the global tcx if we're not that.
1750 if !tcx.is_global() {
1751 self.lift_to_tcx(tcx.global_tcx())
1758 impl<'a, 'tcx> Lift<'tcx> for Goal<'a> {
1759 type Lifted = Goal<'tcx>;
1760 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Goal<'tcx>> {
1761 if tcx.interners.arena.in_arena(*self as *const _) {
1762 return Some(unsafe { mem::transmute(*self) });
1764 // Also try in the global tcx if we're not that.
1765 if !tcx.is_global() {
1766 self.lift_to_tcx(tcx.global_tcx())
1773 impl<'a, 'tcx> Lift<'tcx> for &'a List<Goal<'a>> {
1774 type Lifted = &'tcx List<Goal<'tcx>>;
1775 fn lift_to_tcx<'b, 'gcx>(
1777 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1778 ) -> Option<&'tcx List<Goal<'tcx>>> {
1779 if self.is_empty() {
1780 return Some(List::empty());
1783 if tcx.interners.arena.in_arena(*self as *const _) {
1784 return Some(unsafe { mem::transmute(*self) });
1786 // Also try in the global tcx if we're not that.
1787 if !tcx.is_global() {
1788 self.lift_to_tcx(tcx.global_tcx())
1795 impl<'a, 'tcx> Lift<'tcx> for &'a List<Clause<'a>> {
1796 type Lifted = &'tcx List<Clause<'tcx>>;
1797 fn lift_to_tcx<'b, 'gcx>(
1799 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1800 ) -> Option<&'tcx List<Clause<'tcx>>> {
1801 if self.is_empty() {
1802 return Some(List::empty());
1805 if tcx.interners.arena.in_arena(*self as *const _) {
1806 return Some(unsafe { mem::transmute(*self) });
1808 // Also try in the global tcx if we're not that.
1809 if !tcx.is_global() {
1810 self.lift_to_tcx(tcx.global_tcx())
1817 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1818 type Lifted = &'tcx Const<'tcx>;
1819 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1820 if tcx.interners.arena.in_arena(*self as *const _) {
1821 return Some(unsafe { mem::transmute(*self) });
1823 // Also try in the global tcx if we're not that.
1824 if !tcx.is_global() {
1825 self.lift_to_tcx(tcx.global_tcx())
1832 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1833 type Lifted = &'tcx Substs<'tcx>;
1834 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1835 if self.len() == 0 {
1836 return Some(List::empty());
1838 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1839 return Some(unsafe { mem::transmute(*self) });
1841 // Also try in the global tcx if we're not that.
1842 if !tcx.is_global() {
1843 self.lift_to_tcx(tcx.global_tcx())
1850 impl<'a, 'tcx> Lift<'tcx> for &'a List<Ty<'a>> {
1851 type Lifted = &'tcx List<Ty<'tcx>>;
1852 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1853 -> Option<&'tcx List<Ty<'tcx>>> {
1854 if self.len() == 0 {
1855 return Some(List::empty());
1857 if tcx.interners.arena.in_arena(*self as *const _) {
1858 return Some(unsafe { mem::transmute(*self) });
1860 // Also try in the global tcx if we're not that.
1861 if !tcx.is_global() {
1862 self.lift_to_tcx(tcx.global_tcx())
1869 impl<'a, 'tcx> Lift<'tcx> for &'a List<ExistentialPredicate<'a>> {
1870 type Lifted = &'tcx List<ExistentialPredicate<'tcx>>;
1871 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1872 -> Option<&'tcx List<ExistentialPredicate<'tcx>>> {
1873 if self.is_empty() {
1874 return Some(List::empty());
1876 if tcx.interners.arena.in_arena(*self as *const _) {
1877 return Some(unsafe { mem::transmute(*self) });
1879 // Also try in the global tcx if we're not that.
1880 if !tcx.is_global() {
1881 self.lift_to_tcx(tcx.global_tcx())
1888 impl<'a, 'tcx> Lift<'tcx> for &'a List<Predicate<'a>> {
1889 type Lifted = &'tcx List<Predicate<'tcx>>;
1890 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1891 -> Option<&'tcx List<Predicate<'tcx>>> {
1892 if self.is_empty() {
1893 return Some(List::empty());
1895 if tcx.interners.arena.in_arena(*self as *const _) {
1896 return Some(unsafe { mem::transmute(*self) });
1898 // Also try in the global tcx if we're not that.
1899 if !tcx.is_global() {
1900 self.lift_to_tcx(tcx.global_tcx())
1907 impl<'a, 'tcx> Lift<'tcx> for &'a List<CanonicalVarInfo> {
1908 type Lifted = &'tcx List<CanonicalVarInfo>;
1909 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1910 if self.len() == 0 {
1911 return Some(List::empty());
1913 if tcx.interners.arena.in_arena(*self as *const _) {
1914 return Some(unsafe { mem::transmute(*self) });
1916 // Also try in the global tcx if we're not that.
1917 if !tcx.is_global() {
1918 self.lift_to_tcx(tcx.global_tcx())
1925 impl<'a, 'tcx> Lift<'tcx> for &'a List<ProjectionKind<'a>> {
1926 type Lifted = &'tcx List<ProjectionKind<'tcx>>;
1927 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1928 if self.len() == 0 {
1929 return Some(List::empty());
1931 if tcx.interners.arena.in_arena(*self as *const _) {
1932 return Some(unsafe { mem::transmute(*self) });
1934 // Also try in the global tcx if we're not that.
1935 if !tcx.is_global() {
1936 self.lift_to_tcx(tcx.global_tcx())
1944 use super::{GlobalCtxt, TyCtxt};
1948 use std::marker::PhantomData;
1951 use errors::{Diagnostic, TRACK_DIAGNOSTICS};
1952 use rustc_data_structures::OnDrop;
1953 use rustc_data_structures::sync::{self, Lrc, Lock};
1954 use dep_graph::OpenTask;
1956 #[cfg(not(parallel_queries))]
1957 use std::cell::Cell;
1959 #[cfg(parallel_queries)]
1962 /// This is the implicit state of rustc. It contains the current
1963 /// TyCtxt and query. It is updated when creating a local interner or
1964 /// executing a new query. Whenever there's a TyCtxt value available
1965 /// you should also have access to an ImplicitCtxt through the functions
1968 pub struct ImplicitCtxt<'a, 'gcx: 'tcx, 'tcx> {
1969 /// The current TyCtxt. Initially created by `enter_global` and updated
1970 /// by `enter_local` with a new local interner
1971 pub tcx: TyCtxt<'tcx, 'gcx, 'tcx>,
1973 /// The current query job, if any. This is updated by start_job in
1974 /// ty::query::plumbing when executing a query
1975 pub query: Option<Lrc<query::QueryJob<'gcx>>>,
1977 /// Used to prevent layout from recursing too deeply.
1978 pub layout_depth: usize,
1980 /// The current dep graph task. This is used to add dependencies to queries
1981 /// when executing them
1982 pub task: &'a OpenTask,
1985 /// Sets Rayon's thread local variable which is preserved for Rayon jobs
1986 /// to `value` during the call to `f`. It is restored to its previous value after.
1987 /// This is used to set the pointer to the new ImplicitCtxt.
1988 #[cfg(parallel_queries)]
1990 fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
1991 rayon_core::tlv::with(value, f)
1994 /// Gets Rayon's thread local variable which is preserved for Rayon jobs.
1995 /// This is used to get the pointer to the current ImplicitCtxt.
1996 #[cfg(parallel_queries)]
1998 fn get_tlv() -> usize {
1999 rayon_core::tlv::get()
2002 /// A thread local variable which stores a pointer to the current ImplicitCtxt
2003 #[cfg(not(parallel_queries))]
2004 thread_local!(static TLV: Cell<usize> = Cell::new(0));
2006 /// Sets TLV to `value` during the call to `f`.
2007 /// It is restored to its previous value after.
2008 /// This is used to set the pointer to the new ImplicitCtxt.
2009 #[cfg(not(parallel_queries))]
2011 fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
2012 let old = get_tlv();
2013 let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
2014 TLV.with(|tlv| tlv.set(value));
2018 /// This is used to get the pointer to the current ImplicitCtxt.
2019 #[cfg(not(parallel_queries))]
2020 fn get_tlv() -> usize {
2021 TLV.with(|tlv| tlv.get())
2024 /// This is a callback from libsyntax as it cannot access the implicit state
2025 /// in librustc otherwise
2026 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2028 if let Some(tcx) = tcx {
2029 write!(f, "{}", tcx.sess.source_map().span_to_string(span))
2031 syntax_pos::default_span_debug(span, f)
2036 /// This is a callback from libsyntax as it cannot access the implicit state
2037 /// in librustc otherwise. It is used to when diagnostic messages are
2038 /// emitted and stores them in the current query, if there is one.
2039 fn track_diagnostic(diagnostic: &Diagnostic) {
2040 with_context_opt(|icx| {
2041 if let Some(icx) = icx {
2042 if let Some(ref query) = icx.query {
2043 query.diagnostics.lock().push(diagnostic.clone());
2049 /// Sets up the callbacks from libsyntax on the current thread
2050 pub fn with_thread_locals<F, R>(f: F) -> R
2051 where F: FnOnce() -> R
2053 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
2054 let original_span_debug = span_dbg.get();
2055 span_dbg.set(span_debug);
2057 let _on_drop = OnDrop(move || {
2058 span_dbg.set(original_span_debug);
2061 TRACK_DIAGNOSTICS.with(|current| {
2062 let original = current.get();
2063 current.set(track_diagnostic);
2065 let _on_drop = OnDrop(move || {
2066 current.set(original);
2074 /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`.
2076 pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
2078 where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
// Store the context's address in the thread-local slot while `f` runs.
2080 set_tlv(context as *const _ as usize, || {
2085 /// Enters GlobalCtxt by setting up libsyntax callbacks and
2086 /// creating an initial TyCtxt and ImplicitCtxt.
2087 /// This happens once per rustc session and TyCtxts only exist
2088 /// inside the `f` function.
2089 pub fn enter_global<'gcx, F, R>(gcx: &'gcx GlobalCtxt<'gcx>, f: F) -> R
2090 where F: FnOnce(TyCtxt<'gcx, 'gcx, 'gcx>) -> R
2092 with_thread_locals(|| {
2093 // Update GCX_PTR to indicate there's a GlobalCtxt available
2094 GCX_PTR.with(|lock| {
2095 *lock.lock() = gcx as *const _ as usize;
2097 // Set GCX_PTR back to 0 when we exit
2098 let _on_drop = OnDrop(move || {
2099 GCX_PTR.with(|lock| *lock.lock() = 0);
// Build a TyCtxt over the global interners (construction lines are elided
// in this excerpt), wrap it in an ImplicitCtxt, and enter it around `f`.
2104 interners: &gcx.global_interners,
2107 let icx = ImplicitCtxt {
2111 task: &OpenTask::Ignore,
2113 enter_context(&icx, |_| {
2119 /// Stores a pointer to the GlobalCtxt if one is available.
2120 /// This is used to access the GlobalCtxt in the deadlock handler.
2122 scoped_thread_local!(pub static GCX_PTR: Lock<usize>);
2124 /// Creates a TyCtxt and ImplicitCtxt based on the GCX_PTR thread local.
2125 /// This is used in the deadlock handler.
// NOTE(review): `unsafe` because it reinterprets the raw address stored in
// GCX_PTR as a live GlobalCtxt; callers must ensure one is registered
// (set by `enter_global` above).
2126 pub unsafe fn with_global<F, R>(f: F) -> R
2127 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
2129 let gcx = GCX_PTR.with(|lock| *lock.lock());
// SAFETY(review): dereferences the stored address; a zero (unset) value
// would be UB here — the elided lines presumably guard against that. TODO confirm.
2131 let gcx = &*(gcx as *const GlobalCtxt<'_>);
2134 interners: &gcx.global_interners,
2137 let icx = ImplicitCtxt {
2141 task: &OpenTask::Ignore,
2143 enter_context(&icx, |_| f(tcx))
2146 /// Allows access to the current ImplicitCtxt in a closure if one is available.
2148 pub fn with_context_opt<F, R>(f: F) -> R
2149 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
2151 let context = get_tlv();
// The TLV value is an ImplicitCtxt address; the zero/"no context" branch is
// elided in this excerpt.
2155 // We could get an ImplicitCtxt pointer from another thread.
2156 // Ensure that ImplicitCtxt is Sync
2157 sync::assert_sync::<ImplicitCtxt<'_, '_, '_>>();
2159 unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_, '_>))) }
2163 /// Allows access to the current ImplicitCtxt.
2164 /// Panics if there is no ImplicitCtxt available.
2166 pub fn with_context<F, R>(f: F) -> R
2167 where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
2169 with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
2172 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
2173 /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
2174 /// with the same 'gcx lifetime as the TyCtxt passed in.
2175 /// This will panic if you pass it a TyCtxt which has a different global interner from
2176 /// the current ImplicitCtxt's tcx field.
2178 pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
2179 where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
2181 with_context(|context| {
// Compare GlobalCtxt addresses to justify the lifetime-only transmute below.
2183 let gcx = tcx.gcx as *const _ as usize;
2184 assert!(context.tcx.gcx as *const _ as usize == gcx);
// SAFETY(review): the transmute changes only lifetimes; the assert above
// established both contexts share the same GlobalCtxt.
2185 let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context);
2191 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
2192 /// interner and local interner as the tcx argument passed in. This means the closure
2193 /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
2194 /// This will panic if you pass it a TyCtxt which has a different global interner or
2195 /// a different local interner from the current ImplicitCtxt's tcx field.
2197 pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
2198 where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
2200 with_context(|context| {
// Both the global and the local interner must match before the lifetimes of
// `context` may be reinterpreted.
2202 let gcx = tcx.gcx as *const _ as usize;
2203 let interners = tcx.interners as *const _ as usize;
2204 assert!(context.tcx.gcx as *const _ as usize == gcx);
2205 assert!(context.tcx.interners as *const _ as usize == interners);
2206 let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context);
2212 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2213 /// Panics if there is no ImplicitCtxt available.
2215 pub fn with<F, R>(f: F) -> R
2216 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
2218 with_context(|context| f(context.tcx))
2221 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2222 /// The closure is passed None if there is no ImplicitCtxt available.
2224 pub fn with_opt<F, R>(f: F) -> R
2225 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
2227 with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
// Debug-only macro: prints, for each listed TyKind variant, how many interned
// types exist and what fraction contain region/type inference variables.
2231 macro_rules! sty_debug_print {
2232 ($ctxt: expr, $($variant: ident),*) => {{
2233 // curious inner module to allow variant names to be used as
2235 #[allow(non_snake_case)]
2237 use ty::{self, TyCtxt};
2238 use ty::context::Interned;
// Per-variant counters: total interned types plus how many mention region
// inference vars, type inference vars, or both.
2240 #[derive(Copy, Clone)]
2243 region_infer: usize,
2248 pub fn go(tcx: TyCtxt<'_, '_, '_>) {
2249 let mut total = DebugStat {
2251 region_infer: 0, ty_infer: 0, both_infer: 0,
2253 $(let mut $variant = total;)*
// Walk every interned type and bucket it by its TyKind variant; primitive
// kinds and ty::Error are skipped as uninteresting.
2255 for &Interned(t) in tcx.interners.type_.borrow().keys() {
2256 let variant = match t.sty {
2257 ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
2258 ty::Float(..) | ty::Str | ty::Never => continue,
2259 ty::Error => /* unimportant */ continue,
2260 $(ty::$variant(..) => &mut $variant,)*
2262 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
2263 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
2267 if region { total.region_infer += 1; variant.region_infer += 1 }
2268 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
2269 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
// Render one row per variant, then a totals row, as percentages of all types.
2271 println!("Ty interner total ty region both");
2272 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
2273 {ty:4.1}% {region:5.1}% {both:4.1}%",
2274 stringify!($variant),
2275 uses = $variant.total,
2276 usespc = $variant.total as f64 * 100.0 / total.total as f64,
2277 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
2278 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
2279 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
2281 println!(" total {uses:6} \
2282 {ty:4.1}% {region:5.1}% {both:4.1}%",
2284 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
2285 region = total.region_infer as f64 * 100.0 / total.total as f64,
2286 both = total.both_infer as f64 * 100.0 / total.total as f64)
2294 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Debug dump of interner sizes; the variant list is forwarded to
// sty_debug_print! (the invocation line itself is elided in this excerpt).
2295 pub fn print_debug_stats(self) {
2298 Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr, Placeholder,
2299 Generator, GeneratorWitness, Dynamic, Closure, Tuple, Bound,
2300 Param, Infer, UnnormalizedProjection, Projection, Opaque, Foreign);
// Sizes of the remaining interning tables, one line each.
2302 println!("Substs interner: #{}", self.interners.substs.borrow().len());
2303 println!("Region interner: #{}", self.interners.region.borrow().len());
2304 println!("Stability interner: #{}", self.stability_interner.borrow().len());
2305 println!("Allocation interner: #{}", self.allocation_interner.borrow().len());
2306 println!("Layout interner: #{}", self.layout_interner.borrow().len());
2311 /// An entry in an interner.
2312 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
// Hand-written Clone/Copy so they apply for any `T: ?Sized`; only the shared
// reference is copied.
2314 impl<'tcx, T: 'tcx+?Sized> Clone for Interned<'tcx, T> {
2315 fn clone(&self) -> Self {
2319 impl<'tcx, T: 'tcx+?Sized> Copy for Interned<'tcx, T> {}
2321 // N.B., an `Interned<Ty>` compares and hashes as a sty.
2322 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
2323 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
2324 self.0.sty == other.0.sty
2328 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
2330 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
2331 fn hash<H: Hasher>(&self, s: &mut H) {
// Borrow as the bare TyKind so interner lookups can be keyed by a TyKind
// without building a full TyS.
2336 impl<'tcx: 'lcx, 'lcx> Borrow<TyKind<'lcx>> for Interned<'tcx, TyS<'tcx>> {
2337 fn borrow<'a>(&'a self) -> &'a TyKind<'lcx> {
2342 // N.B., an `Interned<List<T>>` compares and hashes as its elements.
2343 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List<T>> {
2344 fn eq(&self, other: &Interned<'tcx, List<T>>) -> bool {
2345 self.0[..] == other.0[..]
2349 impl<'tcx, T: Eq> Eq for Interned<'tcx, List<T>> {}
2351 impl<'tcx, T: Hash> Hash for Interned<'tcx, List<T>> {
2352 fn hash<H: Hasher>(&self, s: &mut H) {
// The Borrow impls below let each interner table be queried by the unwrapped
// key type (a slice, kind, etc.) instead of an Interned wrapper.
2357 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, List<Ty<'tcx>>> {
2358 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
2363 impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, List<CanonicalVarInfo>> {
2364 fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
2369 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
2370 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
2375 impl<'tcx: 'lcx, 'lcx> Borrow<[ProjectionKind<'lcx>]>
2376 for Interned<'tcx, List<ProjectionKind<'tcx>>> {
2377 fn borrow<'a>(&'a self) -> &'a [ProjectionKind<'lcx>] {
2382 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
2383 fn borrow<'a>(&'a self) -> &'a RegionKind {
2388 impl<'tcx: 'lcx, 'lcx> Borrow<GoalKind<'lcx>> for Interned<'tcx, GoalKind<'tcx>> {
2389 fn borrow<'a>(&'a self) -> &'a GoalKind<'lcx> {
2394 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
2395 for Interned<'tcx, List<ExistentialPredicate<'tcx>>> {
2396 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
2401 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
2402 for Interned<'tcx, List<Predicate<'tcx>>> {
2403 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
2408 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
2409 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
2414 impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]>
2415 for Interned<'tcx, List<Clause<'tcx>>> {
2416 fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] {
2421 impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]>
2422 for Interned<'tcx, List<Goal<'tcx>>> {
2423 fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] {
// Generates a `TyCtxt::$method` that interns a value of type `$alloc` in the
// appropriate arena and returns the arena-allocated `&$ty`.
2428 macro_rules! intern_method {
2429 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
2432 $keep_in_local_tcx:expr) -> $ty:ty) => {
2433 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
2434 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
2435 let key = ($alloc_to_key)(&v);
2437 // HACK(eddyb) Depend on flags being accurate to
2438 // determine that all contents are in the global tcx.
2439 // See comments on Lift for why we can't use that.
// Values that mention inference types/regions go into the thread-local
// interner; everything else is interned globally (the `else` branch below).
2440 if ($keep_in_local_tcx)(&v) {
2441 self.interners.$name.borrow_mut().intern_ref(key, || {
2442 // Make sure we don't end up with inference
2443 // types/regions in the global tcx.
2444 if self.is_global() {
2445 bug!("Attempted to intern `{:?}` which contains \
2446 inference types/regions in the global type context",
2450 Interned($alloc_method(&self.interners.arena, v))
2453 self.global_interners.$name.borrow_mut().intern_ref(key, || {
2454 // This transmutes $alloc<'tcx> to $alloc<'gcx>
2458 let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
2460 let i = unsafe { mem::transmute(i) };
// Generates Eq/Hash impls keyed on the value itself, then delegates to
// intern_method! with a plain arena allocation.
2469 macro_rules! direct_interners {
2470 ($lt_tcx:tt, $($name:ident: $method:ident($keep_in_local_tcx:expr) -> $ty:ty),+) => {
2471 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
2472 fn eq(&self, other: &Self) -> bool {
2477 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
2479 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
2480 fn hash<H: Hasher>(&self, s: &mut H) {
2488 |a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) },
2490 $keep_in_local_tcx) -> $ty);)+
/// Returns true if `x` carries the KEEP_IN_LOCAL_TCX flag, i.e. it must be
/// interned in the thread-local arena rather than the global one.
2494 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
2495 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
2498 direct_interners!('tcx,
2499 region: mk_region(|r: &RegionKind| r.keep_in_local_tcx()) -> RegionKind,
2500 const_: mk_const(|c: &Const<'_>| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>,
2501 goal: mk_goal(|c: &GoalKind<'_>| keep_local(c)) -> GoalKind<'tcx>
// Generates interners for `List<$ty>` slices; a slice stays in the local
// arena if any of its elements must be kept local.
2504 macro_rules! slice_interners {
2505 ($($field:ident: $method:ident($ty:ident)),+) => (
2506 $(intern_method!( 'tcx, $field: $method(
2508 |a, v| List::from_arena(a, v),
2510 |xs: &[$ty<'_>]| xs.iter().any(keep_local)) -> List<$ty<'tcx>>);)+
// NOTE(review): the `slice_interners!(` invocation header is elided from
// this excerpt; the entries below are its arguments.
2515 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
2516 predicates: _intern_predicates(Predicate),
2517 type_list: _intern_type_list(Ty),
2518 substs: _intern_substs(Kind),
2519 clauses: _intern_clauses(Clause),
2520 goal_list: _intern_goals(Goal),
2521 projs: _intern_projs(ProjectionKind)
2524 // This isn't a perfect fit: CanonicalVarInfo slices are always
2525 // allocated in the global arena, so this `intern_method!` macro is
2526 // overly general. But we just return false for the code that checks
2527 // whether they belong in the thread-local arena, so no harm done, and
2528 // seems better than open-coding the rest.
2531 canonical_var_infos: _intern_canonical_var_infos(
2532 &[CanonicalVarInfo],
2533 |a, v| List::from_arena(a, v),
2535 |_xs: &[CanonicalVarInfo]| -> bool { false }
2536 ) -> List<CanonicalVarInfo>
// Constructors (`mk_*`) and interning entry points for types.
2539 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
2540 /// Given a `fn` type, returns an equivalent `unsafe fn` type;
2541 /// that is, a `fn` type that is equivalent in every way for being
2543 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2544 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
2545 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
2546 unsafety: hir::Unsafety::Unsafe,
2551 /// Given a closure signature `sig`, returns an equivalent `fn`
2552 /// type with the same signature. Detuples and so forth -- so
2553 /// e.g., if we have a sig with `Fn<(u32, i32)>` then you would get
2554 /// a `fn(u32, i32)`.
2555 pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2556 let converted_sig = sig.map_bound(|s| {
// Closure signatures take a single tuple argument; splat its elements
// into individual parameters for the resulting fn pointer type.
2557 let params_iter = match s.inputs()[0].sty {
2558 ty::Tuple(params) => {
2559 params.into_iter().cloned()
2567 hir::Unsafety::Normal,
2572 self.mk_fn_ptr(converted_sig)
// All mk_* constructors below bottom out here, in the type interner.
2576 pub fn mk_ty(&self, st: TyKind<'tcx>) -> Ty<'tcx> {
2577 CtxtInterners::intern_ty(&self.interners, &self.global_interners, st)
// Returns the pre-interned primitive type for the given machine int type.
2580 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
2582 ast::IntTy::Isize => self.types.isize,
2583 ast::IntTy::I8 => self.types.i8,
2584 ast::IntTy::I16 => self.types.i16,
2585 ast::IntTy::I32 => self.types.i32,
2586 ast::IntTy::I64 => self.types.i64,
2587 ast::IntTy::I128 => self.types.i128,
2591 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
2593 ast::UintTy::Usize => self.types.usize,
2594 ast::UintTy::U8 => self.types.u8,
2595 ast::UintTy::U16 => self.types.u16,
2596 ast::UintTy::U32 => self.types.u32,
2597 ast::UintTy::U64 => self.types.u64,
2598 ast::UintTy::U128 => self.types.u128,
2602 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
2604 ast::FloatTy::F32 => self.types.f32,
2605 ast::FloatTy::F64 => self.types.f64,
2610 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str`.
2615 pub fn mk_static_str(self) -> Ty<'tcx> {
2616 self.mk_imm_ref(self.types.re_static, self.mk_str())
2620 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2621 // take a copy of substs so that we own the vectors inside
2622 self.mk_ty(Adt(def, substs))
2626 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
2627 self.mk_ty(Foreign(def_id))
// Builds `Box<ty>` via the OwnedBox lang item, filling any trailing
// defaulted type parameters from their declared defaults.
2630 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2631 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
2632 let adt_def = self.adt_def(def_id);
2633 let substs = Substs::for_item(self, def_id, |param, substs| {
2635 GenericParamDefKind::Lifetime => bug!(),
2636 GenericParamDefKind::Type { has_default, .. } => {
2637 if param.index == 0 {
2640 assert!(has_default);
2641 self.type_of(param.def_id).subst(self, substs).into()
2646 self.mk_ty(Adt(adt_def, substs))
2650 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2651 self.mk_ty(RawPtr(tm))
2655 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2656 self.mk_ty(Ref(r, tm.ty, tm.mutbl))
// Convenience wrappers over mk_ref/mk_ptr for each mutability.
2660 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2661 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2665 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2666 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2670 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2671 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2675 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2676 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2680 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
2681 self.mk_imm_ptr(self.mk_unit())
2685 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
2686 self.mk_ty(Array(ty, ty::Const::from_usize(self, n)))
2690 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2691 self.mk_ty(Slice(ty))
2695 pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
2696 self.mk_ty(Tuple(self.intern_type_list(ts)))
2699 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
2700 iter.intern_with(|ts| self.mk_ty(Tuple(self.intern_type_list(ts))))
2704 pub fn mk_unit(self) -> Ty<'tcx> {
// Fallback type for diverging expressions: `!` when the never_type
// feature is enabled, the unit tuple `()` otherwise.
2709 pub fn mk_diverging_default(self) -> Ty<'tcx> {
2710 if self.features().never_type {
2713 self.intern_tup(&[])
2718 pub fn mk_bool(self) -> Ty<'tcx> {
2723 pub fn mk_fn_def(self, def_id: DefId,
2724 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2725 self.mk_ty(FnDef(def_id, substs))
2729 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
2730 self.mk_ty(FnPtr(fty))
// Trait-object type from its existential predicates and region bound
// (the `mk_dynamic` signature line is elided in this excerpt).
2736 obj: ty::Binder<&'tcx List<ExistentialPredicate<'tcx>>>,
2737 reg: ty::Region<'tcx>
2739 self.mk_ty(Dynamic(obj, reg))
2743 pub fn mk_projection(self,
2745 substs: &'tcx Substs<'tcx>)
2747 self.mk_ty(Projection(ProjectionTy {
2754 pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>)
2756 self.mk_ty(Closure(closure_id, closure_substs))
2760 pub fn mk_generator(self,
2762 generator_substs: GeneratorSubsts<'tcx>,
2763 movability: hir::GeneratorMovability)
2765 self.mk_ty(Generator(id, generator_substs, movability))
2769 pub fn mk_generator_witness(self, types: ty::Binder<&'tcx List<Ty<'tcx>>>) -> Ty<'tcx> {
2770 self.mk_ty(GeneratorWitness(types))
// Inference-variable types: general, integral, and floating-point vars.
2774 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
2775 self.mk_infer(TyVar(v))
2779 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
2780 self.mk_infer(IntVar(v))
2784 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
2785 self.mk_infer(FloatVar(v))
2789 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
2790 self.mk_ty(Infer(it))
2794 pub fn mk_ty_param(self,
2796 name: InternedString) -> Ty<'tcx> {
2797 self.mk_ty(Param(ParamTy { idx: index, name: name }))
// `Self` is modeled as the type parameter with index 0.
2801 pub fn mk_self_type(self) -> Ty<'tcx> {
2802 self.mk_ty_param(0, keywords::SelfUpper.name().as_interned_str())
// Turns a generic parameter definition into the corresponding Kind:
// an early-bound region for lifetimes, a param type otherwise.
2805 pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> {
2807 GenericParamDefKind::Lifetime => {
2808 self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into()
2810 GenericParamDefKind::Type {..} => self.mk_ty_param(param.index, param.name).into(),
2815 pub fn mk_opaque(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2816 self.mk_ty(Opaque(def_id, substs))
// Slice-interning entry points; each validates its input and delegates to
// the macro-generated `_intern_*` method.
2819 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
2820 -> &'tcx List<ExistentialPredicate<'tcx>> {
2821 assert!(!eps.is_empty());
// Existential predicates must be non-empty and sorted by stable_cmp.
2822 assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater));
2823 self._intern_existential_predicates(eps)
2826 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
2827 -> &'tcx List<Predicate<'tcx>> {
2828 // FIXME consider asking the input slice to be sorted to avoid
2829 // re-interning permutations, in which case that would be asserted
2831 if preds.len() == 0 {
2832 // The macro-generated method below asserts we don't intern an empty slice.
2835 self._intern_predicates(preds)
2839 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
2843 self._intern_type_list(ts)
2847 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx List<Kind<'tcx>> {
2851 self._intern_substs(ts)
2855 pub fn intern_projs(self, ps: &[ProjectionKind<'tcx>]) -> &'tcx List<ProjectionKind<'tcx>> {
2859 self._intern_projs(ps)
// CanonicalVarInfo lists always live in the global arena, hence global_tcx().
2863 pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
2867 self.global_tcx()._intern_canonical_var_infos(ts)
2871 pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> {
2875 self._intern_clauses(ts)
2879 pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> {
2883 self._intern_goals(ts)
// Builds a FnSig from an input iterator plus the output type; the inputs and
// the output are stored together in one interned type list.
2887 pub fn mk_fn_sig<I>(self,
2891 unsafety: hir::Unsafety,
2893 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2895 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2897 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2898 inputs_and_output: self.intern_type_list(xs),
2899 variadic, unsafety, abi
// Iterator-consuming wrappers around the intern_* slice methods above.
2903 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2904 &'tcx List<ExistentialPredicate<'tcx>>>>(self, iter: I)
2906 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2909 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2910 &'tcx List<Predicate<'tcx>>>>(self, iter: I)
2912 iter.intern_with(|xs| self.intern_predicates(xs))
2915 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2916 &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2917 iter.intern_with(|xs| self.intern_type_list(xs))
2920 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2921 &'tcx List<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2922 iter.intern_with(|xs| self.intern_substs(xs))
// Substs for a trait reference: the self type first, then the rest.
2925 pub fn mk_substs_trait(self,
2927 rest: &[Kind<'tcx>])
2928 -> &'tcx Substs<'tcx>
2930 self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned()))
2933 pub fn mk_clauses<I: InternAs<[Clause<'tcx>], Clauses<'tcx>>>(self, iter: I) -> I::Output {
2934 iter.intern_with(|xs| self.intern_clauses(xs))
2937 pub fn mk_goals<I: InternAs<[Goal<'tcx>], Goals<'tcx>>>(self, iter: I) -> I::Output {
2938 iter.intern_with(|xs| self.intern_goals(xs))
// Lint emission helpers: each builds a diagnostic at the lint level computed
// for the given node and emits it immediately.
2941 pub fn lint_hir<S: Into<MultiSpan>>(self,
2942 lint: &'static Lint,
2946 self.struct_span_lint_hir(lint, hir_id, span.into(), msg).emit()
2949 pub fn lint_node<S: Into<MultiSpan>>(self,
2950 lint: &'static Lint,
2954 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
2957 pub fn lint_hir_note<S: Into<MultiSpan>>(self,
2958 lint: &'static Lint,
2963 let mut err = self.struct_span_lint_hir(lint, hir_id, span.into(), msg);
2968 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2969 lint: &'static Lint,
2974 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
// Walks up the HIR from `id` to find the nearest explicitly-set level for
// `lint`, falling back to parent nodes until a level is found.
2979 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2980 -> (lint::Level, lint::LintSource)
2982 // Right now we insert a `with_ignore` node in the dep graph here to
2983 // ignore the fact that `lint_levels` below depends on the entire crate.
2984 // For now this'll prevent false positives of recompiling too much when
2985 // anything changes.
2987 // Once red/green incremental compilation lands we should be able to
2988 // remove this because while the crate changes often the lint level map
2989 // will change rarely.
2990 self.dep_graph.with_ignore(|| {
2991 let sets = self.lint_levels(LOCAL_CRATE);
2993 let hir_id = self.hir().definitions().node_to_hir_id(id);
2994 if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
2997 let next = self.hir().get_parent_node(id);
// Reaching the crate root without finding a level is a compiler bug:
// the root always carries the default levels.
2999 bug!("lint traversal reached the root of the crate");
3006 pub fn struct_span_lint_hir<S: Into<MultiSpan>>(self,
3007 lint: &'static Lint,
3011 -> DiagnosticBuilder<'tcx>
3013 let node_id = self.hir().hir_to_node_id(hir_id);
3014 let (level, src) = self.lint_level_at_node(lint, node_id);
3015 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
3018 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
3019 lint: &'static Lint,
3023 -> DiagnosticBuilder<'tcx>
3025 let (level, src) = self.lint_level_at_node(lint, id);
3026 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
// Like struct_span_lint_node, but without a primary span.
3029 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
3030 -> DiagnosticBuilder<'tcx>
3032 let (level, src) = self.lint_level_at_node(lint, id);
3033 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// Per-item-local-id lookups into owner-indexed query maps.
3036 pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
3037 self.in_scope_traits_map(id.owner)
3038 .and_then(|map| map.get(&id.local_id).cloned())
3041 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
3042 self.named_region_map(id.owner)
3043 .and_then(|map| map.get(&id.local_id).cloned())
3046 pub fn is_late_bound(self, id: HirId) -> bool {
3047 self.is_late_bound_map(id.owner)
3048 .map(|set| set.contains(&id.local_id))
3052 pub fn object_lifetime_defaults(self, id: HirId)
3053 -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
3055 self.object_lifetime_defaults_map(id.owner)
3056 .and_then(|map| map.get(&id.local_id).cloned())
// Adapter trait: lets an iterator be collected and handed to an interning
// closure `f: FnOnce(&T) -> R` in one step.
3060 pub trait InternAs<T: ?Sized, R> {
3062 fn intern_with<F>(self, f: F) -> Self::Output
3063 where F: FnOnce(&T) -> R;
3066 impl<I, T, R, E> InternAs<[T], R> for I
3067 where E: InternIteratorElement<T, R>,
3068 I: Iterator<Item=E> {
3069 type Output = E::Output;
3070 fn intern_with<F>(self, f: F) -> Self::Output
3071 where F: FnOnce(&[T]) -> R {
3072 E::intern_with(self, f)
// Per-element strategy for InternAs: owned values, references (cloned), and
// Results (short-circuiting) each collect differently.
3076 pub trait InternIteratorElement<T, R>: Sized {
3078 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
3081 impl<T, R> InternIteratorElement<T, R> for T {
3083 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
// Collect into a stack-backed SmallVec (inline capacity 8) before
// handing the slice to the interning closure.
3084 f(&iter.collect::<SmallVec<[_; 8]>>())
3088 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
3092 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
3093 f(&iter.cloned().collect::<SmallVec<[_; 8]>>())
3097 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
3098 type Output = Result<R, E>;
3099 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
// `?` short-circuits on the first Err; interning happens only when
// every element is Ok.
3100 Ok(f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?))
3104 pub fn provide(providers: &mut ty::query::Providers<'_>) {
3105 // FIXME(#44234): almost all of these queries have no sub-queries and
3106 // therefore no actual inputs, they're just reading tables calculated in
3107 // resolve! Does this work? Unsure! That's what the issue is about.
3108 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
3109 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
3110 providers.crate_name = |tcx, id| {
3111 assert_eq!(id, LOCAL_CRATE);
3114 providers.get_lib_features = |tcx, id| {
3115 assert_eq!(id, LOCAL_CRATE);
3116 Lrc::new(middle::lib_features::collect(tcx))
3118 providers.get_lang_items = |tcx, id| {
3119 assert_eq!(id, LOCAL_CRATE);
3120 Lrc::new(middle::lang_items::collect(tcx))
3122 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
3123 providers.maybe_unused_trait_import = |tcx, id| {
3124 tcx.maybe_unused_trait_imports.contains(&id)
3126 providers.maybe_unused_extern_crates = |tcx, cnum| {
3127 assert_eq!(cnum, LOCAL_CRATE);
3128 Lrc::new(tcx.maybe_unused_extern_crates.clone())
3131 providers.stability_index = |tcx, cnum| {
3132 assert_eq!(cnum, LOCAL_CRATE);
3133 Lrc::new(stability::Index::new(tcx))
3135 providers.lookup_stability = |tcx, id| {
3136 assert_eq!(id.krate, LOCAL_CRATE);
3137 let id = tcx.hir().definitions().def_index_to_hir_id(id.index);
3138 tcx.stability().local_stability(id)
3140 providers.lookup_deprecation_entry = |tcx, id| {
3141 assert_eq!(id.krate, LOCAL_CRATE);
3142 let id = tcx.hir().definitions().def_index_to_hir_id(id.index);
3143 tcx.stability().local_deprecation_entry(id)
3145 providers.extern_mod_stmt_cnum = |tcx, id| {
3146 let id = tcx.hir().as_local_node_id(id).unwrap();
3147 tcx.cstore.extern_mod_stmt_cnum_untracked(id)
3149 providers.all_crate_nums = |tcx, cnum| {
3150 assert_eq!(cnum, LOCAL_CRATE);
3151 Lrc::new(tcx.cstore.crates_untracked())
3153 providers.postorder_cnums = |tcx, cnum| {
3154 assert_eq!(cnum, LOCAL_CRATE);
3155 Lrc::new(tcx.cstore.postorder_cnums_untracked())
3157 providers.output_filenames = |tcx, cnum| {
3158 assert_eq!(cnum, LOCAL_CRATE);
3159 tcx.output_filenames.clone()
3161 providers.features_query = |tcx, cnum| {
3162 assert_eq!(cnum, LOCAL_CRATE);
3163 Lrc::new(tcx.sess.features_untracked().clone())
3165 providers.is_panic_runtime = |tcx, cnum| {
3166 assert_eq!(cnum, LOCAL_CRATE);
3167 attr::contains_name(tcx.hir().krate_attrs(), "panic_runtime")
3169 providers.is_compiler_builtins = |tcx, cnum| {
3170 assert_eq!(cnum, LOCAL_CRATE);
3171 attr::contains_name(tcx.hir().krate_attrs(), "compiler_builtins")