1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
14 use dep_graph::{DepNode, DepConstructor};
15 use errors::DiagnosticBuilder;
17 use session::config::{BorrowckMode, OutputFilenames};
18 use session::config::CrateType;
20 use hir::{TraitCandidate, HirId, ItemKind, ItemLocalId, Node};
21 use hir::def::{Def, Export};
22 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
23 use hir::map as hir_map;
24 use hir::map::DefPathHash;
25 use lint::{self, Lint};
26 use ich::{StableHashingContext, NodeIdHashingMode};
27 use infer::canonical::{CanonicalVarInfo, CanonicalVarInfos};
28 use infer::outlives::free_region_map::FreeRegionMap;
29 use middle::cstore::CrateStoreDyn;
30 use middle::cstore::EncodedMetadata;
31 use middle::lang_items;
32 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
33 use middle::stability;
34 use mir::{self, Mir, interpret, ProjectionKind};
35 use mir::interpret::Allocation;
36 use ty::subst::{CanonicalUserSubsts, Kind, Substs, Subst};
39 use traits::{Clause, Clauses, GoalKind, Goal, Goals};
40 use ty::{self, Ty, TypeAndMut};
41 use ty::{TyS, TyKind, List};
42 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorSubsts, Region, Const};
43 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
45 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
47 use ty::GenericParamDefKind;
48 use ty::layout::{LayoutDetails, TargetDataLayout, VariantIdx};
53 use ty::CanonicalPolyFnSig;
54 use util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap};
55 use util::nodemap::{FxHashMap, FxHashSet};
56 use smallvec::SmallVec;
57 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
58 StableHasher, StableHasherResult,
60 use arena::{TypedArena, SyncDroplessArena};
61 use rustc_data_structures::indexed_vec::IndexVec;
62 use rustc_data_structures::sync::{self, Lrc, Lock, WorkerLocal};
64 use std::borrow::Borrow;
65 use std::cmp::Ordering;
66 use std::collections::hash_map::{self, Entry};
67 use std::hash::{Hash, Hasher};
70 use std::ops::{Deref, Bound};
74 use rustc_target::spec::abi;
75 use syntax::ast::{self, NodeId};
77 use syntax::source_map::MultiSpan;
78 use syntax::edition::Edition;
79 use syntax::feature_gate;
80 use syntax::symbol::{Symbol, keywords, InternedString};
/// Bundles every arena a `TyCtxt` allocates from: a per-worker global
/// arena (`GlobalArenas`) for long-lived data, and a `SyncDroplessArena`
/// that backs the interners for types, regions, etc.
pub struct AllArenas<'tcx> {
    pub global: WorkerLocal<GlobalArenas<'tcx>>,
    pub interner: SyncDroplessArena,
    // NOTE(review): closing brace elided in this excerpt.
impl<'tcx> AllArenas<'tcx> {
    /// Creates a fresh, empty set of arenas (all default-initialized).
    pub fn new() -> Self {
        // NOTE(review): the `AllArenas { ... }` struct-literal header and
        // closing braces appear to have been dropped from this excerpt;
        // only the field initializers are visible.
        global: WorkerLocal::new(|_| GlobalArenas::default()),
        interner: SyncDroplessArena::default(),
/// Typed arenas for data that lives for the whole compilation session:
/// layouts, generics, trait/ADT definitions, (stolen) MIR bodies,
/// typeck tables, and constant allocations.
pub struct GlobalArenas<'tcx> {
    layout: TypedArena<LayoutDetails>,
    generics: TypedArena<ty::Generics>,
    trait_def: TypedArena<ty::TraitDef>,
    adt_def: TypedArena<ty::AdtDef>,
    // MIR wrapped in `Steal` so a later pass can take ownership of it.
    steal_mir: TypedArena<Steal<Mir<'tcx>>>,
    mir: TypedArena<Mir<'tcx>>,
    tables: TypedArena<ty::TypeckTables<'tcx>>,
    const_allocs: TypedArena<interpret::Allocation>,
/// A locked hash set of interned references into an arena; the unit of
/// storage for each interner below.
type InternedSet<'tcx, T> = Lock<FxHashSet<Interned<'tcx, T>>>;

/// The set of interners for a (local or global) type context. Interning
/// deduplicates values so they can be compared by pointer.
pub struct CtxtInterners<'tcx> {
    /// The arena that types, regions, etc are allocated from
    arena: &'tcx SyncDroplessArena,

    /// Specifically use a speedy hash algorithm for these hash sets,
    /// they're accessed quite often.
    type_: InternedSet<'tcx, TyS<'tcx>>,
    type_list: InternedSet<'tcx, List<Ty<'tcx>>>,
    substs: InternedSet<'tcx, Substs<'tcx>>,
    canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo>>,
    region: InternedSet<'tcx, RegionKind>,
    existential_predicates: InternedSet<'tcx, List<ExistentialPredicate<'tcx>>>,
    predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
    const_: InternedSet<'tcx, Const<'tcx>>,
    clauses: InternedSet<'tcx, List<Clause<'tcx>>>,
    goal: InternedSet<'tcx, GoalKind<'tcx>>,
    goal_list: InternedSet<'tcx, List<Goal<'tcx>>>,
    projs: InternedSet<'tcx, List<ProjectionKind<'tcx>>>,
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
    /// Builds a `CtxtInterners` over the given arena with every interner
    /// set starting empty.
    fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
        // NOTE(review): the `CtxtInterners { arena, ... }` literal header
        // and closing braces are missing from this excerpt.
        type_: Default::default(),
        type_list: Default::default(),
        substs: Default::default(),
        region: Default::default(),
        existential_predicates: Default::default(),
        canonical_var_infos: Default::default(),
        predicates: Default::default(),
        const_: Default::default(),
        clauses: Default::default(),
        goal: Default::default(),
        goal_list: Default::default(),
        projs: Default::default(),

    // NOTE(review): the lines below are the tail of an `intern_ty`-style
    // function whose `fn` line was dropped from this excerpt; `local` and
    // `global` are its remaining parameters. It interns `st` in the local
    // interner when it contains inference types/regions, otherwise in the
    // global one.
    local: &CtxtInterners<'tcx>,
    global: &CtxtInterners<'gcx>,
        let flags = super::flags::FlagComputation::for_sty(&st);

        // HACK(eddyb) Depend on flags being accurate to
        // determine that all contents are in the global tcx.
        // See comments on Lift for why we can't use that.
        if flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
            // Fast path: already interned locally.
            let mut interner = local.type_.borrow_mut();
            if let Some(&Interned(ty)) = interner.get(&st) {
            let ty_struct = TyS {
                outer_exclusive_binder: flags.outer_exclusive_binder,

            // Make sure we don't end up with inference
            // types/regions in the global interner
            if local as *const _ as usize == global as *const _ as usize {
                bug!("Attempted to intern `{:?}` which contains \
                    inference types/regions in the global type context",

            // Don't be &mut TyS.
            let ty: Ty<'tcx> = local.arena.alloc(ty_struct);
            interner.insert(Interned(ty));
            // Global-tcx path: no inference types/regions present.
            let mut interner = global.type_.borrow_mut();
            if let Some(&Interned(ty)) = interner.get(&st) {
            let ty_struct = TyS {
                outer_exclusive_binder: flags.outer_exclusive_binder,

            // This is safe because all the types the ty_struct can point to
            // already is in the global arena
            let ty_struct: TyS<'gcx> = unsafe {
                mem::transmute(ty_struct)

            // Don't be &mut TyS.
            let ty: Ty<'gcx> = global.arena.alloc(ty_struct);
            interner.insert(Interned(ty));
/// Pre-interned types and regions that are used constantly throughout the
/// compiler, so they can be fetched without hitting the interner.
/// NOTE(review): the primitive-type fields (`bool`, `i8`, ... — see
/// `CommonTypes::new` below) are missing from this excerpt; only the
/// region fields are visible.
pub struct CommonTypes<'tcx> {
    pub re_empty: Region<'tcx>,
    pub re_static: Region<'tcx>,
    pub re_erased: Region<'tcx>,
/// Read-only view over an `ItemLocalMap`, paired with the `local_id_root`
/// used to validate that looked-up `HirId`s belong to this table's owner.
pub struct LocalTableInContext<'a, V: 'a> {
    local_id_root: Option<DefId>,
    data: &'a ItemLocalMap<V>
/// Validate that the given HirId (respectively its `local_id` part) can be
/// safely used as a key in the tables of a TypeckTable. For that to be
/// the case, the HirId must have the same `owner` as all the other IDs in
/// this table (signified by `local_id_root`). Otherwise the HirId
/// would be in a different frame of reference and using its `local_id`
/// would result in lookup errors, or worse, in silently wrong data being
/// stored/returned.
fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
                                     // NOTE(review): the remaining parameters
                                     // (`hir_id`, and a mutable-access flag —
                                     // see the `true`/`false` call sites below)
                                     // were dropped from this excerpt.
    // Only checked in debug builds; release builds skip the validation.
    if cfg!(debug_assertions) {
        if let Some(local_id_root) = local_id_root {
            if hir_id.owner != local_id_root.index {
                ty::tls::with(|tcx| {
                    let node_id = tcx.hir.hir_to_node_id(hir_id);
                    bug!("node {} with HirId::owner {:?} cannot be placed in \
                          TypeckTables with local_id_root {:?}",
                         tcx.hir.node_to_string(node_id),
                         DefId::local(hir_id.owner),
            // We use "Null Object" TypeckTables in some of the analysis passes.
            // These are just expected to be empty and their `local_id_root` is
            // `None`. Therefore we cannot verify whether a given `HirId` would
            // be a valid key for the given table. Instead we make sure that
            // nobody tries to write to such a Null Object table.
            bug!("access to invalid TypeckTables")
impl<'a, V> LocalTableInContext<'a, V> {
    /// Returns true if the table has an entry for `id`. Validates the id's
    /// owner against `local_id_root` first (debug builds only).
    pub fn contains_key(&self, id: hir::HirId) -> bool {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.data.contains_key(&id.local_id)

    /// Looks up the value for `id`, validating its owner first.
    pub fn get(&self, id: hir::HirId) -> Option<&V> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.data.get(&id.local_id)

    /// Iterates over all `(ItemLocalId, V)` entries.
    pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> {
        // NOTE(review): the body (`self.data.iter()` presumably) is missing
        // from this excerpt.
/// `table[hir_id]` sugar; panics if the key is absent.
impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
    // NOTE(review): the `type Output = V;` line appears to be missing here.
    fn index(&self, key: hir::HirId) -> &V {
        self.get(key).expect("LocalTableInContext: key not found")
/// Mutable counterpart of `LocalTableInContext`: a validated, mutable view
/// over an `ItemLocalMap`.
pub struct LocalTableInContextMut<'a, V: 'a> {
    local_id_root: Option<DefId>,
    data: &'a mut ItemLocalMap<V>
impl<'a, V> LocalTableInContextMut<'a, V> {
    /// Mutable lookup; validates the id's owner (with mutable-access flag set).
    pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
        self.data.get_mut(&id.local_id)

    /// Entry API passthrough, keyed by the id's `local_id` part.
    pub fn entry(&mut self, id: hir::HirId) -> Entry<'_, hir::ItemLocalId, V> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
        self.data.entry(id.local_id)

    /// Inserts `val`, returning the previous value if one existed.
    pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
        self.data.insert(id.local_id, val)

    /// Removes and returns the value for `id`, if present.
    pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
        self.data.remove(&id.local_id)
/// The results of type checking one body: per-node types, substitutions,
/// adjustments, resolutions, and related side tables. All `ItemLocalMap`
/// keys are relative to `local_id_root`.
#[derive(RustcEncodable, RustcDecodable, Debug)]
pub struct TypeckTables<'tcx> {
    /// The HirId::owner all ItemLocalIds in this table are relative to.
    pub local_id_root: Option<DefId>,

    /// Resolved definitions for `<T>::X` associated paths and
    /// method calls, including those of overloaded operators.
    type_dependent_defs: ItemLocalMap<Def>,

    /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
    /// or patterns (`S { field }`). The index is often useful by itself, but to learn more
    /// about the field you also need definition of the variant to which the field
    /// belongs, but it may not exist if it's a tuple field (`tuple.0`).
    field_indices: ItemLocalMap<usize>,

    /// Stores the types for various nodes in the AST. Note that this table
    /// is not guaranteed to be populated until after typeck. See
    /// typeck::check::fn_ctxt for details.
    node_types: ItemLocalMap<Ty<'tcx>>,

    /// Stores the type parameters which were substituted to obtain the type
    /// of this node. This only applies to nodes that refer to entities
    /// parameterized by type parameters, such as generic fns, types, or
    /// other items.
    node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,

    /// Stores the canonicalized types provided by the user. See also
    /// `AscribeUserType` statement in MIR.
    user_provided_tys: ItemLocalMap<CanonicalTy<'tcx>>,

    /// Stores the canonicalized types provided by the user. See also
    /// `AscribeUserType` statement in MIR.
    pub user_provided_sigs: DefIdMap<CanonicalPolyFnSig<'tcx>>,

    /// Stores the substitutions that the user explicitly gave (if any)
    /// attached to `id`. These will not include any inferred
    /// values. The canonical form is used to capture things like `_`
    /// or other unspecified values.
    ///
    /// If the user wrote `foo.collect::<Vec<_>>()`, then the
    /// canonical substitutions would include only `for<X> { Vec<X>
    /// }` (note: doc comment appears truncated in this excerpt).
    user_substs: ItemLocalMap<CanonicalUserSubsts<'tcx>>,

    // Adjustments (see `ty::adjustment`) recorded per expression node.
    adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,

    /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
    pat_binding_modes: ItemLocalMap<BindingMode>,

    /// Stores the types which were implicitly dereferenced in pattern binding modes
    /// for later usage in HAIR lowering. For example,
    /// `match &&Some(5i32) { ... }`
    /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
    ///
    /// See also:
    /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
    pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,

    // Maps each `ty::UpvarId` to the `UpvarCapture` inferred for it
    // (see the `upvar_capture` accessor below).
    pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,

    /// Records the reasons that we picked the kind of each closure;
    /// not all closures are present in the map.
    closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,

    /// For each fn, records the "liberated" types of its arguments
    /// and return type. Liberated means that all bound regions
    /// (including late-bound regions) are replaced with free
    /// equivalents. This table is not used in codegen (since regions
    /// are erased there) and hence is not serialized to metadata.
    liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,

    /// For each FRU expression, record the normalized types of the fields
    /// of the struct - this is needed because it is non-trivial to
    /// normalize while preserving regions. This table is used only in
    /// MIR construction and hence is not serialized to metadata.
    fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,

    /// Maps a cast expression to its kind. This is keyed on the
    /// *from* expression of the cast, not the cast itself.
    cast_kinds: ItemLocalMap<ty::cast::CastKind>,

    /// Set of trait imports actually used in the method resolution.
    /// This is used for warning unused imports. During type
    /// checking, this `Lrc` should not be cloned: it must have a ref-count
    /// of 1 so that we can insert things into the set mutably.
    pub used_trait_imports: Lrc<DefIdSet>,

    /// If any errors occurred while type-checking this body,
    /// this field will be set to `true`.
    pub tainted_by_errors: bool,

    /// Stores the free-region relationships that were deduced from
    /// its where clauses and parameter types. These are then
    /// read-again by borrowck.
    pub free_region_map: FreeRegionMap<'tcx>,

    /// All the existential types that are restricted to concrete types
    /// by this function.
    pub concrete_existential_types: FxHashMap<DefId, Ty<'tcx>>,
impl<'tcx> TypeckTables<'tcx> {
    /// Creates an empty table rooted at `local_id_root` (`None` produces a
    /// "Null Object" table that must never be written to — see
    /// `validate_hir_id_for_typeck_tables`).
    pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
        // NOTE(review): the `TypeckTables { local_id_root, ... }` literal
        // header appears to be missing from this excerpt.
        type_dependent_defs: ItemLocalMap(),
        field_indices: ItemLocalMap(),
        user_provided_tys: ItemLocalMap(),
        user_provided_sigs: Default::default(),
        node_types: ItemLocalMap(),
        node_substs: ItemLocalMap(),
        user_substs: ItemLocalMap(),
        adjustments: ItemLocalMap(),
        pat_binding_modes: ItemLocalMap(),
        pat_adjustments: ItemLocalMap(),
        upvar_capture_map: Default::default(),
        closure_kind_origins: ItemLocalMap(),
        liberated_fn_sigs: ItemLocalMap(),
        fru_field_types: ItemLocalMap(),
        cast_kinds: ItemLocalMap(),
        used_trait_imports: Lrc::new(DefIdSet()),
        tainted_by_errors: false,
        free_region_map: Default::default(),
        concrete_existential_types: Default::default(),

    /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
    pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
        // NOTE(review): the `match qpath {` header line is missing here.
        hir::QPath::Resolved(_, ref path) => path.def,
        hir::QPath::TypeRelative(..) => {
            validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
            // Fall back to `Def::Err` if resolution was never recorded.
            self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)

    // The accessor pairs below each expose one of the private ItemLocalMaps
    // as a validated read-only / mutable view.

    pub fn type_dependent_defs(&self) -> LocalTableInContext<'_, Def> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.type_dependent_defs

    pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<'_, Def> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.type_dependent_defs

    pub fn field_indices(&self) -> LocalTableInContext<'_, usize> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.field_indices

    pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.field_indices

    pub fn user_provided_tys(&self) -> LocalTableInContext<'_, CanonicalTy<'tcx>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.user_provided_tys

    pub fn user_provided_tys_mut(&mut self) -> LocalTableInContextMut<'_, CanonicalTy<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.user_provided_tys

    pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.node_types

    pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.node_types

    /// Like `node_id_to_type_opt`, but ICEs if no type was recorded.
    pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
        self.node_id_to_type_opt(id).unwrap_or_else(||
            bug!("node_id_to_type: no type for node `{}`",
                 // NOTE(review): a `ty::tls::with(|tcx| ...)` wrapper around
                 // these two lines appears to be missing from this excerpt.
                 let id = tcx.hir.hir_to_node_id(id);
                 tcx.hir.node_to_string(id)

    /// Returns the recorded type for `id`, if any.
    pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.node_types.get(&id.local_id).cloned()

    pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, &'tcx Substs<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.node_substs

    /// Substs recorded for `id`, defaulting to the empty substs.
    pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| Substs::empty())

    pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.node_substs.get(&id.local_id).cloned()

    pub fn user_substs_mut(&mut self) -> LocalTableInContextMut<'_, CanonicalUserSubsts<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.user_substs

    pub fn user_substs(&self, id: hir::HirId) -> Option<CanonicalUserSubsts<'tcx>> {
        validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
        self.user_substs.get(&id.local_id).cloned()

    // Returns the type of a pattern as a monotype. Like @expr_ty, this function
    // doesn't provide type parameter substitutions.
    pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
        self.node_id_to_type(pat.hir_id)

    pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
        self.node_id_to_type_opt(pat.hir_id)

    // Returns the type of an expression as a monotype.
    //
    // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
    // some cases, we insert `Adjustment` annotations such as auto-deref or
    // auto-ref. The type returned by this function does not consider such
    // adjustments. See `expr_ty_adjusted()` instead.
    //
    // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
    // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
    // instead of "fn(ty) -> T with T = isize".
    pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
        self.node_id_to_type(expr.hir_id)

    pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
        self.node_id_to_type_opt(expr.hir_id)

    pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.adjustments

    pub fn adjustments_mut(&mut self)
                           -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.adjustments

    /// Adjustments recorded for `expr`, or the empty slice if none.
    pub fn expr_adjustments(&self, expr: &hir::Expr)
                            -> &[ty::adjustment::Adjustment<'tcx>] {
        validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
        self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])

    /// Returns the type of `expr`, considering any `Adjustment`
    /// entry recorded for that expression.
    pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
        self.expr_adjustments(expr)
            // NOTE(review): a `.last()` call presumably precedes this line;
            // it is missing from the excerpt.
            .map_or_else(|| self.expr_ty(expr), |adj| adj.target)

    pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
        self.expr_adjustments(expr)
            .map(|adj| adj.target)
            .or_else(|| self.expr_ty_opt(expr))

    /// Whether `expr` resolves to a method (overloaded operator or `.m()` call).
    pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
        // Only paths and method calls/overloaded operators have
        // entries in type_dependent_defs, ignore the former here.
        if let hir::ExprKind::Path(_) = expr.node {
        match self.type_dependent_defs().get(expr.hir_id) {
            Some(&Def::Method(_)) => true,

    pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.pat_binding_modes

    pub fn pat_binding_modes_mut(&mut self)
                           -> LocalTableInContextMut<'_, BindingMode> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.pat_binding_modes

    pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.pat_adjustments,

    pub fn pat_adjustments_mut(&mut self)
                           -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.pat_adjustments,

    /// Capture mode for the given upvar; panics if not recorded.
    pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
        self.upvar_capture_map[&upvar_id]

    pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, ast::Name)> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.closure_kind_origins

    pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<'_, (Span, ast::Name)> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.closure_kind_origins

    pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.liberated_fn_sigs

    pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.liberated_fn_sigs

    pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.fru_field_types

    pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.fru_field_types

    pub fn cast_kinds(&self) -> LocalTableInContext<'_, ty::cast::CastKind> {
        LocalTableInContext {
            local_id_root: self.local_id_root,
            data: &self.cast_kinds

    pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<'_, ty::cast::CastKind> {
        LocalTableInContextMut {
            local_id_root: self.local_id_root,
            data: &mut self.cast_kinds
/// Stable (incremental-compilation) hashing for `TypeckTables`.
/// Destructures the struct so that adding a field is a compile error here,
/// forcing the hash to stay in sync with the definition.
impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
    fn hash_stable<W: StableHasherResult>(&self,
                                          hcx: &mut StableHashingContext<'a>,
                                          hasher: &mut StableHasher<W>) {
        let ty::TypeckTables {
            // NOTE(review): several `ref` bindings of this destructuring
            // pattern (e.g. `field_indices`, `node_types`) were dropped from
            // this excerpt; the hashing calls below still reference them.
            ref type_dependent_defs,
            ref user_provided_tys,
            ref user_provided_sigs,
            ref pat_binding_modes,
            ref upvar_capture_map,
            ref closure_kind_origins,
            ref liberated_fn_sigs,
            ref used_trait_imports,
            ref concrete_existential_types,

        // HirIds are hashed via their owner's DefPath so the hash is stable
        // across unrelated changes elsewhere in the crate.
        hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
            type_dependent_defs.hash_stable(hcx, hasher);
            field_indices.hash_stable(hcx, hasher);
            user_provided_tys.hash_stable(hcx, hasher);
            user_provided_sigs.hash_stable(hcx, hasher);
            node_types.hash_stable(hcx, hasher);
            node_substs.hash_stable(hcx, hasher);
            user_substs.hash_stable(hcx, hasher);
            adjustments.hash_stable(hcx, hasher);
            pat_binding_modes.hash_stable(hcx, hasher);
            pat_adjustments.hash_stable(hcx, hasher);
            // Upvar keys are turned into stable DefPath hashes before hashing
            // the map, since UpvarId contains non-stable local indices.
            hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
                local_id_root.expect("trying to hash invalid TypeckTables");

                let var_owner_def_id = DefId {
                    krate: local_id_root.krate,
                let closure_def_id = DefId {
                    krate: local_id_root.krate,
                    index: closure_expr_id.to_def_id().index,
                (hcx.def_path_hash(var_owner_def_id),
                 hcx.def_path_hash(closure_def_id))

            closure_kind_origins.hash_stable(hcx, hasher);
            liberated_fn_sigs.hash_stable(hcx, hasher);
            fru_field_types.hash_stable(hcx, hasher);
            cast_kinds.hash_stable(hcx, hasher);
            used_trait_imports.hash_stable(hcx, hasher);
            tainted_by_errors.hash_stable(hcx, hasher);
            free_region_map.hash_stable(hcx, hasher);
            concrete_existential_types.hash_stable(hcx, hasher);
impl<'tcx> CommonTypes<'tcx> {
    /// Pre-interns the common primitive types and regions so later code can
    /// grab them without going through the interner.
    fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
        // Both `local` and `global` interner arguments are the same set here:
        // common types always live in the global tcx.
        let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty);
        let mk_region = |r| {
            // Reuse an already-interned region, otherwise allocate and insert.
            if let Some(r) = interners.region.borrow().get(&r) {
            let r = interners.arena.alloc(r);
            interners.region.borrow_mut().insert(Interned(r));

        // NOTE(review): the `CommonTypes { ... }` literal header (and some
        // entries such as `bool`/`char`) appear to be missing from this excerpt.
        isize: mk(Int(ast::IntTy::Isize)),
        i8: mk(Int(ast::IntTy::I8)),
        i16: mk(Int(ast::IntTy::I16)),
        i32: mk(Int(ast::IntTy::I32)),
        i64: mk(Int(ast::IntTy::I64)),
        i128: mk(Int(ast::IntTy::I128)),
        usize: mk(Uint(ast::UintTy::Usize)),
        u8: mk(Uint(ast::UintTy::U8)),
        u16: mk(Uint(ast::UintTy::U16)),
        u32: mk(Uint(ast::UintTy::U32)),
        u64: mk(Uint(ast::UintTy::U64)),
        u128: mk(Uint(ast::UintTy::U128)),
        f32: mk(Float(ast::FloatTy::F32)),
        f64: mk(Float(ast::FloatTy::F64)),
        re_empty: mk_region(RegionKind::ReEmpty),
        re_static: mk_region(RegionKind::ReStatic),
        re_erased: mk_region(RegionKind::ReErased),
// This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime
// conflict, used when reporting region errors.
pub struct FreeRegionInfo {
    // def id corresponding to FreeRegion
    // NOTE(review): the field itself (presumably `pub def_id: DefId,`) is
    // missing from this excerpt — only its comment survived.
    // the bound region corresponding to FreeRegion
    pub boundregion: ty::BoundRegion,
    // checks if bound region is in Impl Item
    pub is_impl_item: bool,
/// The central data structure of the compiler. It stores references
/// to the various **arenas** and also houses the results of the
/// various **compiler queries** that have been performed. See the
/// [rustc guide] for more details.
///
/// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/ty.html
///
/// `TyCtxt` is a cheap `Copy` handle: two references, one to the global
/// context and one to the interners in use (local or global).
#[derive(Copy, Clone)]
pub struct TyCtxt<'a, 'gcx: 'tcx, 'tcx: 'a> {
    gcx: &'a GlobalCtxt<'gcx>,
    interners: &'a CtxtInterners<'tcx>
/// Auto-deref `TyCtxt` to the global context so its fields (`sess`,
/// `hir`, ...) can be accessed directly on the handle.
impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
    type Target = &'a GlobalCtxt<'gcx>;
    fn deref(&self) -> &Self::Target {
        // NOTE(review): the body (presumably `&self.gcx`) is missing from
        // this excerpt.
/// The global, session-long compiler context that `TyCtxt` hands out
/// references to: arenas, interners, caches, and query results.
pub struct GlobalCtxt<'tcx> {
    global_arenas: &'tcx WorkerLocal<GlobalArenas<'tcx>>,
    global_interners: CtxtInterners<'tcx>,

    // Crate metadata store (trait object; concrete type chosen by the driver).
    cstore: &'tcx CrateStoreDyn,

    pub sess: &'tcx Session,

    pub dep_graph: DepGraph,

    /// Common types, pre-interned for your convenience.
    pub types: CommonTypes<'tcx>,

    /// Map indicating what traits are in scope for places where this
    /// is relevant; generated by resolve.
    trait_map: FxHashMap<DefIndex,
                         Lrc<FxHashMap<ItemLocalId,
                                       Lrc<StableVec<TraitCandidate>>>>>,

    /// Export map produced by name resolution.
    export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,

    pub hir: hir_map::Map<'tcx>,

    /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
    /// as well as all upstream crates. Only populated in incremental mode.
    pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,

    pub(crate) queries: query::Queries<'tcx>,

    // Records the free variables referenced by every closure
    // expression. Do not track deps for this, just recompute it from
    // scratch every time.
    freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,

    maybe_unused_trait_imports: FxHashSet<DefId>,
    maybe_unused_extern_crates: Vec<(DefId, Span)>,
    /// Extern prelude entries. The value is `true` if the entry was introduced
    /// via `extern crate` item and not `--extern` option or compiler built-in.
    pub extern_prelude: FxHashMap<ast::Name, bool>,

    // Internal cache for metadata decoding. No need to track deps on this.
    pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,

    /// Caches the results of trait selection. This cache is used
    /// for things that do not have to do with the parameters in scope.
    pub selection_cache: traits::SelectionCache<'tcx>,

    /// Caches the results of trait evaluation. This cache is used
    /// for things that do not have to do with the parameters in scope.
    /// Merge this with `selection_cache`?
    pub evaluation_cache: traits::EvaluationCache<'tcx>,

    /// The definite name of the current crate after taking into account
    /// attributes, commandline parameters, etc.
    pub crate_name: Symbol,

    /// Data layout specification for the current target.
    pub data_layout: TargetDataLayout,

    // Deduplication sets backing `intern_stability` / `intern_const_alloc` /
    // `intern_layout` below.
    stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,

    /// Stores the value of constants (and deduplicates the actual memory)
    allocation_interner: Lock<FxHashSet<&'tcx Allocation>>,

    pub alloc_map: Lock<interpret::AllocMap<'tcx, &'tcx Allocation>>,

    layout_interner: Lock<FxHashSet<&'tcx LayoutDetails>>,

    /// A general purpose channel to throw data out the back towards LLVM worker
    /// threads.
    ///
    /// This is intended to only get used during the codegen phase of the compiler
    /// when satisfying the query for a particular codegen unit. Internally in
    /// the query it'll send data along this channel to get processed later.
    pub tx_to_llvm_workers: Lock<mpsc::Sender<Box<dyn Any + Send>>>,

    output_filenames: Arc<OutputFilenames>,
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
    /// Get the global TyCtxt.
    pub fn global_tcx(self) -> TyCtxt<'a, 'gcx, 'gcx> {
            // NOTE(review): the `TyCtxt { gcx: self.gcx, ... }` literal
            // header appears to be missing from this excerpt.
            interners: &self.gcx.global_interners,

    // The `alloc_*` methods below place session-long data in the global
    // arenas and hand back `'gcx` references.

    pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
        self.global_arenas.generics.alloc(generics)

    pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
        self.global_arenas.steal_mir.alloc(Steal::new(mir))

    pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
        self.global_arenas.mir.alloc(mir)

    pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
        self.global_arenas.tables.alloc(tables)

    pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
        self.global_arenas.trait_def.alloc(def)

    pub fn alloc_adt_def(self,
                         // NOTE(review): `did`, `kind` and `repr` parameter
                         // lines (used below) were dropped from this excerpt.
                         variants: IndexVec<VariantIdx, ty::VariantDef>,
                         -> &'gcx ty::AdtDef {
        let def = ty::AdtDef::new(self, did, kind, variants, repr);
        self.global_arenas.adt_def.alloc(def)

    pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
        // Empty slices need no arena allocation.
        if bytes.is_empty() {
        self.global_interners.arena.alloc_slice(bytes)

    pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
                             -> &'tcx [&'tcx ty::Const<'tcx>] {
        if values.is_empty() {
        self.interners.arena.alloc_slice(values)

    pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
                                  -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
        if values.is_empty() {
        self.interners.arena.alloc_slice(values)

    /// Interns a constant `Allocation`, deduplicating identical memory.
    pub fn intern_const_alloc(
        // NOTE(review): the `alloc: Allocation` parameter line is missing here.
    ) -> &'gcx Allocation {
        let allocs = &mut self.allocation_interner.borrow_mut();
        if let Some(alloc) = allocs.get(&alloc) {

        let interned = self.global_arenas.const_allocs.alloc(alloc);
        if let Some(prev) = allocs.replace(interned) { // insert into interner
            bug!("Tried to overwrite interned Allocation: {:#?}", prev)

    /// Allocates a byte or string literal for `mir::interpret`, read-only
    pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
        // create an allocation that just contains these bytes
        let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes);
        let alloc = self.intern_const_alloc(alloc);
        self.alloc_map.lock().allocate(alloc)

    /// Interns a `Stability` attribute value, deduplicating.
    pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
        let mut stability_interner = self.stability_interner.borrow_mut();
        if let Some(st) = stability_interner.get(&stab) {

        let interned = self.global_interners.arena.alloc(stab);
        if let Some(prev) = stability_interner.replace(interned) {
            bug!("Tried to overwrite interned Stability: {:?}", prev)

    /// Interns computed layout details, deduplicating.
    pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
        let mut layout_interner = self.layout_interner.borrow_mut();
        if let Some(layout) = layout_interner.get(&layout) {

        let interned = self.global_arenas.layout.alloc(layout);
        if let Some(prev) = layout_interner.replace(interned) {
            bug!("Tried to overwrite interned Layout: {:?}", prev)

    /// Returns a range of the start/end indices specified with the
    /// `rustc_layout_scalar_valid_range` attribute.
    pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound<u128>, Bound<u128>) {
        let attrs = self.get_attrs(def_id);
        // NOTE(review): a `let get = |name| { ... }` closure header appears to
        // be missing; `name` below is its parameter.
        let attr = match attrs.iter().find(|a| a.check_name(name)) {
            None => return Bound::Unbounded,
        for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") {
            match meta.literal().expect("attribute takes lit").node {
                ast::LitKind::Int(a, _) => return Bound::Included(a),
                _ => span_bug!(attr.span, "rustc_layout_scalar_valid_range expects int arg"),
        span_bug!(attr.span, "no arguments to `rustc_layout_scalar_valid_range` attribute");
        (get("rustc_layout_scalar_valid_range_start"), get("rustc_layout_scalar_valid_range_end"))

    /// Lifts `value` into this context, if all its components are interned here.
    pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
        value.lift_to_tcx(self)

    /// Like lift, but only tries in the global tcx.
    pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
        value.lift_to_tcx(self.global_tcx())

    /// Returns true if self is the same as self.global_tcx().
    fn is_global(self) -> bool {
1120 let local = self.interners as *const _;
1121 let global = &self.global_interners as *const _;
1122 local as usize == global as usize
1125 /// Create a type context and call the closure with a `TyCtxt` reference
1126 /// to the context. The closure enforces that the type context and any interned
1127 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
1128 /// reference to the context, to allow formatting values that need it.
// Builds the one GlobalCtxt for the session: parses the target data layout,
// sets up interners and query providers, converts resolver output into
// tcx-side maps, then enters the TLS context and runs `f`.
1129 pub fn create_and_enter<F, R>(s: &'tcx Session,
1130 cstore: &'tcx CrateStoreDyn,
1131 local_providers: ty::query::Providers<'tcx>,
1132 extern_providers: ty::query::Providers<'tcx>,
1133 arenas: &'tcx AllArenas<'tcx>,
1134 resolutions: ty::Resolutions,
1135 hir: hir_map::Map<'tcx>,
1136 on_disk_query_result_cache: query::OnDiskCache<'tcx>,
1138 tx: mpsc::Sender<Box<dyn Any + Send>>,
1139 output_filenames: &OutputFilenames,
1141 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
// A malformed target data layout is unrecoverable; the error closure
// (its body is not visible in this chunk) presumably aborts the session.
1143 let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
1146 let interners = CtxtInterners::new(&arenas.interner);
1147 let common_types = CommonTypes::new(&interners);
1148 let dep_graph = hir.dep_graph.clone();
// One provider table per crate, indexed by CrateNum; extern crates share
// `extern_providers`, the local crate gets `local_providers`.
1149 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1150 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1151 providers[LOCAL_CRATE] = local_providers;
// Only built when dep-graph tracking is enabled: a reverse map from
// DefPathHash to DefId covering this crate plus all upstream crates.
1153 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1154 let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
1157 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
1160 let def_path_tables = || {
1161 upstream_def_path_tables
1163 .map(|&(cnum, ref rc)| (cnum, &**rc))
1164 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1167 // Precompute the capacity of the hashmap so we don't have to
1168 // re-allocate when populating it.
1169 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>()
1171 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1173 ::std::default::Default::default()
1176 for (cnum, def_path_table) in def_path_tables() {
1177 def_path_table.add_def_path_hashes_to(cnum, &mut map);
// Regroup the resolver's NodeId-keyed trait map by HIR owner, keyed by
// ItemLocalId within each owner. get_mut is safe: each Lrc was just created
// here and is not yet shared.
1185 let mut trait_map: FxHashMap<_, Lrc<FxHashMap<_, _>>> = FxHashMap::default();
1186 for (k, v) in resolutions.trait_map {
1187 let hir_id = hir.node_to_hir_id(k);
1188 let map = trait_map.entry(hir_id.owner).or_default();
1189 Lrc::get_mut(map).unwrap()
1190 .insert(hir_id.local_id,
1191 Lrc::new(StableVec::new(v)));
// Assemble the GlobalCtxt itself; several field initializers are not
// visible in this chunk (the maps below are collected from the resolver
// output, translating NodeIds to DefIds).
1194 let gcx = &GlobalCtxt {
1197 global_arenas: &arenas.global,
1198 global_interners: interners,
1200 types: common_types,
1202 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1205 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1206 (hir.local_def_id(k), Lrc::new(v))
1208 maybe_unused_trait_imports:
1209 resolutions.maybe_unused_trait_imports
1211 .map(|id| hir.local_def_id(id))
1213 maybe_unused_extern_crates:
1214 resolutions.maybe_unused_extern_crates
1216 .map(|(id, sp)| (hir.local_def_id(id), sp))
1218 extern_prelude: resolutions.extern_prelude,
1220 def_path_hash_to_def_id,
1221 queries: query::Queries::new(
1224 on_disk_query_result_cache,
1226 rcache: Default::default(),
1227 selection_cache: Default::default(),
1228 evaluation_cache: Default::default(),
1229 crate_name: Symbol::intern(crate_name),
1231 layout_interner: Default::default(),
1232 stability_interner: Default::default(),
1233 allocation_interner: Default::default(),
1234 alloc_map: Lock::new(interpret::AllocMap::new()),
1235 tx_to_llvm_workers: Lock::new(tx),
1236 output_filenames: Arc::new(output_filenames.clone()),
// The context is shared across threads by the query system, so assert
// it is Send before publishing it via TLS.
1239 sync::assert_send_val(&gcx);
1241 tls::enter_global(gcx, f)
1244 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1245 let cname = self.crate_name(LOCAL_CRATE).as_str();
1246 self.sess.consider_optimizing(&cname, msg)
1249 pub fn lib_features(self) -> Lrc<middle::lib_features::LibFeatures> {
1250 self.get_lib_features(LOCAL_CRATE)
1253 pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
1254 self.get_lang_items(LOCAL_CRATE)
1257 /// Due to missing llvm support for lowering 128 bit math to software emulation
1258 /// (on some targets), the lowering can be done in MIR.
1260 /// This function only exists until said support is implemented.
1261 pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
1262 let items = self.lang_items();
1263 let def_id = Some(def_id);
1264 if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1265 else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1266 else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1267 else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1268 else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1269 else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1270 else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1271 else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1272 else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1273 else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1274 else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1275 else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1276 else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1277 else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1278 else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1279 else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1280 else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1281 else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1282 else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1283 else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1284 else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1285 else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1286 else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1287 else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1291 pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
1292 self.stability_index(LOCAL_CRATE)
1295 pub fn crates(self) -> Lrc<Vec<CrateNum>> {
1296 self.all_crate_nums(LOCAL_CRATE)
1299 pub fn features(self) -> Lrc<feature_gate::Features> {
1300 self.features_query(LOCAL_CRATE)
1303 pub fn def_key(self, id: DefId) -> hir_map::DefKey {
1305 self.hir.def_key(id)
1307 self.cstore.def_key(id)
1311 /// Convert a `DefId` into its fully expanded `DefPath` (every
1312 /// `DefId` is really just an interned def-path).
1314 /// Note that if `id` is not local to this crate, the result will
1315 /// be a non-local `DefPath`.
1316 pub fn def_path(self, id: DefId) -> hir_map::DefPath {
1318 self.hir.def_path(id)
1320 self.cstore.def_path(id)
1325 pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
1326 if def_id.is_local() {
1327 self.hir.definitions().def_path_hash(def_id.index)
1329 self.cstore.def_path_hash(def_id)
1333 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1334 // We are explicitly not going through queries here in order to get
1335 // crate name and disambiguator since this code is called from debug!()
1336 // statements within the query system and we'd run into endless
1337 // recursion otherwise.
1338 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1339 (self.crate_name.clone(),
1340 self.sess.local_crate_disambiguator())
1342 (self.cstore.crate_name_untracked(def_id.krate),
1343 self.cstore.crate_disambiguator_untracked(def_id.krate))
1348 // Don't print the whole crate disambiguator. That's just
1349 // annoying in debug output.
1350 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1351 self.def_path(def_id).to_string_no_crate())
1354 pub fn metadata_encoding_version(self) -> Vec<u8> {
1355 self.cstore.metadata_encoding_version().to_vec()
1358 // Note that this is *untracked* and should only be used within the query
1359 // system if the result is otherwise tracked through queries
1360 pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
1361 self.cstore.crate_data_as_rc_any(cnum)
1364 pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
1365 let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());
1367 StableHashingContext::new(self.sess,
1369 self.hir.definitions(),
1373 // This method makes sure that we have a DepNode and a Fingerprint for
1374 // every upstream crate. It needs to be called once right after the tcx is
1376 // With full-fledged red/green, the method will probably become unnecessary
1377 // as this will be done on-demand.
1378 pub fn allocate_metadata_dep_nodes(self) {
1379 // We cannot use the query versions of crates() and crate_hash(), since
1380 // those would need the DepNodes that we are allocating here.
1381 for cnum in self.cstore.crates_untracked() {
1382 let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
1383 let crate_hash = self.cstore.crate_hash_untracked(cnum);
1384 self.dep_graph.with_task(dep_node,
1387 |_, x| x // No transformation needed
1392 // This method exercises the `in_scope_traits_map` query for all possible
1393 // values so that we have their fingerprints available in the DepGraph.
1394 // This is only required as long as we still use the old dependency tracking
1395 // which needs to have the fingerprints of all input nodes beforehand.
1396 pub fn precompute_in_scope_traits_hashes(self) {
1397 for &def_index in self.trait_map.keys() {
1398 self.in_scope_traits_map(def_index);
1402 pub fn serialize_query_result_cache<E>(self,
1404 -> Result<(), E::Error>
1405 where E: ty::codec::TyEncoder
1407 self.queries.on_disk_cache.serialize(self.global_tcx(), encoder)
1410 /// This checks whether one is allowed to have pattern bindings
1411 /// that bind-by-move on a match arm that has a guard, e.g.:
1414 /// match foo { A(inner) if { /* something */ } => ..., ... }
1417 /// It is separate from check_for_mutation_in_guard_via_ast_walk,
1418 /// because that method has a narrower effect that can be toggled
1419 /// off via a separate `-Z` flag, at least for the short term.
1420 pub fn allow_bind_by_move_patterns_with_guards(self) -> bool {
1421 self.features().bind_by_move_pattern_guards && self.use_mir_borrowck()
1424 /// If true, we should use a naive AST walk to determine if match
1425 /// guard could perform bad mutations (or mutable-borrows).
1426 pub fn check_for_mutation_in_guard_via_ast_walk(self) -> bool {
1427 // If someone requests the feature, then be a little more
1428 // careful and ensure that MIR-borrowck is enabled (which can
1429 // happen via edition selection, via `feature(nll)`, or via an
1430 // appropriate `-Z` flag) before disabling the mutation check.
1431 if self.allow_bind_by_move_patterns_with_guards() {
1438 /// If true, we should use the AST-based borrowck (we may *also* use
1439 /// the MIR-based borrowck).
1440 pub fn use_ast_borrowck(self) -> bool {
1441 self.borrowck_mode().use_ast()
1444 /// If true, we should use the MIR-based borrowck (we may *also* use
1445 /// the AST-based borrowck).
1446 pub fn use_mir_borrowck(self) -> bool {
1447 self.borrowck_mode().use_mir()
1450 /// If true, we should use the MIR-based borrow check, but also
1451 /// fall back on the AST borrow check if the MIR-based one errors.
1452 pub fn migrate_borrowck(self) -> bool {
1453 self.borrowck_mode().migrate()
1456 /// If true, make MIR codegen for `match` emit a temp that holds a
1457 /// borrow of the input to the match expression.
1458 pub fn generate_borrow_of_any_match_input(&self) -> bool {
1459 self.emit_read_for_match()
1462 /// If true, make MIR codegen for `match` emit FakeRead
1463 /// statements (which simulate the maximal effect of executing the
1464 /// patterns in a match arm).
1465 pub fn emit_read_for_match(&self) -> bool {
1466 self.use_mir_borrowck() && !self.sess.opts.debugging_opts.nll_dont_emit_read_for_match
1469 /// If true, pattern variables for use in guards on match arms
1470 /// will be bound as references to the data, and occurrences of
1471 /// those variables in the guard expression will implicitly
1472 /// dereference those bindings. (See rust-lang/rust#27282.)
1473 pub fn all_pat_vars_are_implicit_refs_within_guards(self) -> bool {
1474 self.borrowck_mode().use_mir()
1477 /// If true, we should enable two-phase borrows checks. This is
1478 /// done with either: `-Ztwo-phase-borrows`, `#![feature(nll)]`,
1479 /// or by opting into an edition after 2015.
1480 pub fn two_phase_borrows(self) -> bool {
1481 if self.features().nll || self.sess.opts.debugging_opts.two_phase_borrows {
1485 match self.sess.edition() {
1486 Edition::Edition2015 => false,
1487 Edition::Edition2018 => true,
1492 /// What mode(s) of borrowck should we run? AST? MIR? both?
1493 /// (Also considers the `#![feature(nll)]` setting.)
1494 pub fn borrowck_mode(&self) -> BorrowckMode {
1495 // Here are the main constraints we need to deal with:
1497 // 1. An opts.borrowck_mode of `BorrowckMode::Ast` is
1498 // synonymous with no `-Z borrowck=...` flag at all.
1499 // (This is arguably a historical accident.)
1501 // 2. `BorrowckMode::Migrate` is the limited migration to
1502 // NLL that we are deploying with the 2018 edition.
1504 // 3. We want to allow developers on the Nightly channel
1505 // to opt back into the "hard error" mode for NLL,
1506 // (which they can do via specifying `#![feature(nll)]`
1507 // explicitly in their crate).
1509 // So, this precedence list is how pnkfelix chose to work with
1510 // the above constraints:
1512 // * `#![feature(nll)]` *always* means use NLL with hard
1513 // errors. (To simplify the code here, it now even overrides
1514 // a user's attempt to specify `-Z borrowck=compare`, which
1515 // we arguably do not need anymore and should remove.)
1517 // * Otherwise, if no `-Z borrowck=...` flag was given (or
1518 // if `borrowck=ast` was specified), then use the default
1519 // as required by the edition.
1521 // * Otherwise, use the behavior requested via `-Z borrowck=...`
1523 if self.features().nll { return BorrowckMode::Mir; }
1525 match self.sess.opts.borrowck_mode {
1526 mode @ BorrowckMode::Mir |
1527 mode @ BorrowckMode::Compare |
1528 mode @ BorrowckMode::Migrate => mode,
1530 BorrowckMode::Ast => match self.sess.edition() {
1531 Edition::Edition2015 => BorrowckMode::Ast,
1532 Edition::Edition2018 => BorrowckMode::Migrate,
1534 // For now, future editions mean Migrate. (But it
1535 // would make a lot of sense for it to be changed to
1536 // `BorrowckMode::Mir`, depending on how we plan to
1537 // time the forcing of full migration to NLL.)
1538 _ => BorrowckMode::Migrate,
1543 /// Should we emit EndRegion MIR statements? These are consumed by
1544 /// MIR borrowck, but not when NLL is used.
1545 pub fn emit_end_regions(self) -> bool {
1546 self.sess.opts.debugging_opts.emit_end_regions ||
1547 self.use_mir_borrowck()
1551 pub fn local_crate_exports_generics(self) -> bool {
1552 debug_assert!(self.sess.opts.share_generics());
1554 self.sess.crate_types.borrow().iter().any(|crate_type| {
1556 CrateType::Executable |
1557 CrateType::Staticlib |
1558 CrateType::ProcMacro |
1559 CrateType::Cdylib => false,
1561 CrateType::Dylib => true,
1566 // This method returns the DefId and the BoundRegion corresponding to the given region.
1567 pub fn is_suitable_region(&self, region: Region<'tcx>) -> Option<FreeRegionInfo> {
1568 let (suitable_region_binding_scope, bound_region) = match *region {
1569 ty::ReFree(ref free_region) => (free_region.scope, free_region.bound_region),
1570 ty::ReEarlyBound(ref ebr) => (
1571 self.parent_def_id(ebr.def_id).unwrap(),
1572 ty::BoundRegion::BrNamed(ebr.def_id, ebr.name),
1574 _ => return None, // not a free region
1577 let node_id = self.hir
1578 .as_local_node_id(suitable_region_binding_scope)
1580 let is_impl_item = match self.hir.find(node_id) {
1581 Some(Node::Item(..)) | Some(Node::TraitItem(..)) => false,
1582 Some(Node::ImplItem(..)) => {
1583 self.is_bound_region_in_impl_item(suitable_region_binding_scope)
1588 return Some(FreeRegionInfo {
1589 def_id: suitable_region_binding_scope,
1590 boundregion: bound_region,
1591 is_impl_item: is_impl_item,
1595 pub fn return_type_impl_trait(
1597 scope_def_id: DefId,
1598 ) -> Option<Ty<'tcx>> {
1599 // HACK: `type_of_def_id()` will fail on these (#55796), so return None
1600 let node_id = self.hir.as_local_node_id(scope_def_id).unwrap();
1601 match self.hir.get(node_id) {
1602 Node::Item(item) => {
1604 ItemKind::Fn(..) => { /* type_of_def_id() will work */ }
1610 _ => { /* type_of_def_id() will work or panic */ }
1613 let ret_ty = self.type_of(scope_def_id);
1615 ty::FnDef(_, _) => {
1616 let sig = ret_ty.fn_sig(*self);
1617 let output = self.erase_late_bound_regions(&sig.output());
1618 if output.is_impl_trait() {
1628 // Here we check if the bound region is in Impl Item.
1629 pub fn is_bound_region_in_impl_item(
1631 suitable_region_binding_scope: DefId,
1633 let container_id = self.associated_item(suitable_region_binding_scope)
1636 if self.impl_trait_ref(container_id).is_some() {
1637 // For now, we do not try to target impls of traits. This is
1638 // because this message is going to suggest that the user
1639 // change the fn signature, but they may not be free to do so,
1640 // since the signature must match the trait.
1642 // FIXME(#42706) -- in some cases, we could do better here.
1649 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
1650 pub fn encode_metadata(self)
1653 self.cstore.encode_metadata(self)
1657 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
1658 /// Call the closure with a local `TyCtxt` using the given arena.
// Builds a fresh set of interners backed by `arena`, wraps them in a
// TyCtxt whose 'tcx lifetime is bounded by the arena, and runs `f`
// inside a TLS context derived from the current one. Several lines
// (the `f` parameter, the TyCtxt construction, and the remaining
// ImplicitCtxt fields) are not visible in this chunk.
1659 pub fn enter_local<F, R>(
1661 arena: &'tcx SyncDroplessArena,
1665 F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1667 let interners = CtxtInterners::new(arena);
1670 interners: &interners,
// Derive the new implicit context from the currently installed one,
// keeping its query job and layout depth.
1672 ty::tls::with_related_context(tcx.global_tcx(), |icx| {
1673 let new_icx = ty::tls::ImplicitCtxt {
1675 query: icx.query.clone(),
1676 layout_depth: icx.layout_depth,
1679 ty::tls::enter_context(&new_icx, |new_icx| {
1686 /// A trait implemented for all X<'a> types which can be safely and
1687 /// efficiently converted to X<'tcx> as long as they are part of the
1688 /// provided TyCtxt<'tcx>.
1689 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1690 /// by looking them up in their respective interners.
1692 /// However, this is still not the best implementation as it does
1693 /// need to compare the components, even for interned values.
1694 /// It would be more efficient if TypedArena provided a way to
1695 /// determine whether the address is in the allocated range.
1697 /// None is returned if the value or one of the components is not part
1698 /// of the provided context.
1699 /// For Ty, None can be returned if either the type interner doesn't
1700 /// contain the TyKind key or if the address of the interned
1701 /// pointer differs. The latter case is possible if a primitive type,
1702 /// e.g. `()` or `u8`, was interned in a different context.
1703 pub trait Lift<'tcx>: fmt::Debug {
1704 type Lifted: fmt::Debug + 'tcx;
1705 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
1708 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1709 type Lifted = Ty<'tcx>;
1710 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1711 if tcx.interners.arena.in_arena(*self as *const _) {
1712 return Some(unsafe { mem::transmute(*self) });
1714 // Also try in the global tcx if we're not that.
1715 if !tcx.is_global() {
1716 self.lift_to_tcx(tcx.global_tcx())
1723 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1724 type Lifted = Region<'tcx>;
1725 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1726 if tcx.interners.arena.in_arena(*self as *const _) {
1727 return Some(unsafe { mem::transmute(*self) });
1729 // Also try in the global tcx if we're not that.
1730 if !tcx.is_global() {
1731 self.lift_to_tcx(tcx.global_tcx())
1738 impl<'a, 'tcx> Lift<'tcx> for Goal<'a> {
1739 type Lifted = Goal<'tcx>;
1740 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Goal<'tcx>> {
1741 if tcx.interners.arena.in_arena(*self as *const _) {
1742 return Some(unsafe { mem::transmute(*self) });
1744 // Also try in the global tcx if we're not that.
1745 if !tcx.is_global() {
1746 self.lift_to_tcx(tcx.global_tcx())
1753 impl<'a, 'tcx> Lift<'tcx> for &'a List<Goal<'a>> {
1754 type Lifted = &'tcx List<Goal<'tcx>>;
1755 fn lift_to_tcx<'b, 'gcx>(
1757 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1758 ) -> Option<&'tcx List<Goal<'tcx>>> {
1759 if tcx.interners.arena.in_arena(*self as *const _) {
1760 return Some(unsafe { mem::transmute(*self) });
1762 // Also try in the global tcx if we're not that.
1763 if !tcx.is_global() {
1764 self.lift_to_tcx(tcx.global_tcx())
1771 impl<'a, 'tcx> Lift<'tcx> for &'a List<Clause<'a>> {
1772 type Lifted = &'tcx List<Clause<'tcx>>;
1773 fn lift_to_tcx<'b, 'gcx>(
1775 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1776 ) -> Option<&'tcx List<Clause<'tcx>>> {
1777 if tcx.interners.arena.in_arena(*self as *const _) {
1778 return Some(unsafe { mem::transmute(*self) });
1780 // Also try in the global tcx if we're not that.
1781 if !tcx.is_global() {
1782 self.lift_to_tcx(tcx.global_tcx())
1789 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1790 type Lifted = &'tcx Const<'tcx>;
1791 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1792 if tcx.interners.arena.in_arena(*self as *const _) {
1793 return Some(unsafe { mem::transmute(*self) });
1795 // Also try in the global tcx if we're not that.
1796 if !tcx.is_global() {
1797 self.lift_to_tcx(tcx.global_tcx())
1804 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1805 type Lifted = &'tcx Substs<'tcx>;
1806 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1807 if self.len() == 0 {
1808 return Some(List::empty());
1810 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1811 return Some(unsafe { mem::transmute(*self) });
1813 // Also try in the global tcx if we're not that.
1814 if !tcx.is_global() {
1815 self.lift_to_tcx(tcx.global_tcx())
1822 impl<'a, 'tcx> Lift<'tcx> for &'a List<Ty<'a>> {
1823 type Lifted = &'tcx List<Ty<'tcx>>;
1824 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1825 -> Option<&'tcx List<Ty<'tcx>>> {
1826 if self.len() == 0 {
1827 return Some(List::empty());
1829 if tcx.interners.arena.in_arena(*self as *const _) {
1830 return Some(unsafe { mem::transmute(*self) });
1832 // Also try in the global tcx if we're not that.
1833 if !tcx.is_global() {
1834 self.lift_to_tcx(tcx.global_tcx())
1841 impl<'a, 'tcx> Lift<'tcx> for &'a List<ExistentialPredicate<'a>> {
1842 type Lifted = &'tcx List<ExistentialPredicate<'tcx>>;
1843 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1844 -> Option<&'tcx List<ExistentialPredicate<'tcx>>> {
1845 if self.is_empty() {
1846 return Some(List::empty());
1848 if tcx.interners.arena.in_arena(*self as *const _) {
1849 return Some(unsafe { mem::transmute(*self) });
1851 // Also try in the global tcx if we're not that.
1852 if !tcx.is_global() {
1853 self.lift_to_tcx(tcx.global_tcx())
1860 impl<'a, 'tcx> Lift<'tcx> for &'a List<Predicate<'a>> {
1861 type Lifted = &'tcx List<Predicate<'tcx>>;
1862 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1863 -> Option<&'tcx List<Predicate<'tcx>>> {
1864 if self.is_empty() {
1865 return Some(List::empty());
1867 if tcx.interners.arena.in_arena(*self as *const _) {
1868 return Some(unsafe { mem::transmute(*self) });
1870 // Also try in the global tcx if we're not that.
1871 if !tcx.is_global() {
1872 self.lift_to_tcx(tcx.global_tcx())
1879 impl<'a, 'tcx> Lift<'tcx> for &'a List<CanonicalVarInfo> {
1880 type Lifted = &'tcx List<CanonicalVarInfo>;
1881 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1882 if self.len() == 0 {
1883 return Some(List::empty());
1885 if tcx.interners.arena.in_arena(*self as *const _) {
1886 return Some(unsafe { mem::transmute(*self) });
1888 // Also try in the global tcx if we're not that.
1889 if !tcx.is_global() {
1890 self.lift_to_tcx(tcx.global_tcx())
1897 impl<'a, 'tcx> Lift<'tcx> for &'a List<ProjectionKind<'a>> {
1898 type Lifted = &'tcx List<ProjectionKind<'tcx>>;
1899 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1900 if self.len() == 0 {
1901 return Some(List::empty());
1903 if tcx.interners.arena.in_arena(*self as *const _) {
1904 return Some(unsafe { mem::transmute(*self) });
1906 // Also try in the global tcx if we're not that.
1907 if !tcx.is_global() {
1908 self.lift_to_tcx(tcx.global_tcx())
1916 use super::{GlobalCtxt, TyCtxt};
1922 use errors::{Diagnostic, TRACK_DIAGNOSTICS};
1923 use rustc_data_structures::OnDrop;
1924 use rustc_data_structures::sync::{self, Lrc, Lock};
1925 use dep_graph::OpenTask;
1927 #[cfg(not(parallel_queries))]
1928 use std::cell::Cell;
1930 #[cfg(parallel_queries)]
1933 /// This is the implicit state of rustc. It contains the current
1934 /// TyCtxt and query. It is updated when creating a local interner or
1935 /// executing a new query. Whenever there's a TyCtxt value available
1936 /// you should also have access to an ImplicitCtxt through the functions
1939 pub struct ImplicitCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
1940 /// The current TyCtxt. Initially created by `enter_global` and updated
1941 /// by `enter_local` with a new local interner
1942 pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
1944 /// The current query job, if any. This is updated by start_job in
1945 /// ty::query::plumbing when executing a query
1946 pub query: Option<Lrc<query::QueryJob<'gcx>>>,
1948 /// Used to prevent layout from recursing too deeply.
1949 pub layout_depth: usize,
1951 /// The current dep graph task. This is used to add dependencies to queries
1952 /// when executing them
1953 pub task: &'a OpenTask,
/// Sets Rayon's thread local variable which is preserved for Rayon jobs
/// to `value` during the call to `f`. It is restored to its previous value after.
/// This is used to set the pointer to the new ImplicitCtxt.
#[cfg(parallel_queries)]
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
    // Delegate scoping/restoration to rayon's TLV helper.
    rayon_core::tlv::with(value, f)
}
/// Gets Rayon's thread local variable which is preserved for Rayon jobs.
/// This is used to get the pointer to the current ImplicitCtxt.
#[cfg(parallel_queries)]
fn get_tlv() -> usize {
    rayon_core::tlv::get()
}
/// A thread local variable which stores a pointer to the current ImplicitCtxt
/// (0 means "none set").
#[cfg(not(parallel_queries))]
thread_local! {
    static TLV: Cell<usize> = Cell::new(0);
}
1975 /// Sets TLV to `value` during the call to `f`.
1976 /// It is restored to its previous value after.
1977 /// This is used to set the pointer to the new ImplicitCtxt.
1978 #[cfg(not(parallel_queries))]
1979 fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
1980 let old = get_tlv();
1981 let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
1982 TLV.with(|tlv| tlv.set(value));
1986 /// This is used to get the pointer to the current ImplicitCtxt.
1987 #[cfg(not(parallel_queries))]
1988 fn get_tlv() -> usize {
1989 TLV.with(|tlv| tlv.get())
1992 /// This is a callback from libsyntax as it cannot access the implicit state
1993 /// in librustc otherwise
1994 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1996 write!(f, "{}", tcx.sess.source_map().span_to_string(span))
2000 /// This is a callback from libsyntax as it cannot access the implicit state
2001 /// in librustc otherwise. It is used to when diagnostic messages are
2002 /// emitted and stores them in the current query, if there is one.
2003 fn track_diagnostic(diagnostic: &Diagnostic) {
2004 with_context_opt(|icx| {
2005 if let Some(icx) = icx {
2006 if let Some(ref query) = icx.query {
2007 query.diagnostics.lock().push(diagnostic.clone());
2013 /// Sets up the callbacks from libsyntax on the current thread
2014 pub fn with_thread_locals<F, R>(f: F) -> R
2015 where F: FnOnce() -> R
2017 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
2018 let original_span_debug = span_dbg.get();
2019 span_dbg.set(span_debug);
2021 let _on_drop = OnDrop(move || {
2022 span_dbg.set(original_span_debug);
2025 TRACK_DIAGNOSTICS.with(|current| {
2026 let original = current.get();
2027 current.set(track_diagnostic);
2029 let _on_drop = OnDrop(move || {
2030 current.set(original);
2038 /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
2039 pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
2041 where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
2043 set_tlv(context as *const _ as usize, || {
2048 /// Enters GlobalCtxt by setting up libsyntax callbacks and
2049 /// creating a initial TyCtxt and ImplicitCtxt.
2050 /// This happens once per rustc session and TyCtxts only exists
2051 /// inside the `f` function.
2052 pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R
2053 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
2055 with_thread_locals(|| {
2056 // Update GCX_PTR to indicate there's a GlobalCtxt available
2057 GCX_PTR.with(|lock| {
2058 *lock.lock() = gcx as *const _ as usize;
2060 // Set GCX_PTR back to 0 when we exit
2061 let _on_drop = OnDrop(move || {
2062 GCX_PTR.with(|lock| *lock.lock() = 0);
2067 interners: &gcx.global_interners,
2069 let icx = ImplicitCtxt {
2073 task: &OpenTask::Ignore,
2075 enter_context(&icx, |_| {
2081 /// Stores a pointer to the GlobalCtxt if one is available.
2082 /// This is used to access the GlobalCtxt in the deadlock handler.
2084 scoped_thread_local!(pub static GCX_PTR: Lock<usize>);
2086 /// Creates a TyCtxt and ImplicitCtxt based on the GCX_PTR thread local.
2087 /// This is used in the deadlock handler.
2088 pub unsafe fn with_global<F, R>(f: F) -> R
2089 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
2091 let gcx = GCX_PTR.with(|lock| *lock.lock());
// SAFETY(review): relies on GCX_PTR holding the address of a live
// GlobalCtxt, which `enter_global` maintains; the caller of this
// unsafe fn must guarantee it is invoked while one is set.
2093 let gcx = &*(gcx as *const GlobalCtxt<'_>);
2096 interners: &gcx.global_interners,
2098 let icx = ImplicitCtxt {
2102 task: &OpenTask::Ignore,
2104 enter_context(&icx, |_| f(tcx))
2107 /// Allows access to the current ImplicitCtxt in a closure if one is available
2108 pub fn with_context_opt<F, R>(f: F) -> R
2109 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
2111 let context = get_tlv();
2115 // We could get an ImplicitCtxt pointer from another thread.
2116 // Ensure that ImplicitCtxt is Sync.
2117 sync::assert_sync::<ImplicitCtxt<'_, '_, '_>>();
// SAFETY(review): a non-zero TLV value was stored by `enter_context`
// and points to an ImplicitCtxt that outlives this call.
2119 unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_, '_>))) }
2123 /// Allows access to the current ImplicitCtxt.
2124 /// Panics if there is no ImplicitCtxt available
2125 pub fn with_context<F, R>(f: F) -> R
2126 where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
2128 with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
2131 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
2132 /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
2133 /// with the same 'gcx lifetime as the TyCtxt passed in.
2134 /// This will panic if you pass it a TyCtxt which has a different global interner from
2135 /// the current ImplicitCtxt's tcx field.
2136 pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
2137 where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
2139 with_context(|context| {
// Compare global-context addresses to justify the lifetime-only
// transmute below: both contexts share the same 'gcx interner.
2141 let gcx = tcx.gcx as *const _ as usize;
2142 assert!(context.tcx.gcx as *const _ as usize == gcx);
2143 let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context);
2149 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
2150 /// interner and local interner as the tcx argument passed in. This means the closure
2151 /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
2152 /// This will panic if you pass it a TyCtxt which has a different global interner or
2153 /// a different local interner from the current ImplicitCtxt's tcx field.
2154 pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
2155 where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
2157 with_context(|context| {
// Both the global ('gcx) and local ('tcx) interners must match for
// the lifetime-only transmute below to be sound.
2159 let gcx = tcx.gcx as *const _ as usize;
2160 let interners = tcx.interners as *const _ as usize;
2161 assert!(context.tcx.gcx as *const _ as usize == gcx);
2162 assert!(context.tcx.interners as *const _ as usize == interners);
2163 let context: &ImplicitCtxt<'_, '_, '_> = mem::transmute(context);
2169 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2170 /// Panics if there is no ImplicitCtxt available
2171 pub fn with<F, R>(f: F) -> R
2172 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
2174 with_context(|context| f(context.tcx))
2177 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2178 /// The closure is passed None if there is no ImplicitCtxt available
2179 pub fn with_opt<F, R>(f: F) -> R
2180 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
2182 with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
// Debug-statistics macro: for each listed `TyKind` variant, walks the
// type interner and tallies how many interned types contain region
// and/or type inference variables, then prints a percentage table.
2186 macro_rules! sty_debug_print {
2187 ($ctxt: expr, $($variant: ident),*) => {{
2188 // curious inner module to allow variant names to be used as
2190 #[allow(non_snake_case)]
2192 use ty::{self, TyCtxt};
2193 use ty::context::Interned;
2195 #[derive(Copy, Clone)]
2198 region_infer: usize,
2203 pub fn go(tcx: TyCtxt<'_, '_, '_>) {
2204 let mut total = DebugStat {
2206 region_infer: 0, ty_infer: 0, both_infer: 0,
// One zeroed counter per requested variant, plus the running total.
2208 $(let mut $variant = total;)*
2210 for &Interned(t) in tcx.interners.type_.borrow().iter() {
2211 let variant = match t.sty {
// Primitive/leaf types are skipped: they never carry infer vars.
2212 ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
2213 ty::Float(..) | ty::Str | ty::Never => continue,
2214 ty::Error => /* unimportant */ continue,
2215 $(ty::$variant(..) => &mut $variant,)*
// Classify by the precomputed type flags rather than walking `t`.
2217 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
2218 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
2222 if region { total.region_infer += 1; variant.region_infer += 1 }
2223 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
2224 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
2226 println!("Ty interner total ty region both");
2227 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
2228 {ty:4.1}% {region:5.1}% {both:4.1}%",
2229 stringify!($variant),
2230 uses = $variant.total,
2231 usespc = $variant.total as f64 * 100.0 / total.total as f64,
2232 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
2233 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
2234 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
2236 println!(" total {uses:6} \
2237 {ty:4.1}% {region:5.1}% {both:4.1}%",
2239 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
2240 region = total.region_infer as f64 * 100.0 / total.total as f64,
2241 both = total.both_infer as f64 * 100.0 / total.total as f64)
2249 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Prints interner occupancy statistics: a per-TyKind table (via
// `sty_debug_print!`) followed by raw sizes of the other interners.
2250 pub fn print_debug_stats(self) {
2253 Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr,
2254 Generator, GeneratorWitness, Dynamic, Closure, Tuple, Bound,
2255 Param, Infer, UnnormalizedProjection, Projection, Opaque, Foreign);
2257 println!("Substs interner: #{}", self.interners.substs.borrow().len());
2258 println!("Region interner: #{}", self.interners.region.borrow().len());
2259 println!("Stability interner: #{}", self.stability_interner.borrow().len());
2260 println!("Allocation interner: #{}", self.allocation_interner.borrow().len());
2261 println!("Layout interner: #{}", self.layout_interner.borrow().len());
2266 /// An entry in an interner.
2267 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
2269 // NB: An Interned<Ty> compares and hashes as a sty.
2270 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
2271 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
// Equality is on the type's structure (`sty`), not on pointer identity.
2272 self.0.sty == other.0.sty
2276 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
2278 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
2279 fn hash<H: Hasher>(&self, s: &mut H) {
// The Borrow impls below let interner lookups use a key type with a
// shorter lifetime ('lcx) than the stored value ('tcx), avoiding an
// allocation on the lookup path.
2284 impl<'tcx: 'lcx, 'lcx> Borrow<TyKind<'lcx>> for Interned<'tcx, TyS<'tcx>> {
2285 fn borrow<'a>(&'a self) -> &'a TyKind<'lcx> {
2290 // NB: An Interned<List<T>> compares and hashes as its elements.
2291 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List<T>> {
2292 fn eq(&self, other: &Interned<'tcx, List<T>>) -> bool {
2293 self.0[..] == other.0[..]
2297 impl<'tcx, T: Eq> Eq for Interned<'tcx, List<T>> {}
2299 impl<'tcx, T: Hash> Hash for Interned<'tcx, List<T>> {
2300 fn hash<H: Hasher>(&self, s: &mut H) {
2305 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, List<Ty<'tcx>>> {
2306 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
2311 impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, List<CanonicalVarInfo>> {
2312 fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
2317 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
2318 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
2323 impl<'tcx: 'lcx, 'lcx> Borrow<[ProjectionKind<'lcx>]>
2324 for Interned<'tcx, List<ProjectionKind<'tcx>>> {
2325 fn borrow<'a>(&'a self) -> &'a [ProjectionKind<'lcx>] {
2330 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
2331 fn borrow<'a>(&'a self) -> &'a RegionKind {
2336 impl<'tcx: 'lcx, 'lcx> Borrow<GoalKind<'lcx>> for Interned<'tcx, GoalKind<'tcx>> {
2337 fn borrow<'a>(&'a self) -> &'a GoalKind<'lcx> {
2342 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
2343 for Interned<'tcx, List<ExistentialPredicate<'tcx>>> {
2344 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
2349 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
2350 for Interned<'tcx, List<Predicate<'tcx>>> {
2351 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
2356 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
2357 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
2362 impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]>
2363 for Interned<'tcx, List<Clause<'tcx>>> {
2364 fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] {
2369 impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]>
2370 for Interned<'tcx, List<Goal<'tcx>>> {
2371 fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] {
// Generates a `TyCtxt::$method` interning method: look the value up in
// the appropriate interner (local if it contains inference vars, global
// otherwise), allocating and inserting it on a miss.
2376 macro_rules! intern_method {
2377 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
2380 $keep_in_local_tcx:expr) -> $ty:ty) => {
2381 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
2382 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
2383 let key = ($alloc_to_key)(&v);
2385 // HACK(eddyb) Depend on flags being accurate to
2386 // determine that all contents are in the global tcx.
2387 // See comments on Lift for why we can't use that.
2388 if ($keep_in_local_tcx)(&v) {
2389 let mut interner = self.interners.$name.borrow_mut();
2390 if let Some(&Interned(v)) = interner.get(key) {
2394 // Make sure we don't end up with inference
2395 // types/regions in the global tcx.
2396 if self.is_global() {
2397 bug!("Attempted to intern `{:?}` which contains \
2398 inference types/regions in the global type context",
// Miss in the local interner: allocate in the local arena and insert.
2402 let i = $alloc_method(&self.interners.arena, v);
2403 interner.insert(Interned(i));
// Value is inference-free: use the global interner instead.
2406 let mut interner = self.global_interners.$name.borrow_mut();
2407 if let Some(&Interned(v)) = interner.get(key) {
2411 // This transmutes $alloc<'tcx> to $alloc<'gcx>
2415 let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
// SAFETY(review): lifetime-only transmute 'gcx -> 'tcx; sound because
// the value was just checked to be free of local-tcx data.
2417 let i = unsafe { mem::transmute(i) };
2418 interner.insert(Interned(i));
// For types interned by value (not as slices): derives the PartialEq /
// Eq / Hash impls on `Interned<$ty>` and then delegates to
// `intern_method!` with a plain arena allocation.
2426 macro_rules! direct_interners {
2427 ($lt_tcx:tt, $($name:ident: $method:ident($keep_in_local_tcx:expr) -> $ty:ty),+) => {
2428 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
2429 fn eq(&self, other: &Self) -> bool {
2434 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
2436 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
2437 fn hash<H: Hasher>(&self, s: &mut H) {
2445 |a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) },
2447 $keep_in_local_tcx) -> $ty);)+
// True if `x` mentions anything (infer vars, etc.) that must stay in
// the thread-local tcx rather than the global one.
2451 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
2452 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Direct (by-value) interners for regions, constants, and goals.
2455 direct_interners!('tcx,
2456 region: mk_region(|r: &RegionKind| r.keep_in_local_tcx()) -> RegionKind,
2457 const_: mk_const(|c: &Const<'_>| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>,
2458 goal: mk_goal(|c: &GoalKind<'_>| keep_local(c)) -> GoalKind<'tcx>
// Generates `_intern_*` methods for slice-shaped interned values
// (`List<T>`): a slice stays local if any element must stay local.
2461 macro_rules! slice_interners {
2462 ($($field:ident: $method:ident($ty:ident)),+) => (
2463 $(intern_method!( 'tcx, $field: $method(
2465 |a, v| List::from_arena(a, v),
2467 |xs: &[$ty<'_>]| xs.iter().any(keep_local)) -> List<$ty<'tcx>>);)+
2472 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
2473 predicates: _intern_predicates(Predicate),
2474 type_list: _intern_type_list(Ty),
2475 substs: _intern_substs(Kind),
2476 clauses: _intern_clauses(Clause),
2477 goal_list: _intern_goals(Goal),
2478 projs: _intern_projs(ProjectionKind)
2481 // This isn't a perfect fit: CanonicalVarInfo slices are always
2482 // allocated in the global arena, so this `intern_method!` macro is
2483 // overly general. But we just return false for the code that checks
2484 // whether they belong in the thread-local arena, so no harm done, and
2485 // seems better than open-coding the rest.
2488 canonical_var_infos: _intern_canonical_var_infos(
2489 &[CanonicalVarInfo],
2490 |a, v| List::from_arena(a, v),
2492 |_xs: &[CanonicalVarInfo]| -> bool { false }
2493 ) -> List<CanonicalVarInfo>
// Constructor and interning helpers on TyCtxt: `mk_*` methods build and
// intern types, `intern_*` methods intern slices, plus lint utilities
// and per-HirId lookup helpers.
2496 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
2497 /// Given a `fn` type, returns an equivalent `unsafe fn` type;
2498 /// that is, a `fn` type that is equivalent in every way for being
2500 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2501 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
2502 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
2503 unsafety: hir::Unsafety::Unsafe,
2508 /// Given a closure signature `sig`, returns an equivalent `fn`
2509 /// type with the same signature. Detuples and so forth -- so
2510 /// e.g. if we have a sig with `Fn<(u32, i32)>` then you would get
2511 /// a `fn(u32, i32)`.
2512 pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2513 let converted_sig = sig.map_bound(|s| {
// Closure sigs pack all params into a single tuple as input 0;
// unpack that tuple into individual fn parameters.
2514 let params_iter = match s.inputs()[0].sty {
2515 ty::Tuple(params) => {
2516 params.into_iter().cloned()
2524 hir::Unsafety::Normal,
2529 self.mk_fn_ptr(converted_sig)
// Fundamental type constructor: interns `st` via the interners.
2532 pub fn mk_ty(&self, st: TyKind<'tcx>) -> Ty<'tcx> {
2533 CtxtInterners::intern_ty(&self.interners, &self.global_interners, st)
// Maps an AST machine-int type to the corresponding preinterned type.
2536 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
2538 ast::IntTy::Isize => self.types.isize,
2539 ast::IntTy::I8 => self.types.i8,
2540 ast::IntTy::I16 => self.types.i16,
2541 ast::IntTy::I32 => self.types.i32,
2542 ast::IntTy::I64 => self.types.i64,
2543 ast::IntTy::I128 => self.types.i128,
2547 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
2549 ast::UintTy::Usize => self.types.usize,
2550 ast::UintTy::U8 => self.types.u8,
2551 ast::UintTy::U16 => self.types.u16,
2552 ast::UintTy::U32 => self.types.u32,
2553 ast::UintTy::U64 => self.types.u64,
2554 ast::UintTy::U128 => self.types.u128,
2558 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
2560 ast::FloatTy::F32 => self.types.f32,
2561 ast::FloatTy::F64 => self.types.f64,
2565 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str`
2569 pub fn mk_static_str(self) -> Ty<'tcx> {
2570 self.mk_imm_ref(self.types.re_static, self.mk_str())
2573 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2574 // take a copy of substs so that we own the vectors inside
2575 self.mk_ty(Adt(def, substs))
2578 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
2579 self.mk_ty(Foreign(def_id))
// Builds `Box<ty>` by instantiating the lang item's generics:
// param 0 is the boxed type; any further type params (e.g. the
// allocator) fall back to their declared defaults.
2582 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2583 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
2584 let adt_def = self.adt_def(def_id);
2585 let substs = Substs::for_item(self, def_id, |param, substs| {
2587 GenericParamDefKind::Lifetime => bug!(),
2588 GenericParamDefKind::Type { has_default, .. } => {
2589 if param.index == 0 {
2592 assert!(has_default);
2593 self.type_of(param.def_id).subst(self, substs).into()
2598 self.mk_ty(Adt(adt_def, substs))
2601 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2602 self.mk_ty(RawPtr(tm))
2605 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2606 self.mk_ty(Ref(r, tm.ty, tm.mutbl))
2609 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2610 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2613 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2614 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2617 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2618 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2621 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2622 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
// `*const ()`
2625 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
2626 self.mk_imm_ptr(self.mk_unit())
2629 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
2630 self.mk_ty(Array(ty, ty::Const::from_usize(self, n)))
2633 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2634 self.mk_ty(Slice(ty))
2637 pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
2638 self.mk_ty(Tuple(self.intern_type_list(ts)))
2641 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
2642 iter.intern_with(|ts| self.mk_ty(Tuple(self.intern_type_list(ts))))
2645 pub fn mk_unit(self) -> Ty<'tcx> {
2646 self.intern_tup(&[])
// Default type for diverging expressions: `!` when the `never_type`
// feature is enabled, unit otherwise.
2649 pub fn mk_diverging_default(self) -> Ty<'tcx> {
2650 if self.features().never_type {
2653 self.intern_tup(&[])
2657 pub fn mk_bool(self) -> Ty<'tcx> {
2661 pub fn mk_fn_def(self, def_id: DefId,
2662 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2663 self.mk_ty(FnDef(def_id, substs))
2666 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
2667 self.mk_ty(FnPtr(fty))
// Trait-object type: existential predicates + region bound.
2672 obj: ty::Binder<&'tcx List<ExistentialPredicate<'tcx>>>,
2673 reg: ty::Region<'tcx>
2675 self.mk_ty(Dynamic(obj, reg))
2678 pub fn mk_projection(self,
2680 substs: &'tcx Substs<'tcx>)
2682 self.mk_ty(Projection(ProjectionTy {
2688 pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>)
2690 self.mk_ty(Closure(closure_id, closure_substs))
2693 pub fn mk_generator(self,
2695 generator_substs: GeneratorSubsts<'tcx>,
2696 movability: hir::GeneratorMovability)
2698 self.mk_ty(Generator(id, generator_substs, movability))
2701 pub fn mk_generator_witness(self, types: ty::Binder<&'tcx List<Ty<'tcx>>>) -> Ty<'tcx> {
2702 self.mk_ty(GeneratorWitness(types))
// Inference-variable type constructors.
2705 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
2706 self.mk_infer(TyVar(v))
2709 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
2710 self.mk_infer(IntVar(v))
2713 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
2714 self.mk_infer(FloatVar(v))
2717 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
2718 self.mk_ty(Infer(it))
2721 pub fn mk_ty_param(self,
2723 name: InternedString) -> Ty<'tcx> {
2724 self.mk_ty(Param(ParamTy { idx: index, name: name }))
// `Self` is always type parameter index 0.
2727 pub fn mk_self_type(self) -> Ty<'tcx> {
2728 self.mk_ty_param(0, keywords::SelfType.name().as_interned_str())
2731 pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> {
2733 GenericParamDefKind::Lifetime => {
2734 self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into()
2736 GenericParamDefKind::Type {..} => self.mk_ty_param(param.index, param.name).into(),
2740 pub fn mk_opaque(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2741 self.mk_ty(Opaque(def_id, substs))
// Slice-interning wrappers; the underscore-prefixed methods they call
// are generated by the `slice_interners!` macro above.
2744 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
2745 -> &'tcx List<ExistentialPredicate<'tcx>> {
2746 assert!(!eps.is_empty());
// Require a canonical (stably sorted) order so that equal predicate
// sets intern to the same list.
2747 assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater));
2748 self._intern_existential_predicates(eps)
2751 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
2752 -> &'tcx List<Predicate<'tcx>> {
2753 // FIXME consider asking the input slice to be sorted to avoid
2754 // re-interning permutations, in which case that would be asserted
2756 if preds.len() == 0 {
2757 // The macro-generated method below asserts we don't intern an empty slice.
2760 self._intern_predicates(preds)
2764 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
2768 self._intern_type_list(ts)
2772 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx List<Kind<'tcx>> {
2776 self._intern_substs(ts)
2780 pub fn intern_projs(self, ps: &[ProjectionKind<'tcx>]) -> &'tcx List<ProjectionKind<'tcx>> {
2784 self._intern_projs(ps)
// Canonical var infos always live in the global tcx (see the comment
// on the `canonical_var_infos` intern_method invocation above).
2788 pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
2792 self.global_tcx()._intern_canonical_var_infos(ts)
2796 pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> {
2800 self._intern_clauses(ts)
2804 pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> {
2808 self._intern_goals(ts)
// Builds a FnSig whose interned type list is `inputs` followed by
// `output` as the final element.
2812 pub fn mk_fn_sig<I>(self,
2816 unsafety: hir::Unsafety,
2818 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2820 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2822 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2823 inputs_and_output: self.intern_type_list(xs),
2824 variadic, unsafety, abi
2828 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2829 &'tcx List<ExistentialPredicate<'tcx>>>>(self, iter: I)
2831 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2834 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2835 &'tcx List<Predicate<'tcx>>>>(self, iter: I)
2837 iter.intern_with(|xs| self.intern_predicates(xs))
2840 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2841 &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2842 iter.intern_with(|xs| self.intern_type_list(xs))
2845 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2846 &'tcx List<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2847 iter.intern_with(|xs| self.intern_substs(xs))
// Substs for a trait reference: `self_ty` first, then the rest.
2850 pub fn mk_substs_trait(self,
2852 rest: &[Kind<'tcx>])
2853 -> &'tcx Substs<'tcx>
2855 self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned()))
2858 pub fn mk_clauses<I: InternAs<[Clause<'tcx>], Clauses<'tcx>>>(self, iter: I) -> I::Output {
2859 iter.intern_with(|xs| self.intern_clauses(xs))
2862 pub fn mk_goals<I: InternAs<[Goal<'tcx>], Goals<'tcx>>>(self, iter: I) -> I::Output {
2863 iter.intern_with(|xs| self.intern_goals(xs))
// Lint helpers: build the DiagnosticBuilder and emit it immediately.
2866 pub fn lint_hir<S: Into<MultiSpan>>(self,
2867 lint: &'static Lint,
2871 self.struct_span_lint_hir(lint, hir_id, span.into(), msg).emit()
2874 pub fn lint_node<S: Into<MultiSpan>>(self,
2875 lint: &'static Lint,
2879 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
2882 pub fn lint_hir_note<S: Into<MultiSpan>>(self,
2883 lint: &'static Lint,
2888 let mut err = self.struct_span_lint_hir(lint, hir_id, span.into(), msg);
2893 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2894 lint: &'static Lint,
2899 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
// Resolves the effective lint level at `id` by walking up the HIR
// parent chain until a level/source pair is found.
2904 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2905 -> (lint::Level, lint::LintSource)
2907 // Right now we insert a `with_ignore` node in the dep graph here to
2908 // ignore the fact that `lint_levels` below depends on the entire crate.
2909 // For now this'll prevent false positives of recompiling too much when
2910 // anything changes.
2912 // Once red/green incremental compilation lands we should be able to
2913 // remove this because while the crate changes often the lint level map
2914 // will change rarely.
2915 self.dep_graph.with_ignore(|| {
2916 let sets = self.lint_levels(LOCAL_CRATE);
2918 let hir_id = self.hir.definitions().node_to_hir_id(id);
2919 if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
2922 let next = self.hir.get_parent_node(id);
// The crate root must carry a level, so reaching it again is a bug.
2924 bug!("lint traversal reached the root of the crate");
2931 pub fn struct_span_lint_hir<S: Into<MultiSpan>>(self,
2932 lint: &'static Lint,
2936 -> DiagnosticBuilder<'tcx>
2938 let node_id = self.hir.hir_to_node_id(hir_id);
2939 let (level, src) = self.lint_level_at_node(lint, node_id);
2940 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
2943 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
2944 lint: &'static Lint,
2948 -> DiagnosticBuilder<'tcx>
2950 let (level, src) = self.lint_level_at_node(lint, id)
2951 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
2954 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
2955 -> DiagnosticBuilder<'tcx>
2957 let (level, src) = self.lint_level_at_node(lint, id);
2958 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// Per-HirId lookups: each delegates to an owner-keyed query map and
// then indexes by the item-local id.
2961 pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
2962 self.in_scope_traits_map(id.owner)
2963 .and_then(|map| map.get(&id.local_id).cloned())
2966 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
2967 self.named_region_map(id.owner)
2968 .and_then(|map| map.get(&id.local_id).cloned())
2971 pub fn is_late_bound(self, id: HirId) -> bool {
2972 self.is_late_bound_map(id.owner)
2973 .map(|set| set.contains(&id.local_id))
2977 pub fn object_lifetime_defaults(self, id: HirId)
2978 -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
2980 self.object_lifetime_defaults_map(id.owner)
2981 .and_then(|map| map.get(&id.local_id).cloned())
// Helper traits so iterators can be interned directly: `InternAs`
// collects an iterator into a small buffer and hands the slice to an
// interning closure; `InternIteratorElement` adapts owned values,
// references (cloned), and Results (short-circuiting on Err).
2985 pub trait InternAs<T: ?Sized, R> {
2987 fn intern_with<F>(self, f: F) -> Self::Output
2988 where F: FnOnce(&T) -> R;
2991 impl<I, T, R, E> InternAs<[T], R> for I
2992 where E: InternIteratorElement<T, R>,
2993 I: Iterator<Item=E> {
2994 type Output = E::Output;
2995 fn intern_with<F>(self, f: F) -> Self::Output
2996 where F: FnOnce(&[T]) -> R {
2997 E::intern_with(self, f)
3001 pub trait InternIteratorElement<T, R>: Sized {
3003 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
3006 impl<T, R> InternIteratorElement<T, R> for T {
3008 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
// SmallVec avoids a heap allocation for the common short case.
3009 f(&iter.collect::<SmallVec<[_; 8]>>())
3013 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
3017 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
3018 f(&iter.cloned().collect::<SmallVec<[_; 8]>>())
3022 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
3023 type Output = Result<R, E>;
3024 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
// `collect` into Result short-circuits on the first Err.
3025 Ok(f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?))
// Registers query providers that simply read precomputed tables off the
// GlobalCtxt / session. Most take a crate id and assert LOCAL_CRATE
// because the data only exists for the crate being compiled.
3029 pub fn provide(providers: &mut ty::query::Providers<'_>) {
3030 // FIXME(#44234) - almost all of these queries have no sub-queries and
3031 // therefore no actual inputs, they're just reading tables calculated in
3032 // resolve! Does this work? Unsure! That's what the issue is about
3033 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
3034 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
3035 providers.crate_name = |tcx, id| {
3036 assert_eq!(id, LOCAL_CRATE);
3039 providers.get_lib_features = |tcx, id| {
3040 assert_eq!(id, LOCAL_CRATE);
3041 Lrc::new(middle::lib_features::collect(tcx))
3043 providers.get_lang_items = |tcx, id| {
3044 assert_eq!(id, LOCAL_CRATE);
3045 Lrc::new(middle::lang_items::collect(tcx))
3047 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
3048 providers.maybe_unused_trait_import = |tcx, id| {
3049 tcx.maybe_unused_trait_imports.contains(&id)
3051 providers.maybe_unused_extern_crates = |tcx, cnum| {
3052 assert_eq!(cnum, LOCAL_CRATE);
3053 Lrc::new(tcx.maybe_unused_extern_crates.clone())
3056 providers.stability_index = |tcx, cnum| {
3057 assert_eq!(cnum, LOCAL_CRATE);
3058 Lrc::new(stability::Index::new(tcx))
3060 providers.lookup_stability = |tcx, id| {
3061 assert_eq!(id.krate, LOCAL_CRATE);
3062 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
3063 tcx.stability().local_stability(id)
3065 providers.lookup_deprecation_entry = |tcx, id| {
3066 assert_eq!(id.krate, LOCAL_CRATE);
3067 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
3068 tcx.stability().local_deprecation_entry(id)
3070 providers.extern_mod_stmt_cnum = |tcx, id| {
3071 let id = tcx.hir.as_local_node_id(id).unwrap();
3072 tcx.cstore.extern_mod_stmt_cnum_untracked(id)
3074 providers.all_crate_nums = |tcx, cnum| {
3075 assert_eq!(cnum, LOCAL_CRATE);
// `_untracked` accesses bypass dep-graph tracking; safe here because
// the query system records the dependency at this provider boundary.
3076 Lrc::new(tcx.cstore.crates_untracked())
3078 providers.postorder_cnums = |tcx, cnum| {
3079 assert_eq!(cnum, LOCAL_CRATE);
3080 Lrc::new(tcx.cstore.postorder_cnums_untracked())
3082 providers.output_filenames = |tcx, cnum| {
3083 assert_eq!(cnum, LOCAL_CRATE);
3084 tcx.output_filenames.clone()
3086 providers.features_query = |tcx, cnum| {
3087 assert_eq!(cnum, LOCAL_CRATE);
3088 Lrc::new(tcx.sess.features_untracked().clone())
3090 providers.is_panic_runtime = |tcx, cnum| {
3091 assert_eq!(cnum, LOCAL_CRATE);
3092 attr::contains_name(tcx.hir.krate_attrs(), "panic_runtime")
3094 providers.is_compiler_builtins = |tcx, cnum| {
3095 assert_eq!(cnum, LOCAL_CRATE);
3096 attr::contains_name(tcx.hir.krate_attrs(), "compiler_builtins")