1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
14 use dep_graph::{DepNode, DepConstructor};
15 use errors::DiagnosticBuilder;
17 use session::config::{BorrowckMode, OutputFilenames};
18 use session::config::CrateType;
20 use hir::{TraitCandidate, HirId, ItemLocalId};
21 use hir::def::{Def, Export};
22 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
23 use hir::map as hir_map;
24 use hir::map::DefPathHash;
25 use lint::{self, Lint};
26 use ich::{StableHashingContext, NodeIdHashingMode};
27 use infer::canonical::{CanonicalVarInfo, CanonicalVarInfos};
28 use infer::outlives::free_region_map::FreeRegionMap;
29 use middle::cstore::CrateStoreDyn;
30 use middle::cstore::EncodedMetadata;
31 use middle::lang_items;
32 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
33 use middle::stability;
34 use mir::{self, Mir, interpret};
35 use mir::interpret::Allocation;
36 use ty::subst::{CanonicalSubsts, Kind, Substs, Subst};
39 use traits::{Clause, Clauses, Goal, Goals};
40 use ty::{self, Ty, TypeAndMut};
41 use ty::{TyS, TyKind, List};
42 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorSubsts, Region, Const};
43 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
45 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
47 use ty::GenericParamDefKind;
48 use ty::layout::{LayoutDetails, TargetDataLayout};
53 use util::nodemap::{DefIdSet, ItemLocalMap};
54 use util::nodemap::{FxHashMap, FxHashSet};
55 use smallvec::SmallVec;
56 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
57 StableHasher, StableHasherResult,
59 use arena::{TypedArena, SyncDroplessArena};
60 use rustc_data_structures::indexed_vec::IndexVec;
61 use rustc_data_structures::sync::{self, Lrc, Lock, WorkerLocal};
63 use std::borrow::Borrow;
64 use std::cmp::Ordering;
65 use std::collections::hash_map::{self, Entry};
66 use std::hash::{Hash, Hasher};
73 use rustc_target::spec::abi;
74 use syntax::ast::{self, NodeId};
76 use syntax::source_map::MultiSpan;
77 use syntax::edition::Edition;
78 use syntax::feature_gate;
79 use syntax::symbol::{Symbol, keywords, InternedString};
84 pub struct AllArenas<'tcx> {
85 pub global: WorkerLocal<GlobalArenas<'tcx>>,
86 pub interner: SyncDroplessArena,
89 impl<'tcx> AllArenas<'tcx> {
90 pub fn new() -> Self {
92 global: WorkerLocal::new(|_| GlobalArenas::new()),
93 interner: SyncDroplessArena::new(),
99 pub struct GlobalArenas<'tcx> {
101 layout: TypedArena<LayoutDetails>,
104 generics: TypedArena<ty::Generics>,
105 trait_def: TypedArena<ty::TraitDef>,
106 adt_def: TypedArena<ty::AdtDef>,
107 steal_mir: TypedArena<Steal<Mir<'tcx>>>,
108 mir: TypedArena<Mir<'tcx>>,
109 tables: TypedArena<ty::TypeckTables<'tcx>>,
111 const_allocs: TypedArena<interpret::Allocation>,
114 impl<'tcx> GlobalArenas<'tcx> {
115 pub fn new() -> GlobalArenas<'tcx> {
117 layout: TypedArena::new(),
118 generics: TypedArena::new(),
119 trait_def: TypedArena::new(),
120 adt_def: TypedArena::new(),
121 steal_mir: TypedArena::new(),
122 mir: TypedArena::new(),
123 tables: TypedArena::new(),
124 const_allocs: TypedArena::new(),
/// A hash set of interned `T` values, wrapped in a `Lock` so the interner
/// tables in `CtxtInterners` can be mutated behind a shared reference.
129 type InternedSet<'tcx, T> = Lock<FxHashSet<Interned<'tcx, T>>>;
131 pub struct CtxtInterners<'tcx> {
132 /// The arena that types, regions, etc are allocated from
133 arena: &'tcx SyncDroplessArena,
135 /// Specifically use a speedy hash algorithm for these hash sets,
136 /// they're accessed quite often.
137 type_: InternedSet<'tcx, TyS<'tcx>>,
138 type_list: InternedSet<'tcx, List<Ty<'tcx>>>,
139 substs: InternedSet<'tcx, Substs<'tcx>>,
140 canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo>>,
141 region: InternedSet<'tcx, RegionKind>,
142 existential_predicates: InternedSet<'tcx, List<ExistentialPredicate<'tcx>>>,
143 predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
144 const_: InternedSet<'tcx, Const<'tcx>>,
145 clauses: InternedSet<'tcx, List<Clause<'tcx>>>,
146 goals: InternedSet<'tcx, List<Goal<'tcx>>>,
149 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
150 fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
153 type_: Default::default(),
154 type_list: Default::default(),
155 substs: Default::default(),
156 region: Default::default(),
157 existential_predicates: Default::default(),
158 canonical_var_infos: Default::default(),
159 predicates: Default::default(),
160 const_: Default::default(),
161 clauses: Default::default(),
162 goals: Default::default(),
168 local: &CtxtInterners<'tcx>,
169 global: &CtxtInterners<'gcx>,
172 let flags = super::flags::FlagComputation::for_sty(&st);
174 // HACK(eddyb) Depend on flags being accurate to
175 // determine that all contents are in the global tcx.
176 // See comments on Lift for why we can't use that.
177 if flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
178 let mut interner = local.type_.borrow_mut();
179 if let Some(&Interned(ty)) = interner.get(&st) {
183 let ty_struct = TyS {
186 outer_exclusive_binder: flags.outer_exclusive_binder,
189 // Make sure we don't end up with inference
190 // types/regions in the global interner
191 if local as *const _ as usize == global as *const _ as usize {
192 bug!("Attempted to intern `{:?}` which contains \
193 inference types/regions in the global type context",
197 // Don't be &mut TyS.
198 let ty: Ty<'tcx> = local.arena.alloc(ty_struct);
199 interner.insert(Interned(ty));
202 let mut interner = global.type_.borrow_mut();
203 if let Some(&Interned(ty)) = interner.get(&st) {
207 let ty_struct = TyS {
210 outer_exclusive_binder: flags.outer_exclusive_binder,
213 // This is safe because all the types the ty_struct can point to
214 // already is in the global arena
215 let ty_struct: TyS<'gcx> = unsafe {
216 mem::transmute(ty_struct)
219 // Don't be &mut TyS.
220 let ty: Ty<'gcx> = global.arena.alloc(ty_struct);
221 interner.insert(Interned(ty));
227 pub struct CommonTypes<'tcx> {
247 pub re_empty: Region<'tcx>,
248 pub re_static: Region<'tcx>,
249 pub re_erased: Region<'tcx>,
/// A read-only view into an `ItemLocalMap`, paired with the `local_id_root`
/// of the owning `TypeckTables` so `HirId` keys can be validated (via
/// `validate_hir_id_for_typeck_tables`) before their `local_id` is used.
252 pub struct LocalTableInContext<'a, V: 'a> {
// `DefId` whose `index` must equal the `owner` of every `HirId` key;
// `None` for the empty "Null Object" tables used by some analysis passes.
253 local_id_root: Option<DefId>,
254 data: &'a ItemLocalMap<V>
257 /// Validate that the given HirId (respectively its `local_id` part) can be
258 /// safely used as a key in the tables of a TypeckTable. For that to be
259 /// the case, the HirId must have the same `owner` as all the other IDs in
260 /// this table (signified by `local_id_root`). Otherwise the HirId
261 /// would be in a different frame of reference and using its `local_id`
262 /// would result in lookup errors, or worse, in silently wrong data being
264 fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
267 if cfg!(debug_assertions) {
268 if let Some(local_id_root) = local_id_root {
269 if hir_id.owner != local_id_root.index {
270 ty::tls::with(|tcx| {
271 let node_id = tcx.hir.hir_to_node_id(hir_id);
273 bug!("node {} with HirId::owner {:?} cannot be placed in \
274 TypeckTables with local_id_root {:?}",
275 tcx.hir.node_to_string(node_id),
276 DefId::local(hir_id.owner),
281 // We use "Null Object" TypeckTables in some of the analysis passes.
282 // These are just expected to be empty and their `local_id_root` is
283 // `None`. Therefore we cannot verify whether a given `HirId` would
284 // be a valid key for the given table. Instead we make sure that
285 // nobody tries to write to such a Null Object table.
287 bug!("access to invalid TypeckTables")
293 impl<'a, V> LocalTableInContext<'a, V> {
294 pub fn contains_key(&self, id: hir::HirId) -> bool {
295 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
296 self.data.contains_key(&id.local_id)
299 pub fn get(&self, id: hir::HirId) -> Option<&V> {
300 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
301 self.data.get(&id.local_id)
304 pub fn iter(&self) -> hash_map::Iter<hir::ItemLocalId, V> {
309 impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
312 fn index(&self, key: hir::HirId) -> &V {
313 self.get(key).expect("LocalTableInContext: key not found")
/// Mutable counterpart of `LocalTableInContext`: same `HirId`-validated view
/// over an `ItemLocalMap`, but permitting insertion, removal, and in-place
/// mutation of entries.
317 pub struct LocalTableInContextMut<'a, V: 'a> {
// Same validation root as in `LocalTableInContext`; writes through this
// view require it to be `Some` (see `validate_hir_id_for_typeck_tables`).
318 local_id_root: Option<DefId>,
319 data: &'a mut ItemLocalMap<V>
322 impl<'a, V> LocalTableInContextMut<'a, V> {
323 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
324 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
325 self.data.get_mut(&id.local_id)
328 pub fn entry(&mut self, id: hir::HirId) -> Entry<hir::ItemLocalId, V> {
329 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
330 self.data.entry(id.local_id)
333 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
334 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
335 self.data.insert(id.local_id, val)
338 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
339 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
340 self.data.remove(&id.local_id)
344 #[derive(RustcEncodable, RustcDecodable, Debug)]
345 pub struct TypeckTables<'tcx> {
346 /// The HirId::owner all ItemLocalIds in this table are relative to.
347 pub local_id_root: Option<DefId>,
349 /// Resolved definitions for `<T>::X` associated paths and
350 /// method calls, including those of overloaded operators.
351 type_dependent_defs: ItemLocalMap<Def>,
353 /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
354 /// or patterns (`S { field }`). The index is often useful by itself, but to learn more
355 /// about the field you also need definition of the variant to which the field
356 /// belongs, but it may not exist if it's a tuple field (`tuple.0`).
357 field_indices: ItemLocalMap<usize>,
359 /// Stores the canonicalized types provided by the user. See also `UserAssertTy` statement in
361 user_provided_tys: ItemLocalMap<CanonicalTy<'tcx>>,
363 /// Stores the types for various nodes in the AST. Note that this table
364 /// is not guaranteed to be populated until after typeck. See
365 /// typeck::check::fn_ctxt for details.
366 node_types: ItemLocalMap<Ty<'tcx>>,
368 /// Stores the type parameters which were substituted to obtain the type
369 /// of this node. This only applies to nodes that refer to entities
370 /// parameterized by type parameters, such as generic fns, types, or
372 node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,
374 /// Stores the substitutions that the user explicitly gave (if any)
375 /// attached to `id`. These will not include any inferred
376 /// values. The canonical form is used to capture things like `_`
377 /// or other unspecified values.
381 /// If the user wrote `foo.collect::<Vec<_>>()`, then the
382 /// canonical substitutions would include only `for<X> { Vec<X>
384 user_substs: ItemLocalMap<CanonicalSubsts<'tcx>>,
386 adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
388 /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
389 pat_binding_modes: ItemLocalMap<BindingMode>,
391 /// Stores the types which were implicitly dereferenced in pattern binding modes
392 /// for later usage in HAIR lowering. For example,
395 /// match &&Some(5i32) {
400 /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
403 /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
404 pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
407 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
409 /// Records the reasons that we picked the kind of each closure;
410 /// not all closures are present in the map.
411 closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,
413 /// For each fn, records the "liberated" types of its arguments
414 /// and return type. Liberated means that all bound regions
415 /// (including late-bound regions) are replaced with free
416 /// equivalents. This table is not used in codegen (since regions
417 /// are erased there) and hence is not serialized to metadata.
418 liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,
420 /// For each FRU expression, record the normalized types of the fields
421 /// of the struct - this is needed because it is non-trivial to
422 /// normalize while preserving regions. This table is used only in
423 /// MIR construction and hence is not serialized to metadata.
424 fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,
426 /// Maps a cast expression to its kind. This is keyed on the
427 /// *from* expression of the cast, not the cast itself.
428 cast_kinds: ItemLocalMap<ty::cast::CastKind>,
430 /// Set of trait imports actually used in the method resolution.
431 /// This is used for warning unused imports. During type
432 /// checking, this `Lrc` should not be cloned: it must have a ref-count
433 /// of 1 so that we can insert things into the set mutably.
434 pub used_trait_imports: Lrc<DefIdSet>,
436 /// If any errors occurred while type-checking this body,
437 /// this field will be set to `true`.
438 pub tainted_by_errors: bool,
440 /// Stores the free-region relationships that were deduced from
441 /// its where clauses and parameter types. These are then
442 /// read-again by borrowck.
443 pub free_region_map: FreeRegionMap<'tcx>,
445 /// All the existential types that are restricted to concrete types
447 pub concrete_existential_types: FxHashMap<DefId, Ty<'tcx>>,
450 impl<'tcx> TypeckTables<'tcx> {
451 pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
454 type_dependent_defs: ItemLocalMap(),
455 field_indices: ItemLocalMap(),
456 user_provided_tys: ItemLocalMap(),
457 node_types: ItemLocalMap(),
458 node_substs: ItemLocalMap(),
459 user_substs: ItemLocalMap(),
460 adjustments: ItemLocalMap(),
461 pat_binding_modes: ItemLocalMap(),
462 pat_adjustments: ItemLocalMap(),
463 upvar_capture_map: FxHashMap(),
464 closure_kind_origins: ItemLocalMap(),
465 liberated_fn_sigs: ItemLocalMap(),
466 fru_field_types: ItemLocalMap(),
467 cast_kinds: ItemLocalMap(),
468 used_trait_imports: Lrc::new(DefIdSet()),
469 tainted_by_errors: false,
470 free_region_map: FreeRegionMap::new(),
471 concrete_existential_types: FxHashMap(),
475 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
476 pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
478 hir::QPath::Resolved(_, ref path) => path.def,
479 hir::QPath::TypeRelative(..) => {
480 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
481 self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
486 pub fn type_dependent_defs(&self) -> LocalTableInContext<Def> {
487 LocalTableInContext {
488 local_id_root: self.local_id_root,
489 data: &self.type_dependent_defs
493 pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<Def> {
494 LocalTableInContextMut {
495 local_id_root: self.local_id_root,
496 data: &mut self.type_dependent_defs
500 pub fn field_indices(&self) -> LocalTableInContext<usize> {
501 LocalTableInContext {
502 local_id_root: self.local_id_root,
503 data: &self.field_indices
507 pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<usize> {
508 LocalTableInContextMut {
509 local_id_root: self.local_id_root,
510 data: &mut self.field_indices
514 pub fn user_provided_tys(&self) -> LocalTableInContext<CanonicalTy<'tcx>> {
515 LocalTableInContext {
516 local_id_root: self.local_id_root,
517 data: &self.user_provided_tys
521 pub fn user_provided_tys_mut(&mut self) -> LocalTableInContextMut<CanonicalTy<'tcx>> {
522 LocalTableInContextMut {
523 local_id_root: self.local_id_root,
524 data: &mut self.user_provided_tys
528 pub fn node_types(&self) -> LocalTableInContext<Ty<'tcx>> {
529 LocalTableInContext {
530 local_id_root: self.local_id_root,
531 data: &self.node_types
535 pub fn node_types_mut(&mut self) -> LocalTableInContextMut<Ty<'tcx>> {
536 LocalTableInContextMut {
537 local_id_root: self.local_id_root,
538 data: &mut self.node_types
542 pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
543 match self.node_id_to_type_opt(id) {
546 bug!("node_id_to_type: no type for node `{}`",
548 let id = tcx.hir.hir_to_node_id(id);
549 tcx.hir.node_to_string(id)
555 pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
556 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
557 self.node_types.get(&id.local_id).cloned()
560 pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<&'tcx Substs<'tcx>> {
561 LocalTableInContextMut {
562 local_id_root: self.local_id_root,
563 data: &mut self.node_substs
567 pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
568 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
569 self.node_substs.get(&id.local_id).cloned().unwrap_or(Substs::empty())
572 pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
573 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
574 self.node_substs.get(&id.local_id).cloned()
577 pub fn user_substs_mut(&mut self) -> LocalTableInContextMut<CanonicalSubsts<'tcx>> {
578 LocalTableInContextMut {
579 local_id_root: self.local_id_root,
580 data: &mut self.user_substs
584 pub fn user_substs(&self, id: hir::HirId) -> Option<CanonicalSubsts<'tcx>> {
585 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
586 self.user_substs.get(&id.local_id).cloned()
589 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
590 // doesn't provide type parameter substitutions.
591 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
592 self.node_id_to_type(pat.hir_id)
/// Like `pat_ty`, but returns `None` instead of ICE-ing (`bug!`) when no
/// type has been recorded for the pattern's `HirId`.
595 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
596 self.node_id_to_type_opt(pat.hir_id)
599 // Returns the type of an expression as a monotype.
601 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
602 // some cases, we insert `Adjustment` annotations such as auto-deref or
603 // auto-ref. The type returned by this function does not consider such
604 // adjustments. See `expr_ty_adjusted()` instead.
606 // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
607 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
608 // instead of "fn(ty) -> T with T = isize".
609 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
610 self.node_id_to_type(expr.hir_id)
/// Like `expr_ty`, but returns `None` instead of ICE-ing (`bug!`) when no
/// type has been recorded for the expression's `HirId`.
613 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
614 self.node_id_to_type_opt(expr.hir_id)
617 pub fn adjustments(&self) -> LocalTableInContext<Vec<ty::adjustment::Adjustment<'tcx>>> {
618 LocalTableInContext {
619 local_id_root: self.local_id_root,
620 data: &self.adjustments
624 pub fn adjustments_mut(&mut self)
625 -> LocalTableInContextMut<Vec<ty::adjustment::Adjustment<'tcx>>> {
626 LocalTableInContextMut {
627 local_id_root: self.local_id_root,
628 data: &mut self.adjustments
632 pub fn expr_adjustments(&self, expr: &hir::Expr)
633 -> &[ty::adjustment::Adjustment<'tcx>] {
634 validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
635 self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
638 /// Returns the type of `expr`, considering any `Adjustment`
639 /// entry recorded for that expression.
640 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
641 self.expr_adjustments(expr)
643 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
646 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
647 self.expr_adjustments(expr)
649 .map(|adj| adj.target)
650 .or_else(|| self.expr_ty_opt(expr))
653 pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
654 // Only paths and method calls/overloaded operators have
655 // entries in type_dependent_defs, ignore the former here.
656 if let hir::ExprKind::Path(_) = expr.node {
660 match self.type_dependent_defs().get(expr.hir_id) {
661 Some(&Def::Method(_)) => true,
666 pub fn pat_binding_modes(&self) -> LocalTableInContext<BindingMode> {
667 LocalTableInContext {
668 local_id_root: self.local_id_root,
669 data: &self.pat_binding_modes
673 pub fn pat_binding_modes_mut(&mut self)
674 -> LocalTableInContextMut<BindingMode> {
675 LocalTableInContextMut {
676 local_id_root: self.local_id_root,
677 data: &mut self.pat_binding_modes
681 pub fn pat_adjustments(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
682 LocalTableInContext {
683 local_id_root: self.local_id_root,
684 data: &self.pat_adjustments,
688 pub fn pat_adjustments_mut(&mut self)
689 -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
690 LocalTableInContextMut {
691 local_id_root: self.local_id_root,
692 data: &mut self.pat_adjustments,
696 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
697 self.upvar_capture_map[&upvar_id]
700 pub fn closure_kind_origins(&self) -> LocalTableInContext<(Span, ast::Name)> {
701 LocalTableInContext {
702 local_id_root: self.local_id_root,
703 data: &self.closure_kind_origins
707 pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<(Span, ast::Name)> {
708 LocalTableInContextMut {
709 local_id_root: self.local_id_root,
710 data: &mut self.closure_kind_origins
714 pub fn liberated_fn_sigs(&self) -> LocalTableInContext<ty::FnSig<'tcx>> {
715 LocalTableInContext {
716 local_id_root: self.local_id_root,
717 data: &self.liberated_fn_sigs
721 pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<ty::FnSig<'tcx>> {
722 LocalTableInContextMut {
723 local_id_root: self.local_id_root,
724 data: &mut self.liberated_fn_sigs
728 pub fn fru_field_types(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
729 LocalTableInContext {
730 local_id_root: self.local_id_root,
731 data: &self.fru_field_types
735 pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
736 LocalTableInContextMut {
737 local_id_root: self.local_id_root,
738 data: &mut self.fru_field_types
742 pub fn cast_kinds(&self) -> LocalTableInContext<ty::cast::CastKind> {
743 LocalTableInContext {
744 local_id_root: self.local_id_root,
745 data: &self.cast_kinds
749 pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<ty::cast::CastKind> {
750 LocalTableInContextMut {
751 local_id_root: self.local_id_root,
752 data: &mut self.cast_kinds
757 impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
758 fn hash_stable<W: StableHasherResult>(&self,
759 hcx: &mut StableHashingContext<'a>,
760 hasher: &mut StableHasher<W>) {
761 let ty::TypeckTables {
763 ref type_dependent_defs,
765 ref user_provided_tys,
770 ref pat_binding_modes,
772 ref upvar_capture_map,
773 ref closure_kind_origins,
774 ref liberated_fn_sigs,
779 ref used_trait_imports,
782 ref concrete_existential_types,
785 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
786 type_dependent_defs.hash_stable(hcx, hasher);
787 field_indices.hash_stable(hcx, hasher);
788 user_provided_tys.hash_stable(hcx, hasher);
789 node_types.hash_stable(hcx, hasher);
790 node_substs.hash_stable(hcx, hasher);
791 user_substs.hash_stable(hcx, hasher);
792 adjustments.hash_stable(hcx, hasher);
793 pat_binding_modes.hash_stable(hcx, hasher);
794 pat_adjustments.hash_stable(hcx, hasher);
795 hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
802 local_id_root.expect("trying to hash invalid TypeckTables");
804 let var_owner_def_id = DefId {
805 krate: local_id_root.krate,
808 let closure_def_id = DefId {
809 krate: local_id_root.krate,
810 index: closure_expr_id.to_def_id().index,
812 (hcx.def_path_hash(var_owner_def_id),
814 hcx.def_path_hash(closure_def_id))
817 closure_kind_origins.hash_stable(hcx, hasher);
818 liberated_fn_sigs.hash_stable(hcx, hasher);
819 fru_field_types.hash_stable(hcx, hasher);
820 cast_kinds.hash_stable(hcx, hasher);
821 used_trait_imports.hash_stable(hcx, hasher);
822 tainted_by_errors.hash_stable(hcx, hasher);
823 free_region_map.hash_stable(hcx, hasher);
824 concrete_existential_types.hash_stable(hcx, hasher);
829 impl<'tcx> CommonTypes<'tcx> {
830 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
831 // Ensure our type representation does not grow
832 #[cfg(target_pointer_width = "64")]
833 assert!(mem::size_of::<ty::TyKind>() <= 24);
834 #[cfg(target_pointer_width = "64")]
835 assert!(mem::size_of::<ty::TyS>() <= 32);
837 let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty);
838 let mk_region = |r| {
839 if let Some(r) = interners.region.borrow().get(&r) {
842 let r = interners.arena.alloc(r);
843 interners.region.borrow_mut().insert(Interned(r));
851 isize: mk(Int(ast::IntTy::Isize)),
852 i8: mk(Int(ast::IntTy::I8)),
853 i16: mk(Int(ast::IntTy::I16)),
854 i32: mk(Int(ast::IntTy::I32)),
855 i64: mk(Int(ast::IntTy::I64)),
856 i128: mk(Int(ast::IntTy::I128)),
857 usize: mk(Uint(ast::UintTy::Usize)),
858 u8: mk(Uint(ast::UintTy::U8)),
859 u16: mk(Uint(ast::UintTy::U16)),
860 u32: mk(Uint(ast::UintTy::U32)),
861 u64: mk(Uint(ast::UintTy::U64)),
862 u128: mk(Uint(ast::UintTy::U128)),
863 f32: mk(Float(ast::FloatTy::F32)),
864 f64: mk(Float(ast::FloatTy::F64)),
866 re_empty: mk_region(RegionKind::ReEmpty),
867 re_static: mk_region(RegionKind::ReStatic),
868 re_erased: mk_region(RegionKind::ReErased),
873 /// The central data structure of the compiler. It stores references
874 /// to the various **arenas** and also houses the results of the
875 /// various **compiler queries** that have been performed. See the
876 /// [rustc guide] for more details.
878 /// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/ty.html
879 #[derive(Copy, Clone)]
880 pub struct TyCtxt<'a, 'gcx: 'tcx, 'tcx: 'a> {
881 gcx: &'a GlobalCtxt<'gcx>,
882 interners: &'a CtxtInterners<'tcx>
885 impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
886 type Target = &'a GlobalCtxt<'gcx>;
887 fn deref(&self) -> &Self::Target {
892 pub struct GlobalCtxt<'tcx> {
893 global_arenas: &'tcx WorkerLocal<GlobalArenas<'tcx>>,
894 global_interners: CtxtInterners<'tcx>,
896 cstore: &'tcx CrateStoreDyn,
898 pub sess: &'tcx Session,
900 pub dep_graph: DepGraph,
902 /// Common types, pre-interned for your convenience.
903 pub types: CommonTypes<'tcx>,
905 /// Map indicating what traits are in scope for places where this
906 /// is relevant; generated by resolve.
907 trait_map: FxHashMap<DefIndex,
908 Lrc<FxHashMap<ItemLocalId,
909 Lrc<StableVec<TraitCandidate>>>>>,
911 /// Export map produced by name resolution.
912 export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,
914 pub hir: hir_map::Map<'tcx>,
916 /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
917 /// as well as all upstream crates. Only populated in incremental mode.
918 pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
920 pub(crate) queries: query::Queries<'tcx>,
922 // Records the free variables referenced by every closure
923 // expression. Do not track deps for this, just recompute it from
924 // scratch every time.
925 freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,
927 maybe_unused_trait_imports: FxHashSet<DefId>,
929 maybe_unused_extern_crates: Vec<(DefId, Span)>,
931 // Internal cache for metadata decoding. No need to track deps on this.
932 pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
934 /// Caches the results of trait selection. This cache is used
935 /// for things that do not have to do with the parameters in scope.
936 pub selection_cache: traits::SelectionCache<'tcx>,
938 /// Caches the results of trait evaluation. This cache is used
939 /// for things that do not have to do with the parameters in scope.
940 /// Merge this with `selection_cache`?
941 pub evaluation_cache: traits::EvaluationCache<'tcx>,
943 /// The definite name of the current crate after taking into account
944 /// attributes, commandline parameters, etc.
945 pub crate_name: Symbol,
947 /// Data layout specification for the current target.
948 pub data_layout: TargetDataLayout,
950 stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
952 /// Stores the value of constants (and deduplicates the actual memory)
953 allocation_interner: Lock<FxHashSet<&'tcx Allocation>>,
955 pub alloc_map: Lock<interpret::AllocMap<'tcx, &'tcx Allocation>>,
957 layout_interner: Lock<FxHashSet<&'tcx LayoutDetails>>,
959 /// A general purpose channel to throw data out the back towards LLVM worker
962 /// This is intended to only get used during the codegen phase of the compiler
963 /// when satisfying the query for a particular codegen unit. Internally in
964 /// the query it'll send data along this channel to get processed later.
965 pub tx_to_llvm_workers: Lock<mpsc::Sender<Box<dyn Any + Send>>>,
967 output_filenames: Arc<OutputFilenames>,
970 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
971 /// Get the global TyCtxt.
973 pub fn global_tcx(self) -> TyCtxt<'a, 'gcx, 'gcx> {
976 interners: &self.gcx.global_interners,
980 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
981 self.global_arenas.generics.alloc(generics)
984 pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
985 self.global_arenas.steal_mir.alloc(Steal::new(mir))
988 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
989 self.global_arenas.mir.alloc(mir)
992 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
993 self.global_arenas.tables.alloc(tables)
996 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
997 self.global_arenas.trait_def.alloc(def)
1000 pub fn alloc_adt_def(self,
1003 variants: Vec<ty::VariantDef>,
1005 -> &'gcx ty::AdtDef {
1006 let def = ty::AdtDef::new(self, did, kind, variants, repr);
1007 self.global_arenas.adt_def.alloc(def)
1010 pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
1011 if bytes.is_empty() {
1014 self.global_interners.arena.alloc_slice(bytes)
1018 pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
1019 -> &'tcx [&'tcx ty::Const<'tcx>] {
1020 if values.is_empty() {
1023 self.interners.arena.alloc_slice(values)
1027 pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
1028 -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
1029 if values.is_empty() {
1032 self.interners.arena.alloc_slice(values)
1036 pub fn intern_const_alloc(
1039 ) -> &'gcx Allocation {
1040 let allocs = &mut self.allocation_interner.borrow_mut();
1041 if let Some(alloc) = allocs.get(&alloc) {
1045 let interned = self.global_arenas.const_allocs.alloc(alloc);
1046 if let Some(prev) = allocs.replace(interned) { // insert into interner
1047 bug!("Tried to overwrite interned Allocation: {:#?}", prev)
1052 /// Allocates a byte or string literal for `mir::interpret`, read-only
1053 pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
1054 // create an allocation that just contains these bytes
1055 let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes);
1056 let alloc = self.intern_const_alloc(alloc);
1057 self.alloc_map.lock().allocate(alloc)
1060 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
1061 let mut stability_interner = self.stability_interner.borrow_mut();
1062 if let Some(st) = stability_interner.get(&stab) {
1066 let interned = self.global_interners.arena.alloc(stab);
1067 if let Some(prev) = stability_interner.replace(interned) {
1068 bug!("Tried to overwrite interned Stability: {:?}", prev)
1073 pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
1074 let mut layout_interner = self.layout_interner.borrow_mut();
1075 if let Some(layout) = layout_interner.get(&layout) {
1079 let interned = self.global_arenas.layout.alloc(layout);
1080 if let Some(prev) = layout_interner.replace(interned) {
1081 bug!("Tried to overwrite interned Layout: {:?}", prev)
// Attempts to "lift" `value` into this tcx's lifetime — returns None if the
// value (or a component) was not interned in this context. See the `Lift`
// trait docs below.
1086 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
1087 value.lift_to_tcx(self)
1090 /// Like lift, but only tries in the global tcx.
1091 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
1092 value.lift_to_tcx(self.global_tcx())
1095 /// Returns true if self is the same as self.global_tcx().
1096 fn is_global(self) -> bool {
// Compare the local and global interner *addresses*: a TyCtxt is global
// exactly when its `interners` field points at `global_interners`.
1097 let local = self.interners as *const _;
1098 let global = &self.global_interners as *const _;
1099 local as usize == global as usize
1102 /// Returns true if this function must conform to `min_const_fn`
// NOTE(review): several lines of this function (the intrinsic-name match
// arms and some branch bodies) are elided in this extraction.
1103 pub fn is_min_const_fn(self, def_id: DefId) -> bool {
// Inside a staged-API crate (libstd et al.) stricter rules apply:
1104 if self.features().staged_api {
1105 // some intrinsics are waved through if called inside the
1106 // standard library. Users never need to call them directly
1107 if let abi::Abi::RustIntrinsic = self.fn_sig(def_id).abi() {
1108 assert!(!self.is_const_fn(def_id));
1109 match &self.item_name(def_id).as_str()[..] {
1116 // in order for a libstd function to be considered min_const_fn
1117 // it needs to be stable and have no `rustc_const_unstable` attribute
1118 match self.lookup_stability(def_id) {
1119 // stable functions with unstable const fn aren't `min_const_fn`
1120 Some(&attr::Stability { const_stability: Some(_), .. }) => false,
1121 // unstable functions don't need to conform
1122 Some(&attr::Stability { ref level, .. }) if level.is_unstable() => false,
1123 // everything else needs to conform, because it would be callable from
1124 // other `min_const_fn` functions
// Outside staged-API crates: conform unless the `const_fn` feature gate is on.
1128 // users enabling the `const_fn` can do what they want
1129 !self.sess.features_untracked().const_fn
1133 /// Create a type context and call the closure with a `TyCtxt` reference
1134 /// to the context. The closure enforces that the type context and any interned
1135 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
1136 /// reference to the context, to allow formatting values that need it.
// NOTE(review): many interior lines (struct-literal fields, some closure
// bodies) are elided in this extraction; treat this as an outline only.
1137 pub fn create_and_enter<F, R>(s: &'tcx Session,
1138 cstore: &'tcx CrateStoreDyn,
1139 local_providers: ty::query::Providers<'tcx>,
1140 extern_providers: ty::query::Providers<'tcx>,
1141 arenas: &'tcx AllArenas<'tcx>,
1142 resolutions: ty::Resolutions,
1143 hir: hir_map::Map<'tcx>,
1144 on_disk_query_result_cache: query::OnDiskCache<'tcx>,
1146 tx: mpsc::Sender<Box<dyn Any + Send>>,
1147 output_filenames: &OutputFilenames,
1149 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
// Parse the target's data layout up front; failure is unrecoverable.
1151 let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
1154 let interners = CtxtInterners::new(&arenas.interner);
1155 let common_types = CommonTypes::new(&interners);
1156 let dep_graph = hir.dep_graph.clone();
// Build the per-crate providers table: extern providers everywhere, local
// providers for LOCAL_CRATE.
1157 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1158 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1159 providers[LOCAL_CRATE] = local_providers;
// Only build the DefPathHash -> DefId reverse map when dep tracking needs it.
1161 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1162 let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
1165 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
// Iterator over all def-path tables: upstream crates plus the local crate.
1168 let def_path_tables = || {
1169 upstream_def_path_tables
1171 .map(|&(cnum, ref rc)| (cnum, &**rc))
1172 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1175 // Precompute the capacity of the hashmap so we don't have to
1176 // re-allocate when populating it.
1177 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>()
1179 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1181 ::std::default::Default::default()
1184 for (cnum, def_path_table) in def_path_tables() {
1185 def_path_table.add_def_path_hashes_to(cnum, &mut map);
// Re-key the resolver's trait map from NodeId to (HirId owner, local id).
1193 let mut trait_map: FxHashMap<_, Lrc<FxHashMap<_, _>>> = FxHashMap();
1194 for (k, v) in resolutions.trait_map {
1195 let hir_id = hir.node_to_hir_id(k);
1196 let map = trait_map.entry(hir_id.owner).or_default();
// Safe: the Lrc was just created above and is not yet shared.
1197 Lrc::get_mut(map).unwrap()
1198 .insert(hir_id.local_id,
1199 Lrc::new(StableVec::new(v)));
// Assemble the GlobalCtxt itself (field list partially elided here).
1202 let gcx = &GlobalCtxt {
1205 global_arenas: &arenas.global,
1206 global_interners: interners,
1207 dep_graph: dep_graph.clone(),
1208 types: common_types,
1210 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1213 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1214 (hir.local_def_id(k), Lrc::new(v))
1216 maybe_unused_trait_imports:
1217 resolutions.maybe_unused_trait_imports
1219 .map(|id| hir.local_def_id(id))
1221 maybe_unused_extern_crates:
1222 resolutions.maybe_unused_extern_crates
1224 .map(|(id, sp)| (hir.local_def_id(id), sp))
1227 def_path_hash_to_def_id,
1228 queries: query::Queries::new(providers, on_disk_query_result_cache),
1229 rcache: Lock::new(FxHashMap()),
1230 selection_cache: traits::SelectionCache::new(),
1231 evaluation_cache: traits::EvaluationCache::new(),
1232 crate_name: Symbol::intern(crate_name),
1234 layout_interner: Lock::new(FxHashSet()),
1235 stability_interner: Lock::new(FxHashSet()),
1236 allocation_interner: Lock::new(FxHashSet()),
1237 alloc_map: Lock::new(interpret::AllocMap::new()),
1238 tx_to_llvm_workers: Lock::new(tx),
1239 output_filenames: Arc::new(output_filenames.clone()),
1242 sync::assert_send_val(&gcx);
// Install the context in thread-local storage and run the closure.
1244 tls::enter_global(gcx, f)
// Forwards an optimization-fuel query to the session, keyed by crate name;
// `msg` is lazily evaluated only if the session needs it.
1247 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1248 let cname = self.crate_name(LOCAL_CRATE).as_str();
1249 self.sess.consider_optimizing(&cname, msg)
// Convenience wrapper: the `get_lib_features` query for the local crate.
1252 pub fn lib_features(self) -> Lrc<middle::lib_features::LibFeatures> {
1253 self.get_lib_features(LOCAL_CRATE)
// Convenience wrapper: the `get_lang_items` query for the local crate.
1256 pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
1257 self.get_lang_items(LOCAL_CRATE)
1260 /// Due to missing llvm support for lowering 128 bit math to software emulation
1261 /// (on some targets), the lowering can be done in MIR.
1263 /// This function only exists until said support is implemented.
// Maps a 128-bit-math lang-item DefId to its (binary op, is_checked) pair;
// the bool is true for the `*o` (overflow-checked) variants. Returns None
// (in the elided tail) for anything that is not one of these lang items.
1264 pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
1265 let items = self.lang_items();
// Wrap in Some once so each lang-item accessor (Option<DefId>) compares
// directly with `==` below.
1266 let def_id = Some(def_id);
1267 if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1268 else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1269 else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1270 else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1271 else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1272 else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1273 else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1274 else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1275 else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1276 else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1277 else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1278 else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1279 else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1280 else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1281 else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1282 else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1283 else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1284 else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1285 else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1286 else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1287 else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1288 else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1289 else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1290 else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
// Convenience wrapper: the `stability_index` query for the local crate.
1294 pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
1295 self.stability_index(LOCAL_CRATE)
// Convenience wrapper: all crate numbers, via the `all_crate_nums` query.
1298 pub fn crates(self) -> Lrc<Vec<CrateNum>> {
1299 self.all_crate_nums(LOCAL_CRATE)
// Convenience wrapper: the tracked `features_query` for the local crate
// (contrast with `sess.features_untracked()` used elsewhere in this file).
1302 pub fn features(self) -> Lrc<feature_gate::Features> {
1303 self.features_query(LOCAL_CRATE)
// Returns the DefKey for `id`, dispatching to the HIR map for local ids and
// to the crate store for foreign ones. NOTE(review): the `is_local` branch
// condition is elided in this extraction.
1306 pub fn def_key(self, id: DefId) -> hir_map::DefKey {
1308 self.hir.def_key(id)
1310 self.cstore.def_key(id)
1314 /// Convert a `DefId` into its fully expanded `DefPath` (every
1315 /// `DefId` is really just an interned def-path).
1317 /// Note that if `id` is not local to this crate, the result will
1318 /// be a non-local `DefPath`.
// Local ids come from the HIR map, foreign ones from the crate store
// (branch condition elided in this extraction).
1319 pub fn def_path(self, id: DefId) -> hir_map::DefPath {
1321 self.hir.def_path(id)
1323 self.cstore.def_path(id)
// Returns the stable hash of `def_id`'s def-path: from the local definitions
// table for local ids, from the crate store otherwise.
1328 pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
1329 if def_id.is_local() {
1330 self.hir.definitions().def_path_hash(def_id.index)
1332 self.cstore.def_path_hash(def_id)
// Renders `def_id` as "crate_name[dis]::path" for debug output.
1336 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1337 // We are explicitly not going through queries here in order to get
1338 // crate name and disambiguator since this code is called from debug!()
1339 // statements within the query system and we'd run into endless
1340 // recursion otherwise.
1341 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1342 (self.crate_name.clone(),
1343 self.sess.local_crate_disambiguator())
1345 (self.cstore.crate_name_untracked(def_id.krate),
1346 self.cstore.crate_disambiguator_untracked(def_id.krate))
// Format call partially elided here; the arguments below feed a `format!`.
1351 // Don't print the whole crate disambiguator. That's just
1352 // annoying in debug output.
1353 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1354 self.def_path(def_id).to_string_no_crate())
// Owned copy of the crate store's metadata encoding version bytes.
1357 pub fn metadata_encoding_version(self) -> Vec<u8> {
1358 self.cstore.metadata_encoding_version().to_vec()
1361 // Note that this is *untracked* and should only be used within the query
1362 // system if the result is otherwise tracked through queries
// Type-erased access to a crate's metadata blob; callers downcast the `Any`.
1363 pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
1364 self.cstore.crate_data_as_rc_any(cnum)
// Builds a StableHashingContext for incr.-comp. fingerprints. Reading the
// krate is wrapped in `with_ignore` so it does not register a dep-graph edge.
// NOTE(review): some constructor arguments are elided in this extraction.
1367 pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
1368 let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());
1370 StableHashingContext::new(self.sess,
1372 self.hir.definitions(),
1376 // This method makes sure that we have a DepNode and a Fingerprint for
1377 // every upstream crate. It needs to be called once right after the tcx is
1379 // With full-fledged red/green, the method will probably become unnecessary
1380 // as this will be done on-demand.
1381 pub fn allocate_metadata_dep_nodes(self) {
1382 // We cannot use the query versions of crates() and crate_hash(), since
1383 // those would need the DepNodes that we are allocating here.
1384 for cnum in self.cstore.crates_untracked() {
1385 let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
1386 let crate_hash = self.cstore.crate_hash_untracked(cnum);
// Register the node via a dep-graph task whose "computation" is the
// identity on the crate hash (remaining task arguments elided here).
1387 self.dep_graph.with_task(dep_node,
1390 |_, x| x // No transformation needed
1395 // This method exercises the `in_scope_traits_map` query for all possible
1396 // values so that we have their fingerprints available in the DepGraph.
1397 // This is only required as long as we still use the old dependency tracking
1398 // which needs to have the fingerprints of all input nodes beforehand.
1399 pub fn precompute_in_scope_traits_hashes(self) {
// Force the query once per owner in the trait map; results are discarded.
1400 for &def_index in self.trait_map.keys() {
1401 self.in_scope_traits_map(def_index);
// Serializes the on-disk query result cache through `encoder` (the encoder
// parameter itself is elided from this extraction).
1405 pub fn serialize_query_result_cache<E>(self,
1407 -> Result<(), E::Error>
1408 where E: ty::codec::TyEncoder
1410 self.queries.on_disk_cache.serialize(self.global_tcx(), encoder)
1413 /// If true, we should use a naive AST walk to determine if match
1414 /// guard could perform bad mutations (or mutable-borrows).
// Enabled unless explicitly disabled via the `-Z` debugging flag.
1415 pub fn check_for_mutation_in_guard_via_ast_walk(self) -> bool {
1416 !self.sess.opts.debugging_opts.disable_ast_check_for_mutation_in_guard
1419 /// If true, we should use the AST-based borrowck (we may *also* use
1420 /// the MIR-based borrowck).
// Derived from `borrowck_mode()`; see that method for the precedence rules.
1421 pub fn use_ast_borrowck(self) -> bool {
1422 self.borrowck_mode().use_ast()
1425 /// If true, we should use the MIR-based borrowck (we may *also* use
1426 /// the AST-based borrowck).
// Derived from `borrowck_mode()`; see that method for the precedence rules.
1427 pub fn use_mir_borrowck(self) -> bool {
1428 self.borrowck_mode().use_mir()
1431 /// If true, we should use the MIR-based borrow check, but also
1432 /// fall back on the AST borrow check if the MIR-based one errors.
// Derived from `borrowck_mode()`; see that method for the precedence rules.
1433 pub fn migrate_borrowck(self) -> bool {
1434 self.borrowck_mode().migrate()
1437 /// If true, make MIR codegen for `match` emit a temp that holds a
1438 /// borrow of the input to the match expression.
// Currently an alias for `emit_read_for_match` — the two flags are coupled.
1439 pub fn generate_borrow_of_any_match_input(&self) -> bool {
1440 self.emit_read_for_match()
1443 /// If true, make MIR codegen for `match` emit ReadForMatch
1444 /// statements (which simulate the maximal effect of executing the
1445 /// patterns in a match arm).
// Only meaningful under MIR borrowck, and suppressible via `-Z` flag.
1446 pub fn emit_read_for_match(&self) -> bool {
1447 self.use_mir_borrowck() && !self.sess.opts.debugging_opts.nll_dont_emit_read_for_match
1450 /// If true, pattern variables for use in guards on match arms
1451 /// will be bound as references to the data, and occurrences of
1452 /// those variables in the guard expression will implicitly
1453 /// dereference those bindings. (See rust-lang/rust#27282.)
// True exactly when MIR borrowck is active.
1454 pub fn all_pat_vars_are_implicit_refs_within_guards(self) -> bool {
1455 self.borrowck_mode().use_mir()
1458 /// If true, we should enable two-phase borrows checks. This is
1459 /// done with either: `-Ztwo-phase-borrows`, `#![feature(nll)]`,
1460 /// or by opting into an edition after 2015.
// NOTE(review): the early `return` body and the non-2015/2018 edition arm
// are elided in this extraction.
1461 pub fn two_phase_borrows(self) -> bool {
1462 if self.features().nll || self.sess.opts.debugging_opts.two_phase_borrows {
1466 match self.sess.edition() {
1467 Edition::Edition2015 => false,
1468 Edition::Edition2018 => true,
1473 /// What mode(s) of borrowck should we run? AST? MIR? both?
1474 /// (Also considers the `#![feature(nll)]` setting.)
1475 pub fn borrowck_mode(&self) -> BorrowckMode {
1476 // Here are the main constraints we need to deal with:
1478 // 1. An opts.borrowck_mode of `BorrowckMode::Ast` is
1479 // synonymous with no `-Z borrowck=...` flag at all.
1480 // (This is arguably a historical accident.)
1482 // 2. `BorrowckMode::Migrate` is the limited migration to
1483 // NLL that we are deploying with the 2018 edition.
1485 // 3. We want to allow developers on the Nightly channel
1486 // to opt back into the "hard error" mode for NLL,
1487 // (which they can do via specifying `#![feature(nll)]`
1488 // explicitly in their crate).
1490 // So, this precedence list is how pnkfelix chose to work with
1491 // the above constraints:
1493 // * `#![feature(nll)]` *always* means use NLL with hard
1494 // errors. (To simplify the code here, it now even overrides
1495 // a user's attempt to specify `-Z borrowck=compare`, which
1496 // we arguably do not need anymore and should remove.)
1498 // * Otherwise, if no `-Z borrowck=...` flag was given (or
1499 // if `borrowck=ast` was specified), then use the default
1500 // as required by the edition.
1502 // * Otherwise, use the behavior requested via `-Z borrowck=...`
// Precedence step 1: the nll feature gate wins unconditionally.
1504 if self.features().nll { return BorrowckMode::Mir; }
1506 match self.sess.opts.borrowck_mode {
// Precedence step 3: explicit non-Ast flags are honored as given.
1507 mode @ BorrowckMode::Mir |
1508 mode @ BorrowckMode::Compare |
1509 mode @ BorrowckMode::Migrate => mode,
// Precedence step 2: Ast (i.e. no flag) defers to the edition default.
1511 BorrowckMode::Ast => match self.sess.edition() {
1512 Edition::Edition2015 => BorrowckMode::Ast,
1513 Edition::Edition2018 => BorrowckMode::Migrate,
1515 // For now, future editions mean Migrate. (But it
1516 // would make a lot of sense for it to be changed to
1517 // `BorrowckMode::Mir`, depending on how we plan to
1518 // time the forcing of full migration to NLL.)
1519 _ => BorrowckMode::Migrate,
1524 /// Should we emit EndRegion MIR statements? These are consumed by
1525 /// MIR borrowck, but not when NLL is used. They are also consumed
1526 /// by the validation stuff.
// True if the explicit -Z flag is set, MIR validation is on, or MIR
// borrowck (which consumes EndRegion) is active.
1527 pub fn emit_end_regions(self) -> bool {
1528 self.sess.opts.debugging_opts.emit_end_regions ||
1529 self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
1530 self.use_mir_borrowck()
// True if any crate type being built can export monomorphizable generics
// (rlib/dylib can; executables, staticlibs, proc-macros and cdylibs cannot).
// Only meaningful when share-generics is enabled, per the debug assertion.
// NOTE(review): the `match crate_type` header and Rlib arm are elided here.
1534 pub fn local_crate_exports_generics(self) -> bool {
1535 debug_assert!(self.sess.opts.share_generics());
1537 self.sess.crate_types.borrow().iter().any(|crate_type| {
1539 CrateType::Executable |
1540 CrateType::Staticlib |
1541 CrateType::ProcMacro |
1542 CrateType::Cdylib => false,
1544 CrateType::Dylib => true,
// Global-lifetime-only impl: metadata encoding needs 'tcx == 'gcx.
1550 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Delegates crate-metadata encoding to the crate store.
1551 pub fn encode_metadata(self)
1554 self.cstore.encode_metadata(self)
1558 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
1559 /// Call the closure with a local `TyCtxt` using the given arena.
// Builds a fresh local interner over `arena`, wraps it in a TyCtxt whose
// globals come from `self`, then installs a derived ImplicitCtxt in TLS for
// the duration of `f`. NOTE(review): parts of the TyCtxt construction and
// the closure invocation are elided in this extraction.
1560 pub fn enter_local<F, R>(
1562 arena: &'tcx SyncDroplessArena,
1566 F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1568 let interners = CtxtInterners::new(arena);
1571 interners: &interners,
// Derive the new ImplicitCtxt from the current one, keeping the same query
// job and layout depth but swapping in the local tcx.
1573 ty::tls::with_related_context(tcx.global_tcx(), |icx| {
1574 let new_icx = ty::tls::ImplicitCtxt {
1576 query: icx.query.clone(),
1577 layout_depth: icx.layout_depth,
1580 ty::tls::enter_context(&new_icx, |new_icx| {
1587 /// A trait implemented for all X<'a> types which can be safely and
1588 /// efficiently converted to X<'tcx> as long as they are part of the
1589 /// provided TyCtxt<'tcx>.
1590 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1591 /// by looking them up in their respective interners.
1593 /// However, this is still not the best implementation as it does
1594 /// need to compare the components, even for interned values.
1595 /// It would be more efficient if TypedArena provided a way to
1596 /// determine whether the address is in the allocated range.
1598 /// None is returned if the value or one of the components is not part
1599 /// of the provided context.
1600 /// For Ty, None can be returned if either the type interner doesn't
1601 /// contain the TyKind key or if the address of the interned
1602 /// pointer differs. The latter case is possible if a primitive type,
1603 /// e.g. `()` or `u8`, was interned in a different context.
1604 pub trait Lift<'tcx>: fmt::Debug {
// The lifted ('tcx-lifetime) counterpart of the implementing type.
1605 type Lifted: fmt::Debug + 'tcx;
1606 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
// Lift for Ty: an address check against the interner arena followed by a
// lifetime-only transmute; falls back to the global tcx.
1609 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1610 type Lifted = Ty<'tcx>;
1611 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1612 if tcx.interners.arena.in_arena(*self as *const _) {
// SAFETY-style note: only the lifetime changes; `in_arena` proved ownership.
1613 return Some(unsafe { mem::transmute(*self) });
1615 // Also try in the global tcx if we're not that.
1616 if !tcx.is_global() {
1617 self.lift_to_tcx(tcx.global_tcx())
// Lift for Region: same arena-membership + transmute pattern as Ty above.
1624 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1625 type Lifted = Region<'tcx>;
1626 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1627 if tcx.interners.arena.in_arena(*self as *const _) {
1628 return Some(unsafe { mem::transmute(*self) });
1630 // Also try in the global tcx if we're not that.
1631 if !tcx.is_global() {
1632 self.lift_to_tcx(tcx.global_tcx())
// Lift for &Goal: same arena-membership + transmute pattern as Ty above.
1639 impl<'a, 'tcx> Lift<'tcx> for &'a Goal<'a> {
1640 type Lifted = &'tcx Goal<'tcx>;
1641 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Goal<'tcx>> {
1642 if tcx.interners.arena.in_arena(*self as *const _) {
1643 return Some(unsafe { mem::transmute(*self) });
1645 // Also try in the global tcx if we're not that.
1646 if !tcx.is_global() {
1647 self.lift_to_tcx(tcx.global_tcx())
// Lift for &List<Goal>: same arena-membership + transmute pattern.
1654 impl<'a, 'tcx> Lift<'tcx> for &'a List<Goal<'a>> {
1655 type Lifted = &'tcx List<Goal<'tcx>>;
1656 fn lift_to_tcx<'b, 'gcx>(
1658 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1659 ) -> Option<&'tcx List<Goal<'tcx>>> {
1660 if tcx.interners.arena.in_arena(*self as *const _) {
1661 return Some(unsafe { mem::transmute(*self) });
1663 // Also try in the global tcx if we're not that.
1664 if !tcx.is_global() {
1665 self.lift_to_tcx(tcx.global_tcx())
// Lift for &List<Clause>: same arena-membership + transmute pattern.
1672 impl<'a, 'tcx> Lift<'tcx> for &'a List<Clause<'a>> {
1673 type Lifted = &'tcx List<Clause<'tcx>>;
1674 fn lift_to_tcx<'b, 'gcx>(
1676 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1677 ) -> Option<&'tcx List<Clause<'tcx>>> {
1678 if tcx.interners.arena.in_arena(*self as *const _) {
1679 return Some(unsafe { mem::transmute(*self) });
1681 // Also try in the global tcx if we're not that.
1682 if !tcx.is_global() {
1683 self.lift_to_tcx(tcx.global_tcx())
// Lift for &Const: same arena-membership + transmute pattern.
1690 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1691 type Lifted = &'tcx Const<'tcx>;
1692 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1693 if tcx.interners.arena.in_arena(*self as *const _) {
1694 return Some(unsafe { mem::transmute(*self) });
1696 // Also try in the global tcx if we're not that.
1697 if !tcx.is_global() {
1698 self.lift_to_tcx(tcx.global_tcx())
// Lift for &Substs: like the impls above, but the empty list is shared
// (List::empty()) and so lifts without any arena check. Note the arena test
// is on the element slice (`&self[..]`), not the List pointer.
1705 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1706 type Lifted = &'tcx Substs<'tcx>;
1707 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1708 if self.len() == 0 {
1709 return Some(List::empty());
1711 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1712 return Some(unsafe { mem::transmute(*self) });
1714 // Also try in the global tcx if we're not that.
1715 if !tcx.is_global() {
1716 self.lift_to_tcx(tcx.global_tcx())
// Lift for &List<Ty>: empty lists lift for free; otherwise the usual
// arena-membership + transmute pattern.
1723 impl<'a, 'tcx> Lift<'tcx> for &'a List<Ty<'a>> {
1724 type Lifted = &'tcx List<Ty<'tcx>>;
1725 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1726 -> Option<&'tcx List<Ty<'tcx>>> {
1727 if self.len() == 0 {
1728 return Some(List::empty());
1730 if tcx.interners.arena.in_arena(*self as *const _) {
1731 return Some(unsafe { mem::transmute(*self) });
1733 // Also try in the global tcx if we're not that.
1734 if !tcx.is_global() {
1735 self.lift_to_tcx(tcx.global_tcx())
// Lift for &List<ExistentialPredicate>: empty lists lift for free;
// otherwise the usual arena-membership + transmute pattern.
1742 impl<'a, 'tcx> Lift<'tcx> for &'a List<ExistentialPredicate<'a>> {
1743 type Lifted = &'tcx List<ExistentialPredicate<'tcx>>;
1744 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1745 -> Option<&'tcx List<ExistentialPredicate<'tcx>>> {
1746 if self.is_empty() {
1747 return Some(List::empty());
1749 if tcx.interners.arena.in_arena(*self as *const _) {
1750 return Some(unsafe { mem::transmute(*self) });
1752 // Also try in the global tcx if we're not that.
1753 if !tcx.is_global() {
1754 self.lift_to_tcx(tcx.global_tcx())
// Lift for &List<Predicate>: empty lists lift for free; otherwise the
// usual arena-membership + transmute pattern.
1761 impl<'a, 'tcx> Lift<'tcx> for &'a List<Predicate<'a>> {
1762 type Lifted = &'tcx List<Predicate<'tcx>>;
1763 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1764 -> Option<&'tcx List<Predicate<'tcx>>> {
1765 if self.is_empty() {
1766 return Some(List::empty());
1768 if tcx.interners.arena.in_arena(*self as *const _) {
1769 return Some(unsafe { mem::transmute(*self) });
1771 // Also try in the global tcx if we're not that.
1772 if !tcx.is_global() {
1773 self.lift_to_tcx(tcx.global_tcx())
// Lift for &List<CanonicalVarInfo>: CanonicalVarInfo carries no lifetime,
// so Lifted is the same type; still gated on arena membership.
1780 impl<'a, 'tcx> Lift<'tcx> for &'a List<CanonicalVarInfo> {
1781 type Lifted = &'tcx List<CanonicalVarInfo>;
1782 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1783 if self.len() == 0 {
1784 return Some(List::empty());
1786 if tcx.interners.arena.in_arena(*self as *const _) {
1787 return Some(unsafe { mem::transmute(*self) });
1789 // Also try in the global tcx if we're not that.
1790 if !tcx.is_global() {
1791 self.lift_to_tcx(tcx.global_tcx())
1799 use super::{GlobalCtxt, TyCtxt};
1805 use errors::{Diagnostic, TRACK_DIAGNOSTICS};
1806 use rustc_data_structures::OnDrop;
1807 use rustc_data_structures::sync::{self, Lrc, Lock};
1808 use dep_graph::OpenTask;
1810 #[cfg(not(parallel_queries))]
1811 use std::cell::Cell;
1813 #[cfg(parallel_queries)]
1816 /// This is the implicit state of rustc. It contains the current
1817 /// TyCtxt and query. It is updated when creating a local interner or
1818 /// executing a new query. Whenever there's a TyCtxt value available
1819 /// you should also have access to an ImplicitCtxt through the functions
// in this (tls) module. Stored behind a thread-local pointer; see
// set_tlv/get_tlv below.
1822 pub struct ImplicitCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
1823 /// The current TyCtxt. Initially created by `enter_global` and updated
1824 /// by `enter_local` with a new local interner
1825 pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
1827 /// The current query job, if any. This is updated by start_job in
1828 /// ty::query::plumbing when executing a query
1829 pub query: Option<Lrc<query::QueryJob<'gcx>>>,
1831 /// Used to prevent layout from recursing too deeply.
1832 pub layout_depth: usize,
1834 /// The current dep graph task. This is used to add dependencies to queries
1835 /// when executing them
1836 pub task: &'a OpenTask,
1839 /// Sets Rayon's thread local variable which is preserved for Rayon jobs
1840 /// to `value` during the call to `f`. It is restored to its previous value after.
1841 /// This is used to set the pointer to the new ImplicitCtxt.
1842 #[cfg(parallel_queries)]
// Parallel-compiler variant: delegates to rayon-core so the value follows
// work-stealing jobs across threads.
1843 fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
1844 rayon_core::tlv::with(value, f)
1847 /// Gets Rayon's thread local variable which is preserved for Rayon jobs.
1848 /// This is used to get the pointer to the current ImplicitCtxt.
1849 #[cfg(parallel_queries)]
// Parallel-compiler counterpart of the TLV getter below.
1850 fn get_tlv() -> usize {
1851 rayon_core::tlv::get()
1854 /// A thread local variable which stores a pointer to the current ImplicitCtxt
// Encoded as usize; 0 means "no context installed".
1855 #[cfg(not(parallel_queries))]
1856 thread_local!(static TLV: Cell<usize> = Cell::new(0));
1858 /// Sets TLV to `value` during the call to `f`.
1859 /// It is restored to its previous value after.
1860 /// This is used to set the pointer to the new ImplicitCtxt.
1861 #[cfg(not(parallel_queries))]
1862 fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
1863 let old = get_tlv();
// The OnDrop guard restores the old value even if `f` panics.
1864 let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
1865 TLV.with(|tlv| tlv.set(value));
1869 /// This is used to get the pointer to the current ImplicitCtxt.
1870 #[cfg(not(parallel_queries))]
// Returns 0 when no ImplicitCtxt is installed on this thread.
1871 fn get_tlv() -> usize {
1872 TLV.with(|tlv| tlv.get())
1875 /// This is a callback from libsyntax as it cannot access the implicit state
1876 /// in librustc otherwise
// Installed via SPAN_DEBUG in with_thread_locals; formats a span through the
// current session's source map. NOTE(review): the `with(|tcx| ...)` wrapper
// line appears elided in this extraction.
1877 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
1879 write!(f, "{}", tcx.sess.source_map().span_to_string(span))
1883 /// This is a callback from libsyntax as it cannot access the implicit state
1884 /// in librustc otherwise. It is used to when diagnostic messages are
1885 /// emitted and stores them in the current query, if there is one.
1886 fn track_diagnostic(diagnostic: &Diagnostic) {
// Best effort: silently does nothing when no ImplicitCtxt or no active
// query is present.
1887 with_context_opt(|icx| {
1888 if let Some(icx) = icx {
1889 if let Some(ref query) = icx.query {
1890 query.diagnostics.lock().push(diagnostic.clone());
1896 /// Sets up the callbacks from libsyntax on the current thread
// Installs span_debug and track_diagnostic into libsyntax's thread-locals
// for the duration of `f`, restoring the previous callbacks via OnDrop
// guards (panic-safe).
1897 pub fn with_thread_locals<F, R>(f: F) -> R
1898 where F: FnOnce() -> R
1900 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
1901 let original_span_debug = span_dbg.get();
1902 span_dbg.set(span_debug);
1904 let _on_drop = OnDrop(move || {
1905 span_dbg.set(original_span_debug);
1908 TRACK_DIAGNOSTICS.with(|current| {
1909 let original = current.get();
1910 current.set(track_diagnostic);
1912 let _on_drop = OnDrop(move || {
1913 current.set(original);
1921 /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
1922 pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
1924 where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
// Stores the context's address in the TLV slot; readers cast it back.
1926 set_tlv(context as *const _ as usize, || {
1931 /// Enters GlobalCtxt by setting up libsyntax callbacks and
1932 /// creating a initial TyCtxt and ImplicitCtxt.
1933 /// This happens once per rustc session and TyCtxts only exists
1934 /// inside the `f` function.
// NOTE(review): parts of the TyCtxt/ImplicitCtxt literals and the final
// closure call are elided in this extraction.
1935 pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R
1936 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
1938 with_thread_locals(|| {
1939 // Update GCX_PTR to indicate there's a GlobalCtxt available
1940 GCX_PTR.with(|lock| {
1941 *lock.lock() = gcx as *const _ as usize;
1943 // Set GCX_PTR back to 0 when we exit
1944 let _on_drop = OnDrop(move || {
1945 GCX_PTR.with(|lock| *lock.lock() = 0);
// The initial TyCtxt uses the global interners directly.
1950 interners: &gcx.global_interners,
1952 let icx = ImplicitCtxt {
// No query job, no dep-graph task at session entry.
1956 task: &OpenTask::Ignore,
1958 enter_context(&icx, |_| {
1964 /// Stores a pointer to the GlobalCtxt if one is available.
1965 /// This is used to access the GlobalCtxt in the deadlock handler
// Scoped thread-local holding the address as usize; 0 means unavailable.
1967 scoped_thread_local!(pub static GCX_PTR: Lock<usize>);
1969 /// Creates a TyCtxt and ImplicitCtxt based on the GCX_PTR thread local.
1970 /// This is used in the deadlock handler.
// Unsafe: the caller must guarantee GCX_PTR holds a live GlobalCtxt pointer
// (i.e. we are inside enter_global's dynamic extent).
1971 pub unsafe fn with_global<F, R>(f: F) -> R
1972 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1974 let gcx = GCX_PTR.with(|lock| *lock.lock());
1976 let gcx = &*(gcx as *const GlobalCtxt<'_>);
// Reconstruct a global TyCtxt and a bare ImplicitCtxt (some literal fields
// elided in this extraction).
1979 interners: &gcx.global_interners,
1981 let icx = ImplicitCtxt {
1985 task: &OpenTask::Ignore,
1987 enter_context(&icx, |_| f(tcx))
1990 /// Allows access to the current ImplicitCtxt in a closure if one is available
// Reads the TLV pointer; passes None when it is 0 (branch elided in this
// extraction), otherwise reborrows the pointed-to ImplicitCtxt.
1991 pub fn with_context_opt<F, R>(f: F) -> R
1992 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
1994 let context = get_tlv();
1998 // We could get a ImplicitCtxt pointer from another thread.
1999 // Ensure that ImplicitCtxt is Sync
2000 sync::assert_sync::<ImplicitCtxt>();
// SAFETY relies on set_tlv scoping: the pointer is valid for the dynamic
// extent of the enter_context call that installed it.
2002 unsafe { f(Some(&*(context as *const ImplicitCtxt))) }
2006 /// Allows access to the current ImplicitCtxt.
2007 /// Panics if there is no ImplicitCtxt available
2008 pub fn with_context<F, R>(f: F) -> R
2009 where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
2011 with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
2014 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
2015 /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
2016 /// with the same 'gcx lifetime as the TyCtxt passed in.
2017 /// This will panic if you pass it a TyCtxt which has a different global interner from
2018 /// the current ImplicitCtxt's tcx field.
2019 pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
2020 where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
2022 with_context(|context| {
// The address equality check is what makes the lifetime-changing transmute
// below defensible: both contexts share the same GlobalCtxt.
2024 let gcx = tcx.gcx as *const _ as usize;
2025 assert!(context.tcx.gcx as *const _ as usize == gcx);
2026 let context: &ImplicitCtxt = mem::transmute(context);
2032 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
2033 /// interner and local interner as the tcx argument passed in. This means the closure
2034 /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
2035 /// This will panic if you pass it a TyCtxt which has a different global interner or
2036 /// a different local interner from the current ImplicitCtxt's tcx field.
2037 pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
2038 where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
2040 with_context(|context| {
// Stricter than with_related_context: both the GlobalCtxt *and* the local
// interners must match before the transmute.
2042 let gcx = tcx.gcx as *const _ as usize;
2043 let interners = tcx.interners as *const _ as usize;
2044 assert!(context.tcx.gcx as *const _ as usize == gcx);
2045 assert!(context.tcx.interners as *const _ as usize == interners);
2046 let context: &ImplicitCtxt = mem::transmute(context);
2052 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2053 /// Panics if there is no ImplicitCtxt available
2054 pub fn with<F, R>(f: F) -> R
2055 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
2057 with_context(|context| f(context.tcx))
2060 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2061 /// The closure is passed None if there is no ImplicitCtxt available
2062 pub fn with_opt<F, R>(f: F) -> R
2063 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
2065 with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
// Debug-stats macro: for each listed TyKind variant, counts interned types
// and what fraction involve region/type inference variables, then prints a
// table. Expanded only from `print_debug_stats` below.
2069 macro_rules! sty_debug_print {
2070 ($ctxt: expr, $($variant: ident),*) => {{
2071 // curious inner module to allow variant names to be used as
2073 #[allow(non_snake_case)]
2075 use ty::{self, TyCtxt};
2076 use ty::context::Interned;
// Per-variant counters (field list partially elided in this extraction).
2078 #[derive(Copy, Clone)]
2081 region_infer: usize,
2086 pub fn go(tcx: TyCtxt) {
2087 let mut total = DebugStat {
2089 region_infer: 0, ty_infer: 0, both_infer: 0,
// One accumulator per requested variant, named after the variant itself.
2091 $(let mut $variant = total;)*
2094 for &Interned(t) in tcx.interners.type_.borrow().iter() {
2095 let variant = match t.sty {
// Primitive/leaf kinds are skipped — they carry no interesting stats.
2096 ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
2097 ty::Float(..) | ty::Str | ty::Never => continue,
2098 ty::Error => /* unimportant */ continue,
2099 $(ty::$variant(..) => &mut $variant,)*
2101 let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
2102 let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);
2106 if region { total.region_infer += 1; variant.region_infer += 1 }
2107 if ty { total.ty_infer += 1; variant.ty_infer += 1 }
2108 if region && ty { total.both_infer += 1; variant.both_infer += 1 }
2110 println!("Ty interner total ty region both");
2111 $(println!(" {:18}: {uses:6} {usespc:4.1}%, \
2112 {ty:4.1}% {region:5.1}% {both:4.1}%",
2113 stringify!($variant),
2114 uses = $variant.total,
2115 usespc = $variant.total as f64 * 100.0 / total.total as f64,
2116 ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
2117 region = $variant.region_infer as f64 * 100.0 / total.total as f64,
2118 both = $variant.both_infer as f64 * 100.0 / total.total as f64);
2120 println!(" total {uses:6} \
2121 {ty:4.1}% {region:5.1}% {both:4.1}%",
2123 ty = total.ty_infer as f64 * 100.0 / total.total as f64,
2124 region = total.region_infer as f64 * 100.0 / total.total as f64,
2125 both = total.both_infer as f64 * 100.0 / total.total as f64)
// impl restricted to the case 'tcx == 'gcx (a fully global context).
2133 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Dumps interner-population statistics to stdout: a per-TyKind-variant table
// (via sty_debug_print!) followed by raw entry counts of the other interners.
2134 pub fn print_debug_stats(self) {
2137 Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr,
2138 Generator, GeneratorWitness, Dynamic, Closure, Tuple,
2139 Param, Infer, Projection, Anon, Foreign);
2141 println!("Substs interner: #{}", self.interners.substs.borrow().len());
2142 println!("Region interner: #{}", self.interners.region.borrow().len());
2143 println!("Stability interner: #{}", self.stability_interner.borrow().len());
2144 println!("Allocation interner: #{}", self.allocation_interner.borrow().len());
2145 println!("Layout interner: #{}", self.layout_interner.borrow().len());
2150 /// An entry in an interner.
// Newtype over an arena reference; the impls below give it the equality/hash
// semantics of the *pointed-to value* rather than the pointer.
2151 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
2153 // NB: An Interned<Ty> compares and hashes as a sty.
// Identity of an interned type is its `sty` (TyKind) alone — the cached
// flags etc. on TyS are derived data and must not affect interner lookup.
2154 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
2155 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
2156 self.0.sty == other.0.sty
2160 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
2162 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
2163 fn hash<H: Hasher>(&self, s: &mut H) {
// Borrow as the bare TyKind so the interner set can be probed with a TyKind
// key without first allocating a TyS.
2168 impl<'tcx: 'lcx, 'lcx> Borrow<TyKind<'lcx>> for Interned<'tcx, TyS<'tcx>> {
2169 fn borrow<'a>(&'a self) -> &'a TyKind<'lcx> {
2174 // NB: An Interned<List<T>> compares and hashes as its elements.
// Element-wise identity: two interned lists are equal iff their slices are.
2175 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List<T>> {
2176 fn eq(&self, other: &Interned<'tcx, List<T>>) -> bool {
2177 self.0[..] == other.0[..]
2181 impl<'tcx, T: Eq> Eq for Interned<'tcx, List<T>> {}
2183 impl<'tcx, T: Hash> Hash for Interned<'tcx, List<T>> {
2184 fn hash<H: Hasher>(&self, s: &mut H) {
// Borrow impls below let interner sets be probed with plain slices as keys,
// avoiding an allocation on lookup.
2189 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, List<Ty<'tcx>>> {
2190 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
2195 impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, List<CanonicalVarInfo>> {
2196 fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
2201 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
2202 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
2213 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
2214 for Interned<'tcx, List<ExistentialPredicate<'tcx>>> {
2215 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
2220 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
2221 for Interned<'tcx, List<Predicate<'tcx>>> {
2222 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
2227 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
2228 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
2233 impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]>
2234 for Interned<'tcx, List<Clause<'tcx>>> {
2235 fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] {
2240 impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]>
2241 for Interned<'tcx, List<Goal<'tcx>>> {
2242 fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] {
// Generates an interning method on TyCtxt for one interner field.
// Lookup-or-allocate: values that must stay thread-local (contain inference
// vars) go into the local interner/arena; everything else goes into the
// global one. NOTE(review): several interior lines (return statements,
// closing braces, other macro parameters) are elided in this view.
2247 macro_rules! intern_method {
2248 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
2251 $keep_in_local_tcx:expr) -> $ty:ty) => {
2252 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
2253 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
// Key conversion is a caller-supplied closure (elided macro parameter).
2254 let key = ($alloc_to_key)(&v);
2256 // HACK(eddyb) Depend on flags being accurate to
2257 // determine that all contents are in the global tcx.
2258 // See comments on Lift for why we can't use that.
2259 if ($keep_in_local_tcx)(&v) {
// Local-tcx path: hit the thread-local interner first.
2260 let mut interner = self.interners.$name.borrow_mut();
2261 if let Some(&Interned(v)) = interner.get(key) {
2265 // Make sure we don't end up with inference
2266 // types/regions in the global tcx.
2267 if self.is_global() {
2268 bug!("Attempted to intern `{:?}` which contains \
2269 inference types/regions in the global type context",
// Miss: allocate in the local arena and record it.
2273 let i = $alloc_method(&self.interners.arena, v);
2274 interner.insert(Interned(i));
// Global path: lookup, then allocate in the global arena on a miss.
2277 let mut interner = self.global_interners.$name.borrow_mut();
2278 if let Some(&Interned(v)) = interner.get(key) {
2282 // This transmutes $alloc<'tcx> to $alloc<'gcx>
2286 let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
// Lifetime-only transmute 'gcx -> 'tcx; sound because the value provably
// contains no local-tcx data (the keep_in_local_tcx check above was false).
2288 let i = unsafe { mem::transmute(i) };
2289 interner.insert(Interned(i));
// For values interned directly (not as slices): generates the identity
// impls (PartialEq/Eq/Hash on the pointed-to value) plus an intern_method!
// whose allocator is a plain arena `alloc`. NOTE(review): impl bodies and
// some intern_method! arguments are elided in this view.
2297 macro_rules! direct_interners {
2298 ($lt_tcx:tt, $($name:ident: $method:ident($keep_in_local_tcx:expr) -> $ty:ty),+) => {
2299 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
2300 fn eq(&self, other: &Self) -> bool {
2305 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
2307 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
2308 fn hash<H: Hasher>(&self, s: &mut H) {
// Single-value allocation into the (sync, dropless) arena.
2316 |a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) },
2318 $keep_in_local_tcx) -> $ty);)+
// True if `x` must stay in the thread-local tcx, i.e. its type flags say it
// contains data (e.g. inference variables) that may not enter the global tcx.
// Used as the `keep_in_local_tcx` predicate for the interner macros below.
2322 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
2323 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Direct (non-slice) interners: regions and constants.
// A Const is kept local if either its type or its value contains local data.
2326 direct_interners!('tcx,
2327 region: mk_region(|r: &RegionKind| r.keep_in_local_tcx()) -> RegionKind,
2328 const_: mk_const(|c: &Const| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>
// Generates interners for slice-like values: the allocator copies the slice
// into the arena as a List, and the slice is kept local if ANY element is.
// NOTE(review): the invocation list below is partially elided; the visible
// fields name the private _intern_* methods wrapped by the public
// intern_* methods further down.
2331 macro_rules! slice_interners {
2332 ($($field:ident: $method:ident($ty:ident)),+) => (
2333 $(intern_method!( 'tcx, $field: $method(
2335 |a, v| List::from_arena(a, v),
2337 |xs: &[$ty]| xs.iter().any(keep_local)) -> List<$ty<'tcx>>);)+
2342 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
2343 predicates: _intern_predicates(Predicate),
2344 type_list: _intern_type_list(Ty),
2345 substs: _intern_substs(Kind),
2346 clauses: _intern_clauses(Clause),
2347 goals: _intern_goals(Goal)
2350 // This isn't a perfect fit: CanonicalVarInfo slices are always
2351 // allocated in the global arena, so this `intern_method!` macro is
2352 // overly general. But we just return false for the code that checks
2353 // whether they belong in the thread-local arena, so no harm done, and
2354 // seems better than open-coding the rest.
// keep_in_local_tcx is the constant-false closure: always global.
2357 canonical_var_infos: _intern_canonical_var_infos(
2358 &[CanonicalVarInfo],
2359 |a, v| List::from_arena(a, v),
2361 |_xs: &[CanonicalVarInfo]| -> bool { false }
2362 ) -> List<CanonicalVarInfo>
// Main constructor-methods impl on TyCtxt (extends past this view).
2365 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
2366 /// Given a `fn` type, returns an equivalent `unsafe fn` type;
2367 /// that is, a `fn` type that is equivalent in every way for being
2369 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
// Precondition: the input must already be a safe fn signature.
2370 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
// Rebuild the signature with only the unsafety flag flipped
// (remaining FnSig fields are on elided lines).
2371 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
2372 unsafety: hir::Unsafety::Unsafe,
2377 /// Given a closure signature `sig`, returns an equivalent `fn`
2378 /// type with the same signature. Detuples and so forth -- so
2379 /// e.g. if we have a sig with `Fn<(u32, i32)>` then you would get
2380 /// a `fn(u32, i32)`.
2381 pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2382 let converted_sig = sig.map_bound(|s| {
// Closure sigs pack all params into a single tuple as inputs()[0];
// unpack it into individual parameters. (The non-tuple arm is elided.)
2383 let params_iter = match s.inputs()[0].sty {
2384 ty::Tuple(params) => {
2385 params.into_iter().cloned()
// Resulting fn type is always a safe fn (remaining FnSig construction
// is on elided lines).
2393 hir::Unsafety::Normal,
2398 self.mk_fn_ptr(converted_sig)
2401 pub fn mk_ty(&self, st: TyKind<'tcx>) -> Ty<'tcx> {
2402 CtxtInterners::intern_ty(&self.interners, &self.global_interners, st)
2405 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
2407 ast::IntTy::Isize => self.types.isize,
2408 ast::IntTy::I8 => self.types.i8,
2409 ast::IntTy::I16 => self.types.i16,
2410 ast::IntTy::I32 => self.types.i32,
2411 ast::IntTy::I64 => self.types.i64,
2412 ast::IntTy::I128 => self.types.i128,
2416 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
2418 ast::UintTy::Usize => self.types.usize,
2419 ast::UintTy::U8 => self.types.u8,
2420 ast::UintTy::U16 => self.types.u16,
2421 ast::UintTy::U32 => self.types.u32,
2422 ast::UintTy::U64 => self.types.u64,
2423 ast::UintTy::U128 => self.types.u128,
2427 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
2429 ast::FloatTy::F32 => self.types.f32,
2430 ast::FloatTy::F64 => self.types.f64,
// `str` type (body elided — presumably returns the interned Str kind).
2434 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str`: immutable reference to str with the 'static region.
2438 pub fn mk_static_str(self) -> Ty<'tcx> {
2439 self.mk_imm_ref(self.types.re_static, self.mk_str())
2442 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2443 // take a copy of substs so that we own the vectors inside
2444 self.mk_ty(Adt(def, substs))
2447 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
2448 self.mk_ty(Foreign(def_id))
// Box<ty>: looks up the lang item's AdtDef and builds substs where the
// first type parameter is `ty` and any further defaulted params (e.g. the
// allocator) use their declared defaults.
2451 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2452 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
2453 let adt_def = self.adt_def(def_id);
2454 let substs = Substs::for_item(self, def_id, |param, substs| {
// Box takes no lifetime parameters — reaching this arm is a compiler bug.
2456 GenericParamDefKind::Lifetime => bug!(),
2457 GenericParamDefKind::Type { has_default, .. } => {
2458 if param.index == 0 {
// Non-first params must have defaults; substitute their default type.
2461 assert!(has_default);
2462 self.type_of(param.def_id).subst(self, substs).into()
2467 self.mk_ty(Adt(adt_def, substs))
// Raw pointer / reference / array / slice constructors. Each is a thin
// wrapper over mk_ty with the appropriate TyKind.
2470 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2471 self.mk_ty(RawPtr(tm))
2474 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2475 self.mk_ty(Ref(r, tm.ty, tm.mutbl))
// &mut T / &T convenience wrappers over mk_ref.
2478 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2479 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2482 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2483 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
// *mut T / *const T convenience wrappers over mk_ptr.
2486 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2487 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2490 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2491 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
// *const () — used where a unit raw pointer is needed.
2494 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
2495 self.mk_imm_ptr(self.mk_nil())
// [T; n] with the length lifted to a usize Const.
2498 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
2499 self.mk_ty(Array(ty, ty::Const::from_usize(self, n)))
2502 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2503 self.mk_ty(Slice(ty))
// Tuple from a ready slice of element types.
2506 pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
2507 self.mk_ty(Tuple(self.intern_type_list(ts)))
// Tuple from any iterator accepted by InternAs (see trait at end of file);
// the output type mirrors the iterator's element kind (plain vs Result).
2510 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
2511 iter.intern_with(|ts| self.mk_ty(Tuple(self.intern_type_list(ts))))
// The unit type `()` is the empty tuple.
2514 pub fn mk_nil(self) -> Ty<'tcx> {
2515 self.intern_tup(&[])
// Diverging fallback: `!` when the never_type feature is enabled,
// otherwise unit (the `!` arm is on an elided line).
2518 pub fn mk_diverging_default(self) -> Ty<'tcx> {
2519 if self.features().never_type {
2522 self.intern_tup(&[])
// `bool` (body elided).
2526 pub fn mk_bool(self) -> Ty<'tcx> {
// FnDef / FnPtr kinds.
2530 pub fn mk_fn_def(self, def_id: DefId,
2531 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2532 self.mk_ty(FnDef(def_id, substs))
2535 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
2536 self.mk_ty(FnPtr(fty))
// Trait-object type `dyn Trait + 'reg` (the `mk_dynamic` signature line is
// elided in this view; these are its parameters).
2541 obj: ty::Binder<&'tcx List<ExistentialPredicate<'tcx>>>,
2542 reg: ty::Region<'tcx>
2544 self.mk_ty(Dynamic(obj, reg))
// Associated-type projection `<T as Trait>::Item` (item_def_id param elided).
2547 pub fn mk_projection(self,
2549 substs: &'tcx Substs<'tcx>)
2551 self.mk_ty(Projection(ProjectionTy {
// Closure / generator / generator-witness types.
2557 pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>)
2559 self.mk_ty(Closure(closure_id, closure_substs))
2562 pub fn mk_generator(self,
2564 generator_substs: GeneratorSubsts<'tcx>,
2565 movability: hir::GeneratorMovability)
2567 self.mk_ty(Generator(id, generator_substs, movability))
2570 pub fn mk_generator_witness(self, types: ty::Binder<&'tcx List<Ty<'tcx>>>) -> Ty<'tcx> {
2571 self.mk_ty(GeneratorWitness(types))
// Inference variables: general, integral, and float, all via mk_infer.
2574 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
2575 self.mk_infer(TyVar(v))
2578 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
2579 self.mk_infer(IntVar(v))
2582 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
2583 self.mk_infer(FloatVar(v))
2586 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
2587 self.mk_ty(Infer(it))
// Type parameter by index + name (index param line elided).
2590 pub fn mk_ty_param(self,
2592 name: InternedString) -> Ty<'tcx> {
2593 self.mk_ty(Param(ParamTy { idx: index, name: name }))
// `Self` is the type parameter at index 0 with the keyword name.
2596 pub fn mk_self_type(self) -> Ty<'tcx> {
2597 self.mk_ty_param(0, keywords::SelfType.name().as_interned_str())
// Turns a generic-param definition into the corresponding Kind
// (a region for lifetimes, a param type for type params).
2600 pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> {
2602 GenericParamDefKind::Lifetime => {
2603 self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into()
2605 GenericParamDefKind::Type {..} => self.mk_ty_param(param.index, param.name).into(),
// Anonymized (impl Trait / existential) type.
2609 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2610 self.mk_ty(Anon(def_id, substs))
// Public wrappers over the macro-generated _intern_* slice interners.
// Several add preconditions/fast paths before delegating; empty-slice fast
// paths are on elided lines where not visible.
2613 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
2614 -> &'tcx List<ExistentialPredicate<'tcx>> {
// Trait-object predicates must be non-empty and in stable sorted order so
// that equal objects intern to the same list.
2615 assert!(!eps.is_empty());
2616 assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater));
2617 self._intern_existential_predicates(eps)
2620 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
2621 -> &'tcx List<Predicate<'tcx>> {
2622 // FIXME consider asking the input slice to be sorted to avoid
2623 // re-interning permutations, in which case that would be asserted
2625 if preds.len() == 0 {
2626 // The macro-generated method below asserts we don't intern an empty slice.
2629 self._intern_predicates(preds)
2633 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
2637 self._intern_type_list(ts)
2641 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx List<Kind<'tcx>> {
2645 self._intern_substs(ts)
// Canonical var infos always live in the global tcx (see interner comment
// above), hence the global_tcx() hop.
2649 pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
2653 self.global_tcx()._intern_canonical_var_infos(ts)
2657 pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> {
2661 self._intern_clauses(ts)
2665 pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> {
2669 self._intern_goals(ts)
// Builds a FnSig from an inputs iterator plus a separate output type;
// inputs and output are stored as one interned list with the output last.
// (Several parameter lines — inputs, output, variadic, abi — are elided.)
2673 pub fn mk_fn_sig<I>(self,
2677 unsafety: hir::Unsafety,
2679 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2681 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2683 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2684 inputs_and_output: self.intern_type_list(xs),
2685 variadic, unsafety, abi
// Iterator-based counterparts of the intern_* slice methods: collect the
// iterator (via InternAs) and delegate to the matching intern_* method.
2689 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2690 &'tcx List<ExistentialPredicate<'tcx>>>>(self, iter: I)
2692 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2695 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2696 &'tcx List<Predicate<'tcx>>>>(self, iter: I)
2698 iter.intern_with(|xs| self.intern_predicates(xs))
2701 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2702 &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2703 iter.intern_with(|xs| self.intern_type_list(xs))
2706 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2707 &'tcx List<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2708 iter.intern_with(|xs| self.intern_substs(xs))
// Substs for a trait reference: self type first, then the rest.
2711 pub fn mk_substs_trait(self,
2713 rest: &[Kind<'tcx>])
2714 -> &'tcx Substs<'tcx>
2716 self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned()))
2719 pub fn mk_clauses<I: InternAs<[Clause<'tcx>], Clauses<'tcx>>>(self, iter: I) -> I::Output {
2720 iter.intern_with(|xs| self.intern_clauses(xs))
2723 pub fn mk_goals<I: InternAs<[Goal<'tcx>], Goals<'tcx>>>(self, iter: I) -> I::Output {
2724 iter.intern_with(|xs| self.intern_goals(xs))
// A single Goal is interned as a one-element list and the element borrowed.
2727 pub fn mk_goal(self, goal: Goal<'tcx>) -> &'tcx Goal {
2728 &self.intern_goals(&[goal])[0]
// Convenience lint emitters: build the DiagnosticBuilder via the
// struct_span_lint_* methods below and emit immediately. The *_note
// variants attach a note before emitting (note parameter and emit call are
// on elided lines).
2731 pub fn lint_hir<S: Into<MultiSpan>>(self,
2732 lint: &'static Lint,
2736 self.struct_span_lint_hir(lint, hir_id, span.into(), msg).emit()
2739 pub fn lint_node<S: Into<MultiSpan>>(self,
2740 lint: &'static Lint,
2744 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
2747 pub fn lint_hir_note<S: Into<MultiSpan>>(self,
2748 lint: &'static Lint,
2753 let mut err = self.struct_span_lint_hir(lint, hir_id, span.into(), msg);
2758 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2759 lint: &'static Lint,
2764 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
// Computes the effective level and source of `lint` at `id` by walking up
// the HIR parent chain until some node has an explicit level for the lint.
2769 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2770 -> (lint::Level, lint::LintSource)
2772 // Right now we insert a `with_ignore` node in the dep graph here to
2773 // ignore the fact that `lint_levels` below depends on the entire crate.
2774 // For now this'll prevent false positives of recompiling too much when
2775 // anything changes.
2777 // Once red/green incremental compilation lands we should be able to
2778 // remove this because while the crate changes often the lint level map
2779 // will change rarely.
2780 self.dep_graph.with_ignore(|| {
2781 let sets = self.lint_levels(LOCAL_CRATE);
// Loop body (header elided): check this node's level, else step to parent;
// `id` is reassigned from get_parent_node on each iteration.
2783 let hir_id = self.hir.definitions().node_to_hir_id(id);
2784 if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
2787 let next = self.hir.get_parent_node(id);
// Reaching the crate root without an answer is a bug: the root always
// carries a level for every lint.
2789 bug!("lint traversal reached the root of the crate");
// Diagnostic builders for lints: resolve the effective level/source at the
// node, then delegate to lint::struct_lint_level. The _hir variant first
// maps the HirId back to a NodeId.
2796 pub fn struct_span_lint_hir<S: Into<MultiSpan>>(self,
2797 lint: &'static Lint,
2801 -> DiagnosticBuilder<'tcx>
2803 let node_id = self.hir.hir_to_node_id(hir_id);
2804 let (level, src) = self.lint_level_at_node(lint, node_id);
2805 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
2808 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
2809 lint: &'static Lint,
2813 -> DiagnosticBuilder<'tcx>
2815 let (level, src) = self.lint_level_at_node(lint, id);
2816 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
// Span-less variant: passes None for the span.
2819 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
2820 -> DiagnosticBuilder<'tcx>
2822 let (level, src) = self.lint_level_at_node(lint, id);
2823 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// Per-item queries keyed by HirId: each query returns a per-owner map, and
// these accessors index it by the id's local part. All return Option / bool
// (None/false when the owner has no map or the entry is absent).
2826 pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
2827 self.in_scope_traits_map(id.owner)
2828 .and_then(|map| map.get(&id.local_id).cloned())
2831 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
2832 self.named_region_map(id.owner)
2833 .and_then(|map| map.get(&id.local_id).cloned())
2836 pub fn is_late_bound(self, id: HirId) -> bool {
2837 self.is_late_bound_map(id.owner)
2838 .map(|set| set.contains(&id.local_id))
2842 pub fn object_lifetime_defaults(self, id: HirId)
2843 -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
2845 self.object_lifetime_defaults_map(id.owner)
2846 .and_then(|map| map.get(&id.local_id).cloned())
// Abstraction over "collect this iterator into a slice and intern it":
// `Output` lets a Result-yielding iterator produce Result<R, E> instead of R
// (associated type line is elided; see the impls below).
2850 pub trait InternAs<T: ?Sized, R> {
2852 fn intern_with<F>(self, f: F) -> Self::Output
2853 where F: FnOnce(&T) -> R;
// Blanket impl: any iterator interns as a slice, with the element type's
// InternIteratorElement impl deciding how results are collected/propagated.
2856 impl<I, T, R, E> InternAs<[T], R> for I
2857 where E: InternIteratorElement<T, R>,
2858 I: Iterator<Item=E> {
2859 type Output = E::Output;
2860 fn intern_with<F>(self, f: F) -> Self::Output
2861 where F: FnOnce(&[T]) -> R {
2862 E::intern_with(self, f)
// Per-element strategy for InternAs: plain values, references (cloned), and
// Result elements (short-circuiting) each get an impl below.
2866 pub trait InternIteratorElement<T, R>: Sized {
2868 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
// Owned elements: collect into a stack-friendly SmallVec and hand the slice
// to the interning closure.
2871 impl<T, R> InternIteratorElement<T, R> for T {
2873 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2874 f(&iter.collect::<SmallVec<[_; 8]>>())
// Borrowed elements: clone each into the buffer first (bound on T is elided;
// presumably T: Clone — confirm against the full source).
2878 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
2882 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2883 f(&iter.cloned().collect::<SmallVec<[_; 8]>>())
// Fallible elements: collecting into Result<SmallVec, E> short-circuits on
// the first Err; on success the interned value is wrapped in Ok.
2887 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
2888 type Output = Result<R, E>;
2889 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2890 Ok(f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?))
// Registers query providers that read data already computed during
// resolution / stored on the global context (fragment — the function
// continues past this view).
2894 pub fn provide(providers: &mut ty::query::Providers) {
2895 // FIXME(#44234) - almost all of these queries have no sub-queries and
2896 // therefore no actual inputs, they're just reading tables calculated in
2897 // resolve! Does this work? Unsure! That's what the issue is about
// Simple table lookups on the global context.
2898 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
2899 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
2900 providers.crate_name = |tcx, id| {
// Crate-keyed providers only answer for the local crate (asserted).
2901 assert_eq!(id, LOCAL_CRATE);
2904 providers.get_lib_features = |tcx, id| {
2905 assert_eq!(id, LOCAL_CRATE);
2906 Lrc::new(middle::lib_features::collect(tcx))
2908 providers.get_lang_items = |tcx, id| {
2909 assert_eq!(id, LOCAL_CRATE);
2910 Lrc::new(middle::lang_items::collect(tcx))
2912 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
2913 providers.maybe_unused_trait_import = |tcx, id| {
2914 tcx.maybe_unused_trait_imports.contains(&id)
2916 providers.maybe_unused_extern_crates = |tcx, cnum| {
2917 assert_eq!(cnum, LOCAL_CRATE);
2918 Lrc::new(tcx.maybe_unused_extern_crates.clone())
2921 providers.stability_index = |tcx, cnum| {
2922 assert_eq!(cnum, LOCAL_CRATE);
2923 Lrc::new(stability::Index::new(tcx))
// Stability/deprecation lookups: map the DefIndex to a HirId, then consult
// the local stability index.
2925 providers.lookup_stability = |tcx, id| {
2926 assert_eq!(id.krate, LOCAL_CRATE);
2927 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2928 tcx.stability().local_stability(id)
2930 providers.lookup_deprecation_entry = |tcx, id| {
2931 assert_eq!(id.krate, LOCAL_CRATE);
2932 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2933 tcx.stability().local_deprecation_entry(id)
// Untracked cstore reads: these bypass dependency tracking (method names
// say "untracked"), relying on the surrounding query machinery for
// invalidation.
2935 providers.extern_mod_stmt_cnum = |tcx, id| {
2936 let id = tcx.hir.as_local_node_id(id).unwrap();
2937 tcx.cstore.extern_mod_stmt_cnum_untracked(id)
2939 providers.all_crate_nums = |tcx, cnum| {
2940 assert_eq!(cnum, LOCAL_CRATE);
2941 Lrc::new(tcx.cstore.crates_untracked())
2943 providers.postorder_cnums = |tcx, cnum| {
2944 assert_eq!(cnum, LOCAL_CRATE);
2945 Lrc::new(tcx.cstore.postorder_cnums_untracked())
2947 providers.output_filenames = |tcx, cnum| {
2948 assert_eq!(cnum, LOCAL_CRATE);
2949 tcx.output_filenames.clone()
2951 providers.features_query = |tcx, cnum| {
2952 assert_eq!(cnum, LOCAL_CRATE);
2953 Lrc::new(tcx.sess.features_untracked().clone())
// Crate-kind flags derived from crate-level attributes.
2955 providers.is_panic_runtime = |tcx, cnum| {
2956 assert_eq!(cnum, LOCAL_CRATE);
2957 attr::contains_name(tcx.hir.krate_attrs(), "panic_runtime")
2959 providers.is_compiler_builtins = |tcx, cnum| {
2960 assert_eq!(cnum, LOCAL_CRATE);
2961 attr::contains_name(tcx.hir.krate_attrs(), "compiler_builtins")