1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! type context book-keeping
13 use dep_graph::DepGraph;
14 use dep_graph::{DepNode, DepConstructor};
15 use errors::DiagnosticBuilder;
17 use session::config::{BorrowckMode, OutputFilenames, OptLevel};
18 use session::config::CrateType;
20 use hir::{TraitCandidate, HirId, ItemLocalId};
21 use hir::def::{Def, Export};
22 use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
23 use hir::map as hir_map;
24 use hir::map::DefPathHash;
25 use lint::{self, Lint};
26 use ich::{StableHashingContext, NodeIdHashingMode};
27 use infer::canonical::{CanonicalVarInfo, CanonicalVarInfos};
28 use infer::outlives::free_region_map::FreeRegionMap;
29 use middle::cstore::{CrateStoreDyn, LinkMeta};
30 use middle::cstore::EncodedMetadata;
31 use middle::lang_items;
32 use middle::resolve_lifetime::{self, ObjectLifetimeDefault};
33 use middle::stability;
34 use mir::{self, Mir, interpret};
35 use mir::interpret::Allocation;
36 use ty::subst::{Kind, Substs, Subst};
39 use traits::{Clause, Clauses, Goal, Goals};
40 use ty::{self, Ty, TypeAndMut};
41 use ty::{TyS, TypeVariants, Slice};
42 use ty::{AdtKind, AdtDef, ClosureSubsts, GeneratorSubsts, Region, Const};
43 use ty::{PolyFnSig, InferTy, ParamTy, ProjectionTy, ExistentialPredicate, Predicate};
45 use ty::{TyVar, TyVid, IntVar, IntVid, FloatVar, FloatVid};
46 use ty::TypeVariants::*;
47 use ty::GenericParamDefKind;
48 use ty::layout::{LayoutDetails, TargetDataLayout};
53 use util::nodemap::{DefIdSet, ItemLocalMap};
54 use util::nodemap::{FxHashMap, FxHashSet};
55 use rustc_data_structures::accumulate_vec::AccumulateVec;
56 use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
57 StableHasher, StableHasherResult,
59 use arena::{TypedArena, SyncDroplessArena};
60 use rustc_data_structures::indexed_vec::IndexVec;
61 use rustc_data_structures::sync::{self, Lrc, Lock, WorkerLocal};
63 use std::borrow::Borrow;
64 use std::cmp::Ordering;
65 use std::collections::hash_map::{self, Entry};
66 use std::hash::{Hash, Hasher};
73 use rustc_target::spec::abi;
74 use syntax::ast::{self, NodeId};
76 use syntax::codemap::MultiSpan;
77 use syntax::edition::Edition;
78 use syntax::feature_gate;
79 use syntax::symbol::{Symbol, keywords, InternedString};
// Bundle of every arena a compiler session allocates from: the
// per-worker `global` arenas plus the shared dropless `interner` arena.
84 pub struct AllArenas<'tcx> {
// Arenas for long-lived, typed values (MIR, layouts, tables, ...).
85 pub global: WorkerLocal<GlobalArenas<'tcx>>,
// Dropless arena backing the type/region interners (see `CtxtInterners`).
86 pub interner: SyncDroplessArena,
89 impl<'tcx> AllArenas<'tcx> {
// Construct an empty arena bundle for a fresh session.
90 pub fn new() -> Self {
92 global: WorkerLocal::new(|_| GlobalArenas::new()),
93 interner: SyncDroplessArena::new(),
// Typed arenas for values that must live as long as the global type
// context; each field owns one category of allocation.
99 pub struct GlobalArenas<'tcx> {
101 layout: TypedArena<LayoutDetails>,
104 generics: TypedArena<ty::Generics>,
105 trait_def: TypedArena<ty::TraitDef>,
106 adt_def: TypedArena<ty::AdtDef>,
// `Steal` wrapper lets MIR passes take ownership of a body in place.
107 steal_mir: TypedArena<Steal<Mir<'tcx>>>,
108 mir: TypedArena<Mir<'tcx>>,
109 tables: TypedArena<ty::TypeckTables<'tcx>>,
// Backing storage for interned constant allocations (see `intern_const_alloc`).
111 const_allocs: TypedArena<interpret::Allocation>,
114 impl<'tcx> GlobalArenas<'tcx> {
// Create one empty arena per category.
115 pub fn new() -> GlobalArenas<'tcx> {
117 layout: TypedArena::new(),
118 generics: TypedArena::new(),
119 trait_def: TypedArena::new(),
120 adt_def: TypedArena::new(),
121 steal_mir: TypedArena::new(),
122 mir: TypedArena::new(),
123 tables: TypedArena::new(),
124 const_allocs: TypedArena::new(),
// A deduplication set for one kind of interned value, guarded by a lock
// so multiple threads can intern concurrently.
129 type InternedSet<'tcx, T> = Lock<FxHashSet<Interned<'tcx, T>>>;
// Interning tables for types, regions, substs, predicates, etc. Each
// set stores references into `arena`, guaranteeing pointer equality for
// structurally equal values.
131 pub struct CtxtInterners<'tcx> {
132 /// The arena that types, regions, etc are allocated from
133 arena: &'tcx SyncDroplessArena,
135 /// Specifically use a speedy hash algorithm for these hash sets,
136 /// they're accessed quite often.
137 type_: InternedSet<'tcx, TyS<'tcx>>,
138 type_list: InternedSet<'tcx, Slice<Ty<'tcx>>>,
139 substs: InternedSet<'tcx, Substs<'tcx>>,
140 canonical_var_infos: InternedSet<'tcx, Slice<CanonicalVarInfo>>,
141 region: InternedSet<'tcx, RegionKind>,
142 existential_predicates: InternedSet<'tcx, Slice<ExistentialPredicate<'tcx>>>,
143 predicates: InternedSet<'tcx, Slice<Predicate<'tcx>>>,
144 const_: InternedSet<'tcx, Const<'tcx>>,
145 clauses: InternedSet<'tcx, Slice<Clause<'tcx>>>,
146 goals: InternedSet<'tcx, Slice<Goal<'tcx>>>,
149 impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
// Build empty interning tables over the given arena.
150 fn new(arena: &'tcx SyncDroplessArena) -> CtxtInterners<'tcx> {
153 type_: Default::default(),
154 type_list: Default::default(),
155 substs: Default::default(),
156 region: Default::default(),
157 existential_predicates: Default::default(),
158 canonical_var_infos: Default::default(),
159 predicates: Default::default(),
160 const_: Default::default(),
161 clauses: Default::default(),
162 goals: Default::default(),
// (Body of `intern_ty`.) Interns a `TypeVariants` value, routing it to
// the local interner when it contains inference variables (per the
// KEEP_IN_LOCAL_TCX flag) and to the global interner otherwise.
168 local: &CtxtInterners<'tcx>,
169 global: &CtxtInterners<'gcx>,
170 st: TypeVariants<'tcx>
// Flags are computed once up front; they drive the local-vs-global decision.
172 let flags = super::flags::FlagComputation::for_sty(&st);
174 // HACK(eddyb) Depend on flags being accurate to
175 // determine that all contents are in the global tcx.
176 // See comments on Lift for why we can't use that.
177 if flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
178 let mut interner = local.type_.borrow_mut();
// Fast path: an equal type was already interned locally.
179 if let Some(&Interned(ty)) = interner.get(&st) {
183 let ty_struct = TyS {
186 outer_exclusive_binder: flags.outer_exclusive_binder,
189 // Make sure we don't end up with inference
190 // types/regions in the global interner
// Pointer comparison: `local == global` means we ARE the global
// context, where inference types must never be interned.
191 if local as *const _ as usize == global as *const _ as usize {
192 bug!("Attempted to intern `{:?}` which contains \
193 inference types/regions in the global type context",
197 // Don't be &mut TyS.
198 let ty: Ty<'tcx> = local.arena.alloc(ty_struct);
199 interner.insert(Interned(ty));
// Global path: no inference content, safe to keep for 'gcx.
202 let mut interner = global.type_.borrow_mut();
203 if let Some(&Interned(ty)) = interner.get(&st) {
207 let ty_struct = TyS {
210 outer_exclusive_binder: flags.outer_exclusive_binder,
213 // This is safe because all the types the ty_struct can point to
214 // are already in the global arena
215 let ty_struct: TyS<'gcx> = unsafe {
216 mem::transmute(ty_struct)
219 // Don't be &mut TyS.
220 let ty: Ty<'gcx> = global.arena.alloc(ty_struct);
221 interner.insert(Interned(ty));
// Pre-interned types and regions that are used constantly, so lookups
// never have to go through the interner. (Primitive-type fields are
// elided from this listing; region fields shown below.)
227 pub struct CommonTypes<'tcx> {
247 pub re_empty: Region<'tcx>,
248 pub re_static: Region<'tcx>,
// The erased region, used after regions no longer matter (e.g. codegen).
249 pub re_erased: Region<'tcx>,
// Read-only view of one `ItemLocalMap` inside a `TypeckTables`,
// remembering which `local_id_root` its keys are relative to so lookups
// can be validated.
252 pub struct LocalTableInContext<'a, V: 'a> {
253 local_id_root: Option<DefId>,
254 data: &'a ItemLocalMap<V>
257 /// Validate that the given HirId (respectively its `local_id` part) can be
258 /// safely used as a key in the tables of a TypeckTable. For that to be
259 /// the case, the HirId must have the same `owner` as all the other IDs in
260 /// this table (signified by `local_id_root`). Otherwise the HirId
261 /// would be in a different frame of reference and using its `local_id`
262 /// would result in lookup errors, or worse, in silently wrong data being
// (continued: ...stored/returned.) Only active under debug assertions.
264 fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,
267 if cfg!(debug_assertions) {
268 if let Some(local_id_root) = local_id_root {
// Mismatched owner: this HirId belongs to a different item's table.
269 if hir_id.owner != local_id_root.index {
270 ty::tls::with(|tcx| {
271 let node_id = tcx.hir.hir_to_node_id(hir_id);
273 bug!("node {} with HirId::owner {:?} cannot be placed in \
274 TypeckTables with local_id_root {:?}",
275 tcx.hir.node_to_string(node_id),
276 DefId::local(hir_id.owner),
281 // We use "Null Object" TypeckTables in some of the analysis passes.
282 // These are just expected to be empty and their `local_id_root` is
283 // `None`. Therefore we cannot verify whether a given `HirId` would
284 // be a valid key for the given table. Instead we make sure that
285 // nobody tries to write to such a Null Object table.
287 bug!("access to invalid TypeckTables")
// Read-only accessors; every keyed lookup first validates that the
// `HirId` belongs to this table's `local_id_root`.
293 impl<'a, V> LocalTableInContext<'a, V> {
294 pub fn contains_key(&self, id: hir::HirId) -> bool {
295 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
296 self.data.contains_key(&id.local_id)
299 pub fn get(&self, id: hir::HirId) -> Option<&V> {
300 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
301 self.data.get(&id.local_id)
// Raw iteration over the underlying map; no validation is needed since
// no key is supplied by the caller.
304 pub fn iter(&self) -> hash_map::Iter<hir::ItemLocalId, V> {
// Indexing panics (with a descriptive message) if the key is absent.
309 impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> {
312 fn index(&self, key: hir::HirId) -> &V {
313 self.get(key).expect("LocalTableInContext: key not found")
// Mutable counterpart of `LocalTableInContext`; write accesses pass
// `true` to the validator so writes to "Null Object" tables are rejected.
317 pub struct LocalTableInContextMut<'a, V: 'a> {
318 local_id_root: Option<DefId>,
319 data: &'a mut ItemLocalMap<V>
322 impl<'a, V> LocalTableInContextMut<'a, V> {
323 pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
324 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
325 self.data.get_mut(&id.local_id)
// Entry API passthrough, keyed by the HirId's item-local part.
328 pub fn entry(&mut self, id: hir::HirId) -> Entry<hir::ItemLocalId, V> {
329 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
330 self.data.entry(id.local_id)
333 pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
334 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
335 self.data.insert(id.local_id, val)
338 pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
339 validate_hir_id_for_typeck_tables(self.local_id_root, id, true);
340 self.data.remove(&id.local_id)
// Per-body results of type checking, keyed by item-local IDs relative
// to `local_id_root`. Serialized into crate metadata (hence the
// RustcEncodable/RustcDecodable derives).
344 #[derive(RustcEncodable, RustcDecodable, Debug)]
345 pub struct TypeckTables<'tcx> {
346 /// The HirId::owner all ItemLocalIds in this table are relative to.
347 pub local_id_root: Option<DefId>,
349 /// Resolved definitions for `<T>::X` associated paths and
350 /// method calls, including those of overloaded operators.
351 type_dependent_defs: ItemLocalMap<Def>,
353 /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`)
354 /// or patterns (`S { field }`). The index is often useful by itself, but to learn more
355 /// about the field you also need definition of the variant to which the field
356 /// belongs, but it may not exist if it's a tuple field (`tuple.0`).
357 field_indices: ItemLocalMap<usize>,
359 /// Stores the canonicalized types provided by the user. See also `UserAssertTy` statement in
361 user_provided_tys: ItemLocalMap<CanonicalTy<'tcx>>,
363 /// Stores the types for various nodes in the AST. Note that this table
364 /// is not guaranteed to be populated until after typeck. See
365 /// typeck::check::fn_ctxt for details.
366 node_types: ItemLocalMap<Ty<'tcx>>,
368 /// Stores the type parameters which were substituted to obtain the type
369 /// of this node. This only applies to nodes that refer to entities
370 /// parameterized by type parameters, such as generic fns, types, or
372 node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,
// Adjustments (auto-deref, auto-ref, coercions) applied to expressions.
374 adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>,
376 /// Stores the actual binding mode for all instances of hir::BindingAnnotation.
377 pat_binding_modes: ItemLocalMap<BindingMode>,
379 /// Stores the types which were implicitly dereferenced in pattern binding modes
380 /// for later usage in HAIR lowering. For example,
383 /// match &&Some(5i32) {
388 /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored.
391 /// https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions
392 pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>,
// How each closure upvar is captured (by value or by reference).
395 pub upvar_capture_map: ty::UpvarCaptureMap<'tcx>,
397 /// Records the reasons that we picked the kind of each closure;
398 /// not all closures are present in the map.
399 closure_kind_origins: ItemLocalMap<(Span, ast::Name)>,
401 /// For each fn, records the "liberated" types of its arguments
402 /// and return type. Liberated means that all bound regions
403 /// (including late-bound regions) are replaced with free
404 /// equivalents. This table is not used in codegen (since regions
405 /// are erased there) and hence is not serialized to metadata.
406 liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>,
408 /// For each FRU expression, record the normalized types of the fields
409 /// of the struct - this is needed because it is non-trivial to
410 /// normalize while preserving regions. This table is used only in
411 /// MIR construction and hence is not serialized to metadata.
412 fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>,
414 /// Maps a cast expression to its kind. This is keyed on the
415 /// *from* expression of the cast, not the cast itself.
416 cast_kinds: ItemLocalMap<ty::cast::CastKind>,
418 /// Set of trait imports actually used in the method resolution.
419 /// This is used for warning unused imports. During type
420 /// checking, this `Lrc` should not be cloned: it must have a ref-count
421 /// of 1 so that we can insert things into the set mutably.
422 pub used_trait_imports: Lrc<DefIdSet>,
424 /// If any errors occurred while type-checking this body,
425 /// this field will be set to `true`.
426 pub tainted_by_errors: bool,
428 /// Stores the free-region relationships that were deduced from
429 /// its where clauses and parameter types. These are then
430 /// read-again by borrowck.
431 pub free_region_map: FreeRegionMap<'tcx>,
433 /// All the existential types that are restricted to concrete types
435 pub concrete_existential_types: FxHashMap<DefId, Ty<'tcx>>,
438 impl<'tcx> TypeckTables<'tcx> {
// Construct an empty table set. A `None` root produces a "Null Object"
// table that may be read but never written (see the validator above).
439 pub fn empty(local_id_root: Option<DefId>) -> TypeckTables<'tcx> {
442 type_dependent_defs: ItemLocalMap(),
443 field_indices: ItemLocalMap(),
444 user_provided_tys: ItemLocalMap(),
445 node_types: ItemLocalMap(),
446 node_substs: ItemLocalMap(),
447 adjustments: ItemLocalMap(),
448 pat_binding_modes: ItemLocalMap(),
449 pat_adjustments: ItemLocalMap(),
450 upvar_capture_map: FxHashMap(),
451 closure_kind_origins: ItemLocalMap(),
452 liberated_fn_sigs: ItemLocalMap(),
453 fru_field_types: ItemLocalMap(),
454 cast_kinds: ItemLocalMap(),
455 used_trait_imports: Lrc::new(DefIdSet()),
456 tainted_by_errors: false,
457 free_region_map: FreeRegionMap::new(),
458 concrete_existential_types: FxHashMap(),
462 /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node.
463 pub fn qpath_def(&self, qpath: &hir::QPath, id: hir::HirId) -> Def {
// Fully-resolved paths carry their `Def` directly.
465 hir::QPath::Resolved(_, ref path) => path.def,
// Type-relative paths were resolved during type checking; a missing
// entry degrades to `Def::Err`.
466 hir::QPath::TypeRelative(..) => {
467 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
468 self.type_dependent_defs.get(&id.local_id).cloned().unwrap_or(Def::Err)
// Paired read-only / mutable views over each private table, plus
// convenience lookups by `HirId`.
473 pub fn type_dependent_defs(&self) -> LocalTableInContext<Def> {
474 LocalTableInContext {
475 local_id_root: self.local_id_root,
476 data: &self.type_dependent_defs
480 pub fn type_dependent_defs_mut(&mut self) -> LocalTableInContextMut<Def> {
481 LocalTableInContextMut {
482 local_id_root: self.local_id_root,
483 data: &mut self.type_dependent_defs
487 pub fn field_indices(&self) -> LocalTableInContext<usize> {
488 LocalTableInContext {
489 local_id_root: self.local_id_root,
490 data: &self.field_indices
494 pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<usize> {
495 LocalTableInContextMut {
496 local_id_root: self.local_id_root,
497 data: &mut self.field_indices
501 pub fn user_provided_tys(&self) -> LocalTableInContext<CanonicalTy<'tcx>> {
502 LocalTableInContext {
503 local_id_root: self.local_id_root,
504 data: &self.user_provided_tys
508 pub fn user_provided_tys_mut(&mut self) -> LocalTableInContextMut<CanonicalTy<'tcx>> {
509 LocalTableInContextMut {
510 local_id_root: self.local_id_root,
511 data: &mut self.user_provided_tys
515 pub fn node_types(&self) -> LocalTableInContext<Ty<'tcx>> {
516 LocalTableInContext {
517 local_id_root: self.local_id_root,
518 data: &self.node_types
522 pub fn node_types_mut(&mut self) -> LocalTableInContextMut<Ty<'tcx>> {
523 LocalTableInContextMut {
524 local_id_root: self.local_id_root,
525 data: &mut self.node_types
// Panicking lookup: ICEs with a descriptive message when no type was
// recorded for the node.
529 pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
530 match self.node_id_to_type_opt(id) {
533 bug!("node_id_to_type: no type for node `{}`",
535 let id = tcx.hir.hir_to_node_id(id);
536 tcx.hir.node_to_string(id)
// Non-panicking variant of the above.
542 pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
543 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
544 self.node_types.get(&id.local_id).cloned()
547 pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<&'tcx Substs<'tcx>> {
548 LocalTableInContextMut {
549 local_id_root: self.local_id_root,
550 data: &mut self.node_substs
// Missing substs are treated as empty rather than an error.
554 pub fn node_substs(&self, id: hir::HirId) -> &'tcx Substs<'tcx> {
555 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
556 self.node_substs.get(&id.local_id).cloned().unwrap_or(Substs::empty())
559 pub fn node_substs_opt(&self, id: hir::HirId) -> Option<&'tcx Substs<'tcx>> {
560 validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
561 self.node_substs.get(&id.local_id).cloned()
564 // Returns the type of a pattern as a monotype. Like @expr_ty, this function
565 // doesn't provide type parameter substitutions.
566 pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
567 self.node_id_to_type(pat.hir_id)
// Non-panicking variant of `pat_ty`.
570 pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
571 self.node_id_to_type_opt(pat.hir_id)
574 // Returns the type of an expression as a monotype.
576 // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in
577 // some cases, we insert `Adjustment` annotations such as auto-deref or
578 // auto-ref. The type returned by this function does not consider such
579 // adjustments. See `expr_ty_adjusted()` instead.
581 // NB (2): This type doesn't provide type parameter substitutions; e.g. if you
582 // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
583 // instead of "fn(ty) -> T with T = isize".
584 pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
585 self.node_id_to_type(expr.hir_id)
588 pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
589 self.node_id_to_type_opt(expr.hir_id)
592 pub fn adjustments(&self) -> LocalTableInContext<Vec<ty::adjustment::Adjustment<'tcx>>> {
593 LocalTableInContext {
594 local_id_root: self.local_id_root,
595 data: &self.adjustments
599 pub fn adjustments_mut(&mut self)
600 -> LocalTableInContextMut<Vec<ty::adjustment::Adjustment<'tcx>>> {
601 LocalTableInContextMut {
602 local_id_root: self.local_id_root,
603 data: &mut self.adjustments
// Adjustments recorded for one expression; empty slice when none.
607 pub fn expr_adjustments(&self, expr: &hir::Expr)
608 -> &[ty::adjustment::Adjustment<'tcx>] {
609 validate_hir_id_for_typeck_tables(self.local_id_root, expr.hir_id, false);
610 self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
613 /// Returns the type of `expr`, considering any `Adjustment`
614 /// entry recorded for that expression.
615 pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> {
616 self.expr_adjustments(expr)
618 .map_or_else(|| self.expr_ty(expr), |adj| adj.target)
621 pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
622 self.expr_adjustments(expr)
624 .map(|adj| adj.target)
625 .or_else(|| self.expr_ty_opt(expr))
628 pub fn is_method_call(&self, expr: &hir::Expr) -> bool {
629 // Only paths and method calls/overloaded operators have
630 // entries in type_dependent_defs, ignore the former here.
631 if let hir::ExprKind::Path(_) = expr.node {
635 match self.type_dependent_defs().get(expr.hir_id) {
636 Some(&Def::Method(_)) => true,
// Remaining paired read-only / mutable table views.
641 pub fn pat_binding_modes(&self) -> LocalTableInContext<BindingMode> {
642 LocalTableInContext {
643 local_id_root: self.local_id_root,
644 data: &self.pat_binding_modes
648 pub fn pat_binding_modes_mut(&mut self)
649 -> LocalTableInContextMut<BindingMode> {
650 LocalTableInContextMut {
651 local_id_root: self.local_id_root,
652 data: &mut self.pat_binding_modes
656 pub fn pat_adjustments(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
657 LocalTableInContext {
658 local_id_root: self.local_id_root,
659 data: &self.pat_adjustments,
663 pub fn pat_adjustments_mut(&mut self)
664 -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
665 LocalTableInContextMut {
666 local_id_root: self.local_id_root,
667 data: &mut self.pat_adjustments,
// Direct (panicking) capture-mode lookup for a closure upvar.
671 pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> ty::UpvarCapture<'tcx> {
672 self.upvar_capture_map[&upvar_id]
675 pub fn closure_kind_origins(&self) -> LocalTableInContext<(Span, ast::Name)> {
676 LocalTableInContext {
677 local_id_root: self.local_id_root,
678 data: &self.closure_kind_origins
682 pub fn closure_kind_origins_mut(&mut self) -> LocalTableInContextMut<(Span, ast::Name)> {
683 LocalTableInContextMut {
684 local_id_root: self.local_id_root,
685 data: &mut self.closure_kind_origins
689 pub fn liberated_fn_sigs(&self) -> LocalTableInContext<ty::FnSig<'tcx>> {
690 LocalTableInContext {
691 local_id_root: self.local_id_root,
692 data: &self.liberated_fn_sigs
696 pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<ty::FnSig<'tcx>> {
697 LocalTableInContextMut {
698 local_id_root: self.local_id_root,
699 data: &mut self.liberated_fn_sigs
703 pub fn fru_field_types(&self) -> LocalTableInContext<Vec<Ty<'tcx>>> {
704 LocalTableInContext {
705 local_id_root: self.local_id_root,
706 data: &self.fru_field_types
710 pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<Vec<Ty<'tcx>>> {
711 LocalTableInContextMut {
712 local_id_root: self.local_id_root,
713 data: &mut self.fru_field_types
717 pub fn cast_kinds(&self) -> LocalTableInContext<ty::cast::CastKind> {
718 LocalTableInContext {
719 local_id_root: self.local_id_root,
720 data: &self.cast_kinds
724 pub fn cast_kinds_mut(&mut self) -> LocalTableInContextMut<ty::cast::CastKind> {
725 LocalTableInContextMut {
726 local_id_root: self.local_id_root,
727 data: &mut self.cast_kinds
// Stable (incremental-compilation) hashing for TypeckTables. The
// exhaustive destructuring below forces a compile error if a field is
// added without updating this impl.
732 impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TypeckTables<'gcx> {
733 fn hash_stable<W: StableHasherResult>(&self,
734 hcx: &mut StableHashingContext<'a>,
735 hasher: &mut StableHasher<W>) {
736 let ty::TypeckTables {
738 ref type_dependent_defs,
740 ref user_provided_tys,
744 ref pat_binding_modes,
746 ref upvar_capture_map,
747 ref closure_kind_origins,
748 ref liberated_fn_sigs,
753 ref used_trait_imports,
756 ref concrete_existential_types,
// NodeIds are not stable across compilations; hash via def-paths.
759 hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
760 type_dependent_defs.hash_stable(hcx, hasher);
761 field_indices.hash_stable(hcx, hasher);
762 user_provided_tys.hash_stable(hcx, hasher);
763 node_types.hash_stable(hcx, hasher);
764 node_substs.hash_stable(hcx, hasher);
765 adjustments.hash_stable(hcx, hasher);
766 pat_binding_modes.hash_stable(hcx, hasher);
767 pat_adjustments.hash_stable(hcx, hasher);
// Upvar keys embed item-local ids; rebuild stable DefIds for both the
// variable's owner and the closure before hashing.
768 hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
775 local_id_root.expect("trying to hash invalid TypeckTables");
777 let var_owner_def_id = DefId {
778 krate: local_id_root.krate,
781 let closure_def_id = DefId {
782 krate: local_id_root.krate,
783 index: closure_expr_id.to_def_id().index,
785 (hcx.def_path_hash(var_owner_def_id),
787 hcx.def_path_hash(closure_def_id))
790 closure_kind_origins.hash_stable(hcx, hasher);
791 liberated_fn_sigs.hash_stable(hcx, hasher);
792 fru_field_types.hash_stable(hcx, hasher);
793 cast_kinds.hash_stable(hcx, hasher);
794 used_trait_imports.hash_stable(hcx, hasher);
795 tainted_by_errors.hash_stable(hcx, hasher);
796 free_region_map.hash_stable(hcx, hasher);
797 concrete_existential_types.hash_stable(hcx, hasher);
802 impl<'tcx> CommonTypes<'tcx> {
// Pre-intern every primitive type and the always-needed regions so the
// rest of the compiler can grab them without interner lookups.
803 fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
804 // Ensure our type representation does not grow
805 #[cfg(target_pointer_width = "64")]
806 assert!(mem::size_of::<ty::TypeVariants>() <= 24);
807 #[cfg(target_pointer_width = "64")]
808 assert!(mem::size_of::<ty::TyS>() <= 32);
// Here local == global interner, so everything is interned globally.
810 let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty);
811 let mk_region = |r| {
812 if let Some(r) = interners.region.borrow().get(&r) {
815 let r = interners.arena.alloc(r);
816 interners.region.borrow_mut().insert(Interned(r));
824 isize: mk(TyInt(ast::IntTy::Isize)),
825 i8: mk(TyInt(ast::IntTy::I8)),
826 i16: mk(TyInt(ast::IntTy::I16)),
827 i32: mk(TyInt(ast::IntTy::I32)),
828 i64: mk(TyInt(ast::IntTy::I64)),
829 i128: mk(TyInt(ast::IntTy::I128)),
830 usize: mk(TyUint(ast::UintTy::Usize)),
831 u8: mk(TyUint(ast::UintTy::U8)),
832 u16: mk(TyUint(ast::UintTy::U16)),
833 u32: mk(TyUint(ast::UintTy::U32)),
834 u64: mk(TyUint(ast::UintTy::U64)),
835 u128: mk(TyUint(ast::UintTy::U128)),
836 f32: mk(TyFloat(ast::FloatTy::F32)),
837 f64: mk(TyFloat(ast::FloatTy::F64)),
839 re_empty: mk_region(RegionKind::ReEmpty),
840 re_static: mk_region(RegionKind::ReStatic),
841 re_erased: mk_region(RegionKind::ReErased),
846 /// The central data structure of the compiler. It stores references
847 /// to the various **arenas** and also houses the results of the
848 /// various **compiler queries** that have been performed. See the
849 /// [rustc guide] for more details.
851 /// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/ty.html
852 #[derive(Copy, Clone)]
853 pub struct TyCtxt<'a, 'gcx: 'tcx, 'tcx: 'a> {
// The global context; `interners` may be the global one or a
// shorter-lived local (inference) interner.
854 gcx: &'a GlobalCtxt<'gcx>,
855 interners: &'a CtxtInterners<'tcx>
// Deref so `tcx.foo` reaches GlobalCtxt fields directly.
858 impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> {
859 type Target = &'a GlobalCtxt<'gcx>;
860 fn deref(&self) -> &Self::Target {
// The single global compilation context that `TyCtxt` hands out
// references to: arenas, interners, session, caches, and query state.
865 pub struct GlobalCtxt<'tcx> {
866 global_arenas: &'tcx WorkerLocal<GlobalArenas<'tcx>>,
867 global_interners: CtxtInterners<'tcx>,
869 cstore: &'tcx CrateStoreDyn,
871 pub sess: &'tcx Session,
873 pub dep_graph: DepGraph,
875 /// Common types, pre-interned for your convenience.
876 pub types: CommonTypes<'tcx>,
878 /// Map indicating what traits are in scope for places where this
879 /// is relevant; generated by resolve.
880 trait_map: FxHashMap<DefIndex,
881 Lrc<FxHashMap<ItemLocalId,
882 Lrc<StableVec<TraitCandidate>>>>>,
884 /// Export map produced by name resolution.
885 export_map: FxHashMap<DefId, Lrc<Vec<Export>>>,
887 pub hir: hir_map::Map<'tcx>,
889 /// A map from DefPathHash -> DefId. Includes DefIds from the local crate
890 /// as well as all upstream crates. Only populated in incremental mode.
891 pub def_path_hash_to_def_id: Option<FxHashMap<DefPathHash, DefId>>,
893 pub(crate) queries: query::Queries<'tcx>,
895 // Records the free variables referenced by every closure
896 // expression. Do not track deps for this, just recompute it from
897 // scratch every time.
898 freevars: FxHashMap<DefId, Lrc<Vec<hir::Freevar>>>,
900 maybe_unused_trait_imports: FxHashSet<DefId>,
902 maybe_unused_extern_crates: Vec<(DefId, Span)>,
904 // Internal cache for metadata decoding. No need to track deps on this.
905 pub rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
907 /// Caches the results of trait selection. This cache is used
908 /// for things that do not have to do with the parameters in scope.
909 pub selection_cache: traits::SelectionCache<'tcx>,
911 /// Caches the results of trait evaluation. This cache is used
912 /// for things that do not have to do with the parameters in scope.
913 /// Merge this with `selection_cache`?
914 pub evaluation_cache: traits::EvaluationCache<'tcx>,
916 /// The definite name of the current crate after taking into account
917 /// attributes, commandline parameters, etc.
918 pub crate_name: Symbol,
920 /// Data layout specification for the current target.
921 pub data_layout: TargetDataLayout,
// Deduplication sets backing `intern_stability` / `intern_const_alloc`
// / `intern_layout` below.
923 stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
925 /// Stores the value of constants (and deduplicates the actual memory)
926 allocation_interner: Lock<FxHashSet<&'tcx Allocation>>,
928 pub alloc_map: Lock<interpret::AllocMap<'tcx, &'tcx Allocation>>,
930 layout_interner: Lock<FxHashSet<&'tcx LayoutDetails>>,
932 /// A general purpose channel to throw data out the back towards LLVM worker
935 /// This is intended to only get used during the codegen phase of the compiler
936 /// when satisfying the query for a particular codegen unit. Internally in
937 /// the query it'll send data along this channel to get processed later.
938 pub tx_to_llvm_workers: Lock<mpsc::Sender<Box<dyn Any + Send>>>,
940 output_filenames: Arc<OutputFilenames>,
943 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
944 /// Get the global TyCtxt.
// Rebinds `self` so both lifetimes are 'gcx, using the global interners.
946 pub fn global_tcx(self) -> TyCtxt<'a, 'gcx, 'gcx> {
949 interners: &self.gcx.global_interners,
// Arena-allocation helpers: each hands the value to the matching
// `GlobalArenas` arena and returns a 'gcx reference.
953 pub fn alloc_generics(self, generics: ty::Generics) -> &'gcx ty::Generics {
954 self.global_arenas.generics.alloc(generics)
957 pub fn alloc_steal_mir(self, mir: Mir<'gcx>) -> &'gcx Steal<Mir<'gcx>> {
958 self.global_arenas.steal_mir.alloc(Steal::new(mir))
961 pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx Mir<'gcx> {
962 self.global_arenas.mir.alloc(mir)
965 pub fn alloc_tables(self, tables: ty::TypeckTables<'gcx>) -> &'gcx ty::TypeckTables<'gcx> {
966 self.global_arenas.tables.alloc(tables)
969 pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
970 self.global_arenas.trait_def.alloc(def)
973 pub fn alloc_adt_def(self,
976 variants: Vec<ty::VariantDef>,
978 -> &'gcx ty::AdtDef {
979 let def = ty::AdtDef::new(self, did, kind, variants, repr);
980 self.global_arenas.adt_def.alloc(def)
// Empty slices are handled without touching the arena.
983 pub fn alloc_byte_array(self, bytes: &[u8]) -> &'gcx [u8] {
984 if bytes.is_empty() {
987 self.global_interners.arena.alloc_slice(bytes)
991 pub fn alloc_const_slice(self, values: &[&'tcx ty::Const<'tcx>])
992 -> &'tcx [&'tcx ty::Const<'tcx>] {
993 if values.is_empty() {
996 self.interners.arena.alloc_slice(values)
1000 pub fn alloc_name_const_slice(self, values: &[(ast::Name, &'tcx ty::Const<'tcx>)])
1001 -> &'tcx [(ast::Name, &'tcx ty::Const<'tcx>)] {
1002 if values.is_empty() {
1005 self.interners.arena.alloc_slice(values)
// Interning helpers: look up an equal value first, otherwise allocate
// and record it; `replace` returning a previous entry is an ICE since
// the get() above should have found it.
1009 pub fn intern_const_alloc(
1012 ) -> &'gcx Allocation {
1013 let allocs = &mut self.allocation_interner.borrow_mut();
1014 if let Some(alloc) = allocs.get(&alloc) {
1018 let interned = self.global_arenas.const_allocs.alloc(alloc);
1019 if let Some(prev) = allocs.replace(interned) {
1020 bug!("Tried to overwrite interned Allocation: {:#?}", prev)
1025 /// Allocates a byte or string literal for `mir::interpret`
1026 pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId {
1027 // create an allocation that just contains these bytes
1028 let alloc = interpret::Allocation::from_byte_aligned_bytes(bytes);
1029 let alloc = self.intern_const_alloc(alloc);
1030 self.alloc_map.lock().allocate(alloc)
1033 pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
1034 let mut stability_interner = self.stability_interner.borrow_mut();
1035 if let Some(st) = stability_interner.get(&stab) {
1039 let interned = self.global_interners.arena.alloc(stab);
1040 if let Some(prev) = stability_interner.replace(interned) {
1041 bug!("Tried to overwrite interned Stability: {:?}", prev)
1046 pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
1047 let mut layout_interner = self.layout_interner.borrow_mut();
1048 if let Some(layout) = layout_interner.get(&layout) {
1052 let interned = self.global_arenas.layout.alloc(layout);
1053 if let Some(prev) = layout_interner.replace(interned) {
1054 bug!("Tried to overwrite interned Layout: {:?}", prev)
1059 pub fn lift<T: ?Sized + Lift<'tcx>>(self, value: &T) -> Option<T::Lifted> {
1060 value.lift_to_tcx(self)
1063 /// Like lift, but only tries in the global tcx.
1064 pub fn lift_to_global<T: ?Sized + Lift<'gcx>>(self, value: &T) -> Option<T::Lifted> {
1065 value.lift_to_tcx(self.global_tcx())
1068 /// Returns true if self is the same as self.global_tcx().
// Identity is decided by comparing interner addresses.
1069 fn is_global(self) -> bool {
1070 let local = self.interners as *const _;
1071 let global = &self.global_interners as *const _;
1072 local as usize == global as usize
1075 /// Create a type context and call the closure with a `TyCtxt` reference
1076 /// to the context. The closure enforces that the type context and any interned
1077 /// value (types, substs, etc.) can only be used while `ty::tls` has a valid
1078 /// reference to the context, to allow formatting values that need it.
1079 pub fn create_and_enter<F, R>(s: &'tcx Session,
1080 cstore: &'tcx CrateStoreDyn,
1081 local_providers: ty::query::Providers<'tcx>,
1082 extern_providers: ty::query::Providers<'tcx>,
1083 arenas: &'tcx AllArenas<'tcx>,
1084 resolutions: ty::Resolutions,
1085 hir: hir_map::Map<'tcx>,
1086 on_disk_query_result_cache: query::OnDiskCache<'tcx>,
1088 tx: mpsc::Sender<Box<dyn Any + Send>>,
1089 output_filenames: &OutputFilenames,
1091 where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
1093 let data_layout = TargetDataLayout::parse(&s.target.target).unwrap_or_else(|err| {
1096 let interners = CtxtInterners::new(&arenas.interner);
1097 let common_types = CommonTypes::new(&interners);
1098 let dep_graph = hir.dep_graph.clone();
1099 let max_cnum = cstore.crates_untracked().iter().map(|c| c.as_usize()).max().unwrap_or(0);
1100 let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
1101 providers[LOCAL_CRATE] = local_providers;
1103 let def_path_hash_to_def_id = if s.opts.build_dep_graph() {
1104 let upstream_def_path_tables: Vec<(CrateNum, Lrc<_>)> = cstore
1107 .map(|&cnum| (cnum, cstore.def_path_table(cnum)))
1110 let def_path_tables = || {
1111 upstream_def_path_tables
1113 .map(|&(cnum, ref rc)| (cnum, &**rc))
1114 .chain(iter::once((LOCAL_CRATE, hir.definitions().def_path_table())))
1117 // Precompute the capacity of the hashmap so we don't have to
1118 // re-allocate when populating it.
1119 let capacity = def_path_tables().map(|(_, t)| t.size()).sum::<usize>();
1121 let mut map: FxHashMap<_, _> = FxHashMap::with_capacity_and_hasher(
1123 ::std::default::Default::default()
1126 for (cnum, def_path_table) in def_path_tables() {
1127 def_path_table.add_def_path_hashes_to(cnum, &mut map);
1135 let mut trait_map = FxHashMap();
1136 for (k, v) in resolutions.trait_map {
1137 let hir_id = hir.node_to_hir_id(k);
1138 let map = trait_map.entry(hir_id.owner)
1139 .or_insert_with(|| Lrc::new(FxHashMap()));
1140 Lrc::get_mut(map).unwrap()
1141 .insert(hir_id.local_id,
1142 Lrc::new(StableVec::new(v)));
1145 let gcx = &GlobalCtxt {
1148 global_arenas: &arenas.global,
1149 global_interners: interners,
1150 dep_graph: dep_graph.clone(),
1151 types: common_types,
1153 export_map: resolutions.export_map.into_iter().map(|(k, v)| {
1156 freevars: resolutions.freevars.into_iter().map(|(k, v)| {
1157 (hir.local_def_id(k), Lrc::new(v))
1159 maybe_unused_trait_imports:
1160 resolutions.maybe_unused_trait_imports
1162 .map(|id| hir.local_def_id(id))
1164 maybe_unused_extern_crates:
1165 resolutions.maybe_unused_extern_crates
1167 .map(|(id, sp)| (hir.local_def_id(id), sp))
1170 def_path_hash_to_def_id,
1171 queries: query::Queries::new(providers, on_disk_query_result_cache),
1172 rcache: Lock::new(FxHashMap()),
1173 selection_cache: traits::SelectionCache::new(),
1174 evaluation_cache: traits::EvaluationCache::new(),
1175 crate_name: Symbol::intern(crate_name),
1177 layout_interner: Lock::new(FxHashSet()),
1178 stability_interner: Lock::new(FxHashSet()),
1179 allocation_interner: Lock::new(FxHashSet()),
1180 alloc_map: Lock::new(interpret::AllocMap::new()),
1181 tx_to_llvm_workers: Lock::new(tx),
1182 output_filenames: Arc::new(output_filenames.clone()),
1185 sync::assert_send_val(&gcx);
1187 tls::enter_global(gcx, f)
1190 pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
1191 let cname = self.crate_name(LOCAL_CRATE).as_str();
1192 self.sess.consider_optimizing(&cname, msg)
1195 pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
1196 self.get_lang_items(LOCAL_CRATE)
1199 /// Due to missing llvm support for lowering 128 bit math to software emulation
1200 /// (on some targets), the lowering can be done in MIR.
1202 /// This function only exists until said support is implemented.
1203 pub fn is_binop_lang_item(&self, def_id: DefId) -> Option<(mir::BinOp, bool)> {
1204 let items = self.lang_items();
1205 let def_id = Some(def_id);
1206 if items.i128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1207 else if items.u128_add_fn() == def_id { Some((mir::BinOp::Add, false)) }
1208 else if items.i128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1209 else if items.u128_sub_fn() == def_id { Some((mir::BinOp::Sub, false)) }
1210 else if items.i128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1211 else if items.u128_mul_fn() == def_id { Some((mir::BinOp::Mul, false)) }
1212 else if items.i128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1213 else if items.u128_div_fn() == def_id { Some((mir::BinOp::Div, false)) }
1214 else if items.i128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1215 else if items.u128_rem_fn() == def_id { Some((mir::BinOp::Rem, false)) }
1216 else if items.i128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1217 else if items.u128_shl_fn() == def_id { Some((mir::BinOp::Shl, false)) }
1218 else if items.i128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1219 else if items.u128_shr_fn() == def_id { Some((mir::BinOp::Shr, false)) }
1220 else if items.i128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1221 else if items.u128_addo_fn() == def_id { Some((mir::BinOp::Add, true)) }
1222 else if items.i128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1223 else if items.u128_subo_fn() == def_id { Some((mir::BinOp::Sub, true)) }
1224 else if items.i128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1225 else if items.u128_mulo_fn() == def_id { Some((mir::BinOp::Mul, true)) }
1226 else if items.i128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1227 else if items.u128_shlo_fn() == def_id { Some((mir::BinOp::Shl, true)) }
1228 else if items.i128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1229 else if items.u128_shro_fn() == def_id { Some((mir::BinOp::Shr, true)) }
1233 pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
1234 self.stability_index(LOCAL_CRATE)
1237 pub fn crates(self) -> Lrc<Vec<CrateNum>> {
1238 self.all_crate_nums(LOCAL_CRATE)
1241 pub fn features(self) -> Lrc<feature_gate::Features> {
1242 self.features_query(LOCAL_CRATE)
1245 pub fn def_key(self, id: DefId) -> hir_map::DefKey {
1247 self.hir.def_key(id)
1249 self.cstore.def_key(id)
1253 /// Convert a `DefId` into its fully expanded `DefPath` (every
1254 /// `DefId` is really just an interned def-path).
1256 /// Note that if `id` is not local to this crate, the result will
1257 /// be a non-local `DefPath`.
1258 pub fn def_path(self, id: DefId) -> hir_map::DefPath {
1260 self.hir.def_path(id)
1262 self.cstore.def_path(id)
1267 pub fn def_path_hash(self, def_id: DefId) -> hir_map::DefPathHash {
1268 if def_id.is_local() {
1269 self.hir.definitions().def_path_hash(def_id.index)
1271 self.cstore.def_path_hash(def_id)
1275 pub fn def_path_debug_str(self, def_id: DefId) -> String {
1276 // We are explicitly not going through queries here in order to get
1277 // crate name and disambiguator since this code is called from debug!()
1278 // statements within the query system and we'd run into endless
1279 // recursion otherwise.
1280 let (crate_name, crate_disambiguator) = if def_id.is_local() {
1281 (self.crate_name.clone(),
1282 self.sess.local_crate_disambiguator())
1284 (self.cstore.crate_name_untracked(def_id.krate),
1285 self.cstore.crate_disambiguator_untracked(def_id.krate))
1290 // Don't print the whole crate disambiguator. That's just
1291 // annoying in debug output.
1292 &(crate_disambiguator.to_fingerprint().to_hex())[..4],
1293 self.def_path(def_id).to_string_no_crate())
1296 pub fn metadata_encoding_version(self) -> Vec<u8> {
1297 self.cstore.metadata_encoding_version().to_vec()
1300 // Note that this is *untracked* and should only be used within the query
1301 // system if the result is otherwise tracked through queries
1302 pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Lrc<dyn Any> {
1303 self.cstore.crate_data_as_rc_any(cnum)
1306 pub fn create_stable_hashing_context(self) -> StableHashingContext<'a> {
1307 let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());
1309 StableHashingContext::new(self.sess,
1311 self.hir.definitions(),
1315 // This method makes sure that we have a DepNode and a Fingerprint for
1316 // every upstream crate. It needs to be called once right after the tcx is
1318 // With full-fledged red/green, the method will probably become unnecessary
1319 // as this will be done on-demand.
1320 pub fn allocate_metadata_dep_nodes(self) {
1321 // We cannot use the query versions of crates() and crate_hash(), since
1322 // those would need the DepNodes that we are allocating here.
1323 for cnum in self.cstore.crates_untracked() {
1324 let dep_node = DepNode::new(self, DepConstructor::CrateMetadata(cnum));
1325 let crate_hash = self.cstore.crate_hash_untracked(cnum);
1326 self.dep_graph.with_task(dep_node,
1329 |_, x| x // No transformation needed
1334 // This method exercises the `in_scope_traits_map` query for all possible
1335 // values so that we have their fingerprints available in the DepGraph.
1336 // This is only required as long as we still use the old dependency tracking
1337 // which needs to have the fingerprints of all input nodes beforehand.
1338 pub fn precompute_in_scope_traits_hashes(self) {
1339 for &def_index in self.trait_map.keys() {
1340 self.in_scope_traits_map(def_index);
1344 pub fn serialize_query_result_cache<E>(self,
1346 -> Result<(), E::Error>
1347 where E: ty::codec::TyEncoder
1349 self.queries.on_disk_cache.serialize(self.global_tcx(), encoder)
1352 /// If true, we should use a naive AST walk to determine if match
1353 /// guard could perform bad mutations (or mutable-borrows).
1354 pub fn check_for_mutation_in_guard_via_ast_walk(self) -> bool {
1355 !self.sess.opts.debugging_opts.disable_ast_check_for_mutation_in_guard
1358 /// If true, we should use the AST-based borrowck (we may *also* use
1359 /// the MIR-based borrowck).
1360 pub fn use_ast_borrowck(self) -> bool {
1361 self.borrowck_mode().use_ast()
1364 /// If true, we should use the MIR-based borrowck (we may *also* use
1365 /// the AST-based borrowck).
1366 pub fn use_mir_borrowck(self) -> bool {
1367 self.borrowck_mode().use_mir()
1370 /// If true, we should use the MIR-based borrow check, but also
1371 /// fall back on the AST borrow check if the MIR-based one errors.
1372 pub fn migrate_borrowck(self) -> bool {
1373 self.borrowck_mode().migrate()
1376 /// If true, make MIR codegen for `match` emit a temp that holds a
1377 /// borrow of the input to the match expression.
1378 pub fn generate_borrow_of_any_match_input(&self) -> bool {
1379 self.emit_read_for_match()
1382 /// If true, make MIR codegen for `match` emit ReadForMatch
1383 /// statements (which simulate the maximal effect of executing the
1384 /// patterns in a match arm).
1385 pub fn emit_read_for_match(&self) -> bool {
1386 self.use_mir_borrowck() && !self.sess.opts.debugging_opts.nll_dont_emit_read_for_match
1389 /// If true, pattern variables for use in guards on match arms
1390 /// will be bound as references to the data, and occurrences of
1391 /// those variables in the guard expression will implicitly
1392 /// dereference those bindings. (See rust-lang/rust#27282.)
1393 pub fn all_pat_vars_are_implicit_refs_within_guards(self) -> bool {
1394 self.borrowck_mode().use_mir()
1397 /// If true, we should enable two-phase borrows checks. This is
1398 /// done with either: `-Ztwo-phase-borrows`, `#![feature(nll)]`,
1399 /// or by opting into an edition after 2015.
1400 pub fn two_phase_borrows(self) -> bool {
1401 if self.features().nll || self.sess.opts.debugging_opts.two_phase_borrows {
1405 match self.sess.edition() {
1406 Edition::Edition2015 => false,
1407 Edition::Edition2018 => true,
1412 /// What mode(s) of borrowck should we run? AST? MIR? both?
1413 /// (Also considers the `#![feature(nll)]` setting.)
1414 pub fn borrowck_mode(&self) -> BorrowckMode {
1415 // Here are the main constraints we need to deal with:
1417 // 1. An opts.borrowck_mode of `BorrowckMode::Ast` is
1418 // synonymous with no `-Z borrowck=...` flag at all.
1419 // (This is arguably a historical accident.)
1421 // 2. `BorrowckMode::Migrate` is the limited migration to
1422 // NLL that we are deploying with the 2018 edition.
1424 // 3. We want to allow developers on the Nightly channel
1425 // to opt back into the "hard error" mode for NLL,
1426 // (which they can do via specifying `#![feature(nll)]`
1427 // explicitly in their crate).
1429 // So, this precedence list is how pnkfelix chose to work with
1430 // the above constraints:
1432 // * `#![feature(nll)]` *always* means use NLL with hard
1433 // errors. (To simplify the code here, it now even overrides
1434 // a user's attempt to specify `-Z borrowck=compare`, which
1435 // we arguably do not need anymore and should remove.)
1437 // * Otherwise, if no `-Z borrowck=...` flag was given (or
1438 // if `borrowck=ast` was specified), then use the default
1439 // as required by the edition.
1441 // * Otherwise, use the behavior requested via `-Z borrowck=...`
1443 if self.features().nll { return BorrowckMode::Mir; }
1445 match self.sess.opts.borrowck_mode {
1446 mode @ BorrowckMode::Mir |
1447 mode @ BorrowckMode::Compare |
1448 mode @ BorrowckMode::Migrate => mode,
1450 BorrowckMode::Ast => match self.sess.edition() {
1451 Edition::Edition2015 => BorrowckMode::Ast,
1452 Edition::Edition2018 => BorrowckMode::Migrate,
1454 // For now, future editions mean Migrate. (But it
1455 // would make a lot of sense for it to be changed to
1456 // `BorrowckMode::Mir`, depending on how we plan to
1457 // time the forcing of full migration to NLL.)
1458 _ => BorrowckMode::Migrate,
1463 /// Should we emit EndRegion MIR statements? These are consumed by
1464 /// MIR borrowck, but not when NLL is used. They are also consumed
1465 /// by the validation stuff.
1466 pub fn emit_end_regions(self) -> bool {
1467 self.sess.opts.debugging_opts.emit_end_regions ||
1468 self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
1469 self.use_mir_borrowck()
1473 pub fn share_generics(self) -> bool {
1474 match self.sess.opts.debugging_opts.share_generics {
1475 Some(setting) => setting,
1477 self.sess.opts.incremental.is_some() ||
1478 match self.sess.opts.optimize {
1482 OptLevel::SizeMin => true,
1484 OptLevel::Aggressive => false,
1491 pub fn local_crate_exports_generics(self) -> bool {
1492 debug_assert!(self.share_generics());
1494 self.sess.crate_types.borrow().iter().any(|crate_type| {
1496 CrateType::Executable |
1497 CrateType::Staticlib |
1498 CrateType::ProcMacro |
1499 CrateType::Cdylib => false,
1501 CrateType::Dylib => true,
1507 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
1508 pub fn encode_metadata(self, link_meta: &LinkMeta)
1511 self.cstore.encode_metadata(self, link_meta)
1515 impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
1516 /// Call the closure with a local `TyCtxt` using the given arena.
1517 pub fn enter_local<F, R>(
1519 arena: &'tcx SyncDroplessArena,
1523 F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1525 let interners = CtxtInterners::new(arena);
1528 interners: &interners,
1530 ty::tls::with_related_context(tcx.global_tcx(), |icx| {
1531 let new_icx = ty::tls::ImplicitCtxt {
1533 query: icx.query.clone(),
1534 layout_depth: icx.layout_depth,
1537 ty::tls::enter_context(&new_icx, |new_icx| {
1544 /// A trait implemented for all X<'a> types which can be safely and
1545 /// efficiently converted to X<'tcx> as long as they are part of the
1546 /// provided TyCtxt<'tcx>.
1547 /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx>
1548 /// by looking them up in their respective interners.
1550 /// However, this is still not the best implementation as it does
1551 /// need to compare the components, even for interned values.
1552 /// It would be more efficient if TypedArena provided a way to
1553 /// determine whether the address is in the allocated range.
1555 /// None is returned if the value or one of the components is not part
1556 /// of the provided context.
1557 /// For Ty, None can be returned if either the type interner doesn't
1558 /// contain the TypeVariants key or if the address of the interned
1559 /// pointer differs. The latter case is possible if a primitive type,
1560 /// e.g. `()` or `u8`, was interned in a different context.
1561 pub trait Lift<'tcx>: fmt::Debug {
1562 type Lifted: fmt::Debug + 'tcx;
1563 fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
1566 impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
1567 type Lifted = Ty<'tcx>;
1568 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
1569 if tcx.interners.arena.in_arena(*self as *const _) {
1570 return Some(unsafe { mem::transmute(*self) });
1572 // Also try in the global tcx if we're not that.
1573 if !tcx.is_global() {
1574 self.lift_to_tcx(tcx.global_tcx())
1581 impl<'a, 'tcx> Lift<'tcx> for Region<'a> {
1582 type Lifted = Region<'tcx>;
1583 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Region<'tcx>> {
1584 if tcx.interners.arena.in_arena(*self as *const _) {
1585 return Some(unsafe { mem::transmute(*self) });
1587 // Also try in the global tcx if we're not that.
1588 if !tcx.is_global() {
1589 self.lift_to_tcx(tcx.global_tcx())
1596 impl<'a, 'tcx> Lift<'tcx> for &'a Goal<'a> {
1597 type Lifted = &'tcx Goal<'tcx>;
1598 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Goal<'tcx>> {
1599 if tcx.interners.arena.in_arena(*self as *const _) {
1600 return Some(unsafe { mem::transmute(*self) });
1602 // Also try in the global tcx if we're not that.
1603 if !tcx.is_global() {
1604 self.lift_to_tcx(tcx.global_tcx())
1611 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Goal<'a>> {
1612 type Lifted = &'tcx Slice<Goal<'tcx>>;
1613 fn lift_to_tcx<'b, 'gcx>(
1615 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1616 ) -> Option<&'tcx Slice<Goal<'tcx>>> {
1617 if tcx.interners.arena.in_arena(*self as *const _) {
1618 return Some(unsafe { mem::transmute(*self) });
1620 // Also try in the global tcx if we're not that.
1621 if !tcx.is_global() {
1622 self.lift_to_tcx(tcx.global_tcx())
1629 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Clause<'a>> {
1630 type Lifted = &'tcx Slice<Clause<'tcx>>;
1631 fn lift_to_tcx<'b, 'gcx>(
1633 tcx: TyCtxt<'b, 'gcx, 'tcx>,
1634 ) -> Option<&'tcx Slice<Clause<'tcx>>> {
1635 if tcx.interners.arena.in_arena(*self as *const _) {
1636 return Some(unsafe { mem::transmute(*self) });
1638 // Also try in the global tcx if we're not that.
1639 if !tcx.is_global() {
1640 self.lift_to_tcx(tcx.global_tcx())
1647 impl<'a, 'tcx> Lift<'tcx> for &'a Const<'a> {
1648 type Lifted = &'tcx Const<'tcx>;
1649 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Const<'tcx>> {
1650 if tcx.interners.arena.in_arena(*self as *const _) {
1651 return Some(unsafe { mem::transmute(*self) });
1653 // Also try in the global tcx if we're not that.
1654 if !tcx.is_global() {
1655 self.lift_to_tcx(tcx.global_tcx())
1662 impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> {
1663 type Lifted = &'tcx Substs<'tcx>;
1664 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> {
1665 if self.len() == 0 {
1666 return Some(Slice::empty());
1668 if tcx.interners.arena.in_arena(&self[..] as *const _) {
1669 return Some(unsafe { mem::transmute(*self) });
1671 // Also try in the global tcx if we're not that.
1672 if !tcx.is_global() {
1673 self.lift_to_tcx(tcx.global_tcx())
1680 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
1681 type Lifted = &'tcx Slice<Ty<'tcx>>;
1682 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1683 -> Option<&'tcx Slice<Ty<'tcx>>> {
1684 if self.len() == 0 {
1685 return Some(Slice::empty());
1687 if tcx.interners.arena.in_arena(*self as *const _) {
1688 return Some(unsafe { mem::transmute(*self) });
1690 // Also try in the global tcx if we're not that.
1691 if !tcx.is_global() {
1692 self.lift_to_tcx(tcx.global_tcx())
1699 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<ExistentialPredicate<'a>> {
1700 type Lifted = &'tcx Slice<ExistentialPredicate<'tcx>>;
1701 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1702 -> Option<&'tcx Slice<ExistentialPredicate<'tcx>>> {
1703 if self.is_empty() {
1704 return Some(Slice::empty());
1706 if tcx.interners.arena.in_arena(*self as *const _) {
1707 return Some(unsafe { mem::transmute(*self) });
1709 // Also try in the global tcx if we're not that.
1710 if !tcx.is_global() {
1711 self.lift_to_tcx(tcx.global_tcx())
1718 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Predicate<'a>> {
1719 type Lifted = &'tcx Slice<Predicate<'tcx>>;
1720 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
1721 -> Option<&'tcx Slice<Predicate<'tcx>>> {
1722 if self.is_empty() {
1723 return Some(Slice::empty());
1725 if tcx.interners.arena.in_arena(*self as *const _) {
1726 return Some(unsafe { mem::transmute(*self) });
1728 // Also try in the global tcx if we're not that.
1729 if !tcx.is_global() {
1730 self.lift_to_tcx(tcx.global_tcx())
1737 impl<'a, 'tcx> Lift<'tcx> for &'a Slice<CanonicalVarInfo> {
1738 type Lifted = &'tcx Slice<CanonicalVarInfo>;
1739 fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
1740 if self.len() == 0 {
1741 return Some(Slice::empty());
1743 if tcx.interners.arena.in_arena(*self as *const _) {
1744 return Some(unsafe { mem::transmute(*self) });
1746 // Also try in the global tcx if we're not that.
1747 if !tcx.is_global() {
1748 self.lift_to_tcx(tcx.global_tcx())
1756 use super::{GlobalCtxt, TyCtxt};
1762 use errors::{Diagnostic, TRACK_DIAGNOSTICS};
1763 use rustc_data_structures::OnDrop;
1764 use rustc_data_structures::sync::{self, Lrc, Lock};
1765 use dep_graph::OpenTask;
1767 #[cfg(not(parallel_queries))]
1768 use std::cell::Cell;
1770 #[cfg(parallel_queries)]
1773 /// This is the implicit state of rustc. It contains the current
1774 /// TyCtxt and query. It is updated when creating a local interner or
1775 /// executing a new query. Whenever there's a TyCtxt value available
1776 /// you should also have access to an ImplicitCtxt through the functions
1779 pub struct ImplicitCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
1780 /// The current TyCtxt. Initially created by `enter_global` and updated
1781 /// by `enter_local` with a new local interner
1782 pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
1784 /// The current query job, if any. This is updated by start_job in
1785 /// ty::query::plumbing when executing a query
1786 pub query: Option<Lrc<query::QueryJob<'gcx>>>,
1788 /// Used to prevent layout from recursing too deeply.
1789 pub layout_depth: usize,
1791 /// The current dep graph task. This is used to add dependencies to queries
1792 /// when executing them
1793 pub task: &'a OpenTask,
/// Sets Rayon's thread local variable which is preserved for Rayon jobs
/// to `value` during the call to `f`. It is restored to its previous value after.
/// This is used to set the pointer to the new ImplicitCtxt.
#[cfg(parallel_queries)]
fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
    rayon_core::tlv::with(value, f)
}

/// Gets Rayon's thread local variable which is preserved for Rayon jobs.
/// This is used to get the pointer to the current ImplicitCtxt.
#[cfg(parallel_queries)]
fn get_tlv() -> usize {
    rayon_core::tlv::get()
}
1811 /// A thread local variable which stores a pointer to the current ImplicitCtxt
1812 #[cfg(not(parallel_queries))]
1813 thread_local!(static TLV: Cell<usize> = Cell::new(0));
1815 /// Sets TLV to `value` during the call to `f`.
1816 /// It is restored to its previous value after.
1817 /// This is used to set the pointer to the new ImplicitCtxt.
1818 #[cfg(not(parallel_queries))]
1819 fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
1820 let old = get_tlv();
1821 let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
1822 TLV.with(|tlv| tlv.set(value));
1826 /// This is used to get the pointer to the current ImplicitCtxt.
1827 #[cfg(not(parallel_queries))]
1828 fn get_tlv() -> usize {
1829 TLV.with(|tlv| tlv.get())
1832 /// This is a callback from libsyntax as it cannot access the implicit state
1833 /// in librustc otherwise
1834 fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
1836 write!(f, "{}", tcx.sess.codemap().span_to_string(span))
1840 /// This is a callback from libsyntax as it cannot access the implicit state
1841 /// in librustc otherwise. It is used to when diagnostic messages are
1842 /// emitted and stores them in the current query, if there is one.
1843 fn track_diagnostic(diagnostic: &Diagnostic) {
1844 with_context_opt(|icx| {
1845 if let Some(icx) = icx {
1846 if let Some(ref query) = icx.query {
1847 query.diagnostics.lock().push(diagnostic.clone());
1853 /// Sets up the callbacks from libsyntax on the current thread
1854 pub fn with_thread_locals<F, R>(f: F) -> R
1855 where F: FnOnce() -> R
1857 syntax_pos::SPAN_DEBUG.with(|span_dbg| {
1858 let original_span_debug = span_dbg.get();
1859 span_dbg.set(span_debug);
1861 let _on_drop = OnDrop(move || {
1862 span_dbg.set(original_span_debug);
1865 TRACK_DIAGNOSTICS.with(|current| {
1866 let original = current.get();
1867 current.set(track_diagnostic);
1869 let _on_drop = OnDrop(move || {
1870 current.set(original);
1878 /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
1879 pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
1881 where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
1883 set_tlv(context as *const _ as usize, || {
1888 /// Enters GlobalCtxt by setting up libsyntax callbacks and
1889 /// creating a initial TyCtxt and ImplicitCtxt.
1890 /// This happens once per rustc session and TyCtxts only exists
1891 /// inside the `f` function.
1892 pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R
1893 where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
1895 with_thread_locals(|| {
1896 // Update GCX_PTR to indicate there's a GlobalCtxt available
1897 GCX_PTR.with(|lock| {
1898 *lock.lock() = gcx as *const _ as usize;
1900 // Set GCX_PTR back to 0 when we exit
1901 let _on_drop = OnDrop(move || {
1902 GCX_PTR.with(|lock| *lock.lock() = 0);
1907 interners: &gcx.global_interners,
1909 let icx = ImplicitCtxt {
1913 task: &OpenTask::Ignore,
1915 enter_context(&icx, |_| {
1921 /// Stores a pointer to the GlobalCtxt if one is available.
1922 /// This is used to access the GlobalCtxt in the deadlock handler
1924 scoped_thread_local!(pub static GCX_PTR: Lock<usize>);
1926 /// Creates a TyCtxt and ImplicitCtxt based on the GCX_PTR thread local.
1927 /// This is used in the deadlock handler.
1928 pub unsafe fn with_global<F, R>(f: F) -> R
1929 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
1931 let gcx = GCX_PTR.with(|lock| *lock.lock());
1933 let gcx = &*(gcx as *const GlobalCtxt<'_>);
1936 interners: &gcx.global_interners,
1938 let icx = ImplicitCtxt {
1942 task: &OpenTask::Ignore,
1944 enter_context(&icx, |_| f(tcx))
1947 /// Allows access to the current ImplicitCtxt in a closure if one is available
1948 pub fn with_context_opt<F, R>(f: F) -> R
1949 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
1951 let context = get_tlv();
1955 // We could get a ImplicitCtxt pointer from another thread.
1956 // Ensure that ImplicitCtxt is Sync
1957 sync::assert_sync::<ImplicitCtxt>();
1959 unsafe { f(Some(&*(context as *const ImplicitCtxt))) }
1963 /// Allows access to the current ImplicitCtxt.
1964 /// Panics if there is no ImplicitCtxt available
1965 pub fn with_context<F, R>(f: F) -> R
1966 where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
1968 with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
1971 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
1972 /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
1973 /// with the same 'gcx lifetime as the TyCtxt passed in.
1974 /// This will panic if you pass it a TyCtxt which has a different global interner from
1975 /// the current ImplicitCtxt's tcx field.
1976 pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
1977 where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
1979 with_context(|context| {
1981 let gcx = tcx.gcx as *const _ as usize;
1982 assert!(context.tcx.gcx as *const _ as usize == gcx);
1983 let context: &ImplicitCtxt = mem::transmute(context);
1989 /// Allows access to the current ImplicitCtxt whose tcx field has the same global
1990 /// interner and local interner as the tcx argument passed in. This means the closure
1991 /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
1992 /// This will panic if you pass it a TyCtxt which has a different global interner or
1993 /// a different local interner from the current ImplicitCtxt's tcx field.
1994 pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
1995 where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
1997 with_context(|context| {
1999 let gcx = tcx.gcx as *const _ as usize;
2000 let interners = tcx.interners as *const _ as usize;
2001 assert!(context.tcx.gcx as *const _ as usize == gcx);
2002 assert!(context.tcx.interners as *const _ as usize == interners);
2003 let context: &ImplicitCtxt = mem::transmute(context);
2009 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2010 /// Panics if there is no ImplicitCtxt available
2011 pub fn with<F, R>(f: F) -> R
2012 where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
2014 with_context(|context| f(context.tcx))
2017 /// Allows access to the TyCtxt in the current ImplicitCtxt.
2018 /// The closure is passed None if there is no ImplicitCtxt available
2019 pub fn with_opt<F, R>(f: F) -> R
2020 where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
2022 with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
macro_rules! sty_debug_print {
    ($ctxt: expr, $($variant: ident),*) => {{
        // curious inner module to allow variant names to be used as
        // variables.
        #[allow(non_snake_case)]
        mod inner {
            use ty::{self, TyCtxt};
            use ty::context::Interned;

            #[derive(Copy, Clone)]
            struct DebugStat {
                total: usize,
                region_infer: usize,
                ty_infer: usize,
                both_infer: usize,
            }

            pub fn go(tcx: TyCtxt) {
                let mut total = DebugStat {
                    total: 0,
                    region_infer: 0, ty_infer: 0, both_infer: 0,
                };
                $(let mut $variant = total;)*

                // Walk every interned type and bucket it by variant,
                // counting how many contain region/type inference vars.
                for &Interned(t) in tcx.interners.type_.borrow().iter() {
                    let variant = match t.sty {
                        ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) |
                            ty::TyFloat(..) | ty::TyStr | ty::TyNever => continue,
                        ty::TyError => /* unimportant */ continue,
                        $(ty::$variant(..) => &mut $variant,)*
                    };
                    let region = t.flags.intersects(ty::TypeFlags::HAS_RE_INFER);
                    let ty = t.flags.intersects(ty::TypeFlags::HAS_TY_INFER);

                    variant.total += 1;
                    total.total += 1;
                    if region { total.region_infer += 1; variant.region_infer += 1 }
                    if ty { total.ty_infer += 1; variant.ty_infer += 1 }
                    if region && ty { total.both_infer += 1; variant.both_infer += 1 }
                }
                println!("Ty interner             total           ty region  both");
                $(println!("    {:18}: {uses:6} {usespc:4.1}%, \
                            {ty:4.1}% {region:5.1}% {both:4.1}%",
                           stringify!($variant),
                           uses = $variant.total,
                           usespc = $variant.total as f64 * 100.0 / total.total as f64,
                           ty = $variant.ty_infer as f64 * 100.0 / total.total as f64,
                           region = $variant.region_infer as f64 * 100.0 / total.total as f64,
                           both = $variant.both_infer as f64 * 100.0 / total.total as f64);
                )*
                println!("                  total {uses:6}        \
                          {ty:4.1}% {region:5.1}% {both:4.1}%",
                         uses = total.total,
                         ty = total.ty_infer as f64 * 100.0 / total.total as f64,
                         region = total.region_infer as f64 * 100.0 / total.total as f64,
                         both = total.both_infer as f64 * 100.0 / total.total as f64)
            }
        }

        inner::go($ctxt)
    }}
}
2090 impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
// Prints interner statistics for debugging: a per-variant breakdown of the
// type interner (via `sty_debug_print!`) followed by raw entry counts of
// the other interners (substs, regions, stability attrs, allocations,
// layouts).
2091 pub fn print_debug_stats(self) {
2094 TyAdt, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr,
2095 TyGenerator, TyGeneratorWitness, TyDynamic, TyClosure, TyTuple,
2096 TyParam, TyInfer, TyProjection, TyAnon, TyForeign);
2098 println!("Substs interner: #{}", self.interners.substs.borrow().len());
2099 println!("Region interner: #{}", self.interners.region.borrow().len());
2100 println!("Stability interner: #{}", self.stability_interner.borrow().len());
2101 println!("Allocation interner: #{}", self.allocation_interner.borrow().len());
2102 println!("Layout interner: #{}", self.layout_interner.borrow().len());
2107 /// An entry in an interner.
// Thin newtype over an arena reference; its `PartialEq`/`Hash` impls below
// compare the *pointed-to contents*, not the pointer, so hash-set interners
// can deduplicate by value.
2108 struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
2110 // NB: An Interned<Ty> compares and hashes as a sty.
// Equality/hashing deliberately look only at `sty` (the structural variant),
// ignoring cached flags — two `TyS` with equal `sty` are the same type.
2111 impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
2112 fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
2113 self.0.sty == other.0.sty
2117 impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {}
2119 impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> {
// Hash body elided in this extraction; presumably hashes `self.0.sty` to
// stay consistent with `eq` above — TODO confirm against the full file.
2120 fn hash<H: Hasher>(&self, s: &mut H) {
// `Borrow` lets the interner be probed with a bare `TypeVariants` key
// without constructing a `TyS` first.
2125 impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>> {
2126 fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> {
2131 // NB: An Interned<Slice<T>> compares and hashes as its elements.
// Element-wise equality over the underlying slice contents.
2132 impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
2133 fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
2134 self.0[..] == other.0[..]
2138 impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}
2140 impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
// Hash body elided here; presumably hashes the element slice to match `eq`.
2141 fn hash<H: Hasher>(&self, s: &mut H) {
// `Borrow` impls allowing each interner's hash set to be queried with the
// plain key type (a slice of elements, a `RegionKind`, a `Const`, ...)
// rather than an already-interned wrapper. All method bodies are elided in
// this extraction; each presumably returns a view of `self.0`.
2146 impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
2147 fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
2152 impl<'tcx: 'lcx, 'lcx> Borrow<[CanonicalVarInfo]> for Interned<'tcx, Slice<CanonicalVarInfo>> {
2153 fn borrow<'a>(&'a self) -> &'a [CanonicalVarInfo] {
// Substs are interned as a slice of `Kind`s.
2158 impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> {
2159 fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] {
2164 impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> {
2165 fn borrow<'a>(&'a self) -> &'a RegionKind {
2170 impl<'tcx: 'lcx, 'lcx> Borrow<[ExistentialPredicate<'lcx>]>
2171 for Interned<'tcx, Slice<ExistentialPredicate<'tcx>>> {
2172 fn borrow<'a>(&'a self) -> &'a [ExistentialPredicate<'lcx>] {
2177 impl<'tcx: 'lcx, 'lcx> Borrow<[Predicate<'lcx>]>
2178 for Interned<'tcx, Slice<Predicate<'tcx>>> {
2179 fn borrow<'a>(&'a self) -> &'a [Predicate<'lcx>] {
2184 impl<'tcx: 'lcx, 'lcx> Borrow<Const<'lcx>> for Interned<'tcx, Const<'tcx>> {
2185 fn borrow<'a>(&'a self) -> &'a Const<'lcx> {
2190 impl<'tcx: 'lcx, 'lcx> Borrow<[Clause<'lcx>]>
2191 for Interned<'tcx, Slice<Clause<'tcx>>> {
2192 fn borrow<'a>(&'a self) -> &'a [Clause<'lcx>] {
2197 impl<'tcx: 'lcx, 'lcx> Borrow<[Goal<'lcx>]>
2198 for Interned<'tcx, Slice<Goal<'tcx>>> {
2199 fn borrow<'a>(&'a self) -> &'a [Goal<'lcx>] {
// Generates an interning method on `TyCtxt`. The generated `$method` first
// decides (via `$keep_in_local_tcx`) whether the value may reference
// inference types/regions: if so it goes in the thread-local interner,
// otherwise in the global one. Either way, an existing entry is reused;
// a new value is arena-allocated via `$alloc_method` and inserted.
// NOTE(review): several interior lines (early returns on cache hit, the
// closing delimiters) are elided in this extraction.
2204 macro_rules! intern_method {
2205 ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
2208 $keep_in_local_tcx:expr) -> $ty:ty) => {
2209 impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
2210 pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
// Derive the lookup key from the value before deciding which interner owns it.
2211 let key = ($alloc_to_key)(&v);
2213 // HACK(eddyb) Depend on flags being accurate to
2214 // determine that all contents are in the global tcx.
2215 // See comments on Lift for why we can't use that.
2216 if ($keep_in_local_tcx)(&v) {
2217 let mut interner = self.interners.$name.borrow_mut();
2218 if let Some(&Interned(v)) = interner.get(key) {
2222 // Make sure we don't end up with inference
2223 // types/regions in the global tcx.
2224 if self.is_global() {
2225 bug!("Attempted to intern `{:?}` which contains \
2226 inference types/regions in the global type context",
// Miss: allocate in the local arena and record it.
2230 let i = $alloc_method(&self.interners.arena, v);
2231 interner.insert(Interned(i));
// Global path: inference-free values live in the global interner/arena.
2234 let mut interner = self.global_interners.$name.borrow_mut();
2235 if let Some(&Interned(v)) = interner.get(key) {
2239 // This transmutes $alloc<'tcx> to $alloc<'gcx>
2243 let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
// Sound per the flag check above: the value contains nothing local,
// so extending its lifetime from 'gcx back to $lt_tcx is OK.
2245 let i = unsafe { mem::transmute(i) };
2246 interner.insert(Interned(i));
// For types interned "directly" (by value, not as slices): generates the
// value-comparing `PartialEq`/`Eq`/`Hash` impls for `Interned<$ty>` and then
// delegates to `intern_method!` with a plain arena `alloc` as the allocator.
2254 macro_rules! direct_interners {
2255 ($lt_tcx:tt, $($name:ident: $method:ident($keep_in_local_tcx:expr) -> $ty:ty),+) => {
2256 $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
2257 fn eq(&self, other: &Self) -> bool {
2262 impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
2264 impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
// Hash body elided; presumably hashes the referenced value to match `eq`.
2265 fn hash<H: Hasher>(&self, s: &mut H) {
// Allocation strategy: copy the value into the (sync, dropless) arena.
2273 |a: &$lt_tcx SyncDroplessArena, v| -> &$lt_tcx $ty { a.alloc(v) },
2275 $keep_in_local_tcx) -> $ty);)+
// Returns true if `x` must stay in the thread-local (inference) tcx rather
// than the global one, as indicated by its cached `KEEP_IN_LOCAL_TCX` flag.
// Used as the `$keep_in_local_tcx` predicate for the interner macros below.
2279 pub fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
2280 x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
// Instantiate direct interners: `mk_region` keys off the region's own
// locality check; `mk_const` is local if either the const's type or value
// contains anything local.
2283 direct_interners!('tcx,
2284 region: mk_region(|r: &RegionKind| r.keep_in_local_tcx()) -> RegionKind,
2285 const_: mk_const(|c: &Const| keep_local(&c.ty) || keep_local(&c.val)) -> Const<'tcx>
// Generates interners for `Slice<$ty>` values: allocation copies the slice
// into the arena via `Slice::from_arena`, and the slice is kept local iff
// any element is local.
2288 macro_rules! slice_interners {
2289 ($($field:ident: $method:ident($ty:ident)),+) => (
2290 $(intern_method!( 'tcx, $field: $method(
2292 |a, v| Slice::from_arena(a, v),
2294 |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+
// The underscore-prefixed `_intern_*` methods generated here are raw; the
// public `intern_*` wrappers further down add the empty/sortedness checks.
2299 existential_predicates: _intern_existential_predicates(ExistentialPredicate),
2300 predicates: _intern_predicates(Predicate),
2301 type_list: _intern_type_list(Ty),
2302 substs: _intern_substs(Kind),
2303 clauses: _intern_clauses(Clause),
2304 goals: _intern_goals(Goal)
2307 // This isn't a perfect fit: CanonicalVarInfo slices are always
2308 // allocated in the global arena, so this `intern_method!` macro is
2309 // overly general. But we just return false for the code that checks
2310 // whether they belong in the thread-local arena, so no harm done, and
2311 // seems better than open-coding the rest.
2314 canonical_var_infos: _intern_canonical_var_infos(
2315 &[CanonicalVarInfo],
2316 |a, v| Slice::from_arena(a, v),
// Always global: the keep-local predicate is a constant `false`.
2318 |_xs: &[CanonicalVarInfo]| -> bool { false }
2319 ) -> Slice<CanonicalVarInfo>
2322 impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
2323 /// Given a `fn` type, returns an equivalent `unsafe fn` type;
2324 /// that is, a `fn` type that is equivalent in every way for being
2326 pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
// Precondition: the input signature must actually be a safe `fn`.
2327 assert_eq!(sig.unsafety(), hir::Unsafety::Normal);
// Rebuild the signature identically except for the unsafety flag.
2328 self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig {
2329 unsafety: hir::Unsafety::Unsafe,
2334 /// Given a closure signature `sig`, returns an equivalent `fn`
2335 /// type with the same signature. Detuples and so forth -- so
2336 /// e.g. if we have a sig with `Fn<(u32, i32)>` then you would get
2337 /// a `fn(u32, i32)`.
2338 pub fn coerce_closure_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> {
2339 let converted_sig = sig.map_bound(|s| {
// Closure calling convention packs all params into a single tuple at
// inputs[0]; unpack it into individual parameters here.
2340 let params_iter = match s.inputs()[0].sty {
2341 ty::TyTuple(params) => {
2342 params.into_iter().cloned()
2350 hir::Unsafety::Normal,
2355 self.mk_fn_ptr(converted_sig)
// Core type constructor: interns a `TypeVariants` into a canonical `Ty`,
// routing through either the local or global interner as appropriate.
2358 pub fn mk_ty(&self, st: TypeVariants<'tcx>) -> Ty<'tcx> {
2359 CtxtInterners::intern_ty(&self.interners, &self.global_interners, st)
// Maps an AST machine-int type to the pre-interned primitive `Ty`.
2362 pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> {
2364 ast::IntTy::Isize => self.types.isize,
2365 ast::IntTy::I8 => self.types.i8,
2366 ast::IntTy::I16 => self.types.i16,
2367 ast::IntTy::I32 => self.types.i32,
2368 ast::IntTy::I64 => self.types.i64,
2369 ast::IntTy::I128 => self.types.i128,
// Same for unsigned integer types.
2373 pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> {
2375 ast::UintTy::Usize => self.types.usize,
2376 ast::UintTy::U8 => self.types.u8,
2377 ast::UintTy::U16 => self.types.u16,
2378 ast::UintTy::U32 => self.types.u32,
2379 ast::UintTy::U64 => self.types.u64,
2380 ast::UintTy::U128 => self.types.u128,
// Same for floating-point types.
2384 pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> {
2386 ast::FloatTy::F32 => self.types.f32,
2387 ast::FloatTy::F64 => self.types.f64,
// `str` type constructor (body elided in this extraction).
2391 pub fn mk_str(self) -> Ty<'tcx> {
// `&'static str`: an immutable reference to `str` with the static region.
2395 pub fn mk_static_str(self) -> Ty<'tcx> {
2396 self.mk_imm_ref(self.types.re_static, self.mk_str())
2399 pub fn mk_adt(self, def: &'tcx AdtDef, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2400 // take a copy of substs so that we own the vectors inside
2401 self.mk_ty(TyAdt(def, substs))
2404 pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> {
2405 self.mk_ty(TyForeign(def_id))
// Builds `Box<ty>` by looking up the owned-box lang item and substituting
// `ty` for its first type parameter; remaining (defaulted) params get their
// default types.
2408 pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2409 let def_id = self.require_lang_item(lang_items::OwnedBoxLangItem);
2410 let adt_def = self.adt_def(def_id);
2411 let substs = Substs::for_item(self, def_id, |param, substs| {
// Box takes no lifetime parameters.
2413 GenericParamDefKind::Lifetime => bug!(),
2414 GenericParamDefKind::Type { has_default, .. } => {
2415 if param.index == 0 {
// Any extra type parameter beyond the payload must have a default.
2418 assert!(has_default);
2419 self.type_of(param.def_id).subst(self, substs).into()
2424 self.mk_ty(TyAdt(adt_def, substs))
2427 pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2428 self.mk_ty(TyRawPtr(tm))
2431 pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx> {
2432 self.mk_ty(TyRef(r, tm.ty, tm.mutbl))
// Convenience wrappers over mk_ref/mk_ptr for each mutability.
2435 pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2436 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2439 pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> {
2440 self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
2443 pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2444 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable})
2447 pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2448 self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable})
// `*const ()` — the unit raw pointer.
2451 pub fn mk_nil_ptr(self) -> Ty<'tcx> {
2452 self.mk_imm_ptr(self.mk_nil())
// `[ty; n]` with the length interned as a usize constant.
2455 pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
2456 self.mk_ty(TyArray(ty, ty::Const::from_usize(self, n)))
2459 pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> {
2460 self.mk_ty(TySlice(ty))
// Tuple from a ready-made slice of element types.
2463 pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> {
2464 self.mk_ty(TyTuple(self.intern_type_list(ts)))
// Tuple from any iterator of element types (possibly fallible, via InternAs).
2467 pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output {
2468 iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts))))
// The unit type `()` is the empty tuple.
2471 pub fn mk_nil(self) -> Ty<'tcx> {
2472 self.intern_tup(&[])
// Default type for diverging expressions: `!` when the never_type feature
// is enabled, otherwise the unit type `()`.
2475 pub fn mk_diverging_default(self) -> Ty<'tcx> {
2476 if self.features().never_type {
2479 self.intern_tup(&[])
// `bool` constructor (body elided in this extraction).
2483 pub fn mk_bool(self) -> Ty<'tcx> {
// The zero-sized type of a particular `fn` item.
2487 pub fn mk_fn_def(self, def_id: DefId,
2488 substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2489 self.mk_ty(TyFnDef(def_id, substs))
2492 pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> {
2493 self.mk_ty(TyFnPtr(fty))
// Trait-object type from its (sorted, interned) existential predicates
// plus the default object lifetime.
2498 obj: ty::Binder<&'tcx Slice<ExistentialPredicate<'tcx>>>,
2499 reg: ty::Region<'tcx>
2501 self.mk_ty(TyDynamic(obj, reg))
// `<T as Trait>::Item`-style associated-type projection.
2504 pub fn mk_projection(self,
2506 substs: &'tcx Substs<'tcx>)
2508 self.mk_ty(TyProjection(ProjectionTy {
2514 pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>)
2516 self.mk_ty(TyClosure(closure_id, closure_substs))
2519 pub fn mk_generator(self,
2521 generator_substs: GeneratorSubsts<'tcx>,
2522 movability: hir::GeneratorMovability)
2524 self.mk_ty(TyGenerator(id, generator_substs, movability))
// Witness for the types a generator keeps live across yields.
2527 pub fn mk_generator_witness(self, types: ty::Binder<&'tcx Slice<Ty<'tcx>>>) -> Ty<'tcx> {
2528 self.mk_ty(TyGeneratorWitness(types))
// Inference-variable type constructors.
2531 pub fn mk_var(self, v: TyVid) -> Ty<'tcx> {
2532 self.mk_infer(TyVar(v))
2535 pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> {
2536 self.mk_infer(IntVar(v))
2539 pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> {
2540 self.mk_infer(FloatVar(v))
2543 pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> {
2544 self.mk_ty(TyInfer(it))
// Generic type parameter, identified by index + name.
2547 pub fn mk_ty_param(self,
2549 name: InternedString) -> Ty<'tcx> {
2550 self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
// `Self` is by convention the type parameter at index 0.
2553 pub fn mk_self_type(self) -> Ty<'tcx> {
2554 self.mk_ty_param(0, keywords::SelfType.name().as_interned_str())
// Turns a generic-param definition into the corresponding `Kind` (a region
// for lifetimes, a param type for types).
2557 pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> {
2559 GenericParamDefKind::Lifetime => {
2560 self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into()
2562 GenericParamDefKind::Type {..} => self.mk_ty_param(param.index, param.name).into(),
// `impl Trait` (anonymized) type for the given definition.
2566 pub fn mk_anon(self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> {
2567 self.mk_ty(TyAnon(def_id, substs))
// Public slice-interning wrappers. Each delegates to the macro-generated
// `_intern_*` method, adding invariant checks first.
2570 pub fn intern_existential_predicates(self, eps: &[ExistentialPredicate<'tcx>])
2571 -> &'tcx Slice<ExistentialPredicate<'tcx>> {
// Existential predicate lists must be non-empty and sorted by the stable
// ordering, so equal sets intern to the same slice.
2572 assert!(!eps.is_empty());
2573 assert!(eps.windows(2).all(|w| w[0].stable_cmp(self, &w[1]) != Ordering::Greater));
2574 self._intern_existential_predicates(eps)
2577 pub fn intern_predicates(self, preds: &[Predicate<'tcx>])
2578 -> &'tcx Slice<Predicate<'tcx>> {
2579 // FIXME consider asking the input slice to be sorted to avoid
2580 // re-interning permutations, in which case that would be asserted
2582 if preds.len() == 0 {
2583 // The macro-generated method below asserts we don't intern an empty slice.
2586 self._intern_predicates(preds)
// Remaining wrappers: bodies for the empty-slice fast path are elided in
// this extraction; each falls through to its `_intern_*` method.
2590 pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice<Ty<'tcx>> {
2594 self._intern_type_list(ts)
2598 pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice<Kind<'tcx>> {
2602 self._intern_substs(ts)
// Canonical var infos always intern in the global tcx (see comment on the
// intern_method invocation above).
2606 pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'gcx> {
2610 self.global_tcx()._intern_canonical_var_infos(ts)
2614 pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> {
2618 self._intern_clauses(ts)
2622 pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> {
2626 self._intern_goals(ts)
// Builds a `FnSig` from an iterator of input types plus an output type;
// inputs and output are stored together as one interned type list
// (output last). The InternAs machinery lets the iterator be fallible.
2630 pub fn mk_fn_sig<I>(self,
2634 unsafety: hir::Unsafety,
2636 -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output
2638 I::Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>
2640 inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig {
2641 inputs_and_output: self.intern_type_list(xs),
2642 variadic, unsafety, abi
// Iterator-based wrappers over the corresponding `intern_*` methods.
2646 pub fn mk_existential_predicates<I: InternAs<[ExistentialPredicate<'tcx>],
2647 &'tcx Slice<ExistentialPredicate<'tcx>>>>(self, iter: I)
2649 iter.intern_with(|xs| self.intern_existential_predicates(xs))
2652 pub fn mk_predicates<I: InternAs<[Predicate<'tcx>],
2653 &'tcx Slice<Predicate<'tcx>>>>(self, iter: I)
2655 iter.intern_with(|xs| self.intern_predicates(xs))
2658 pub fn mk_type_list<I: InternAs<[Ty<'tcx>],
2659 &'tcx Slice<Ty<'tcx>>>>(self, iter: I) -> I::Output {
2660 iter.intern_with(|xs| self.intern_type_list(xs))
2663 pub fn mk_substs<I: InternAs<[Kind<'tcx>],
2664 &'tcx Slice<Kind<'tcx>>>>(self, iter: I) -> I::Output {
2665 iter.intern_with(|xs| self.intern_substs(xs))
// Trait substs: the self type first, followed by the remaining args.
2668 pub fn mk_substs_trait(self,
2670 rest: &[Kind<'tcx>])
2671 -> &'tcx Substs<'tcx>
2673 self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned()))
2676 pub fn mk_clauses<I: InternAs<[Clause<'tcx>], Clauses<'tcx>>>(self, iter: I) -> I::Output {
2677 iter.intern_with(|xs| self.intern_clauses(xs))
2680 pub fn mk_goals<I: InternAs<[Goal<'tcx>], Goals<'tcx>>>(self, iter: I) -> I::Output {
2681 iter.intern_with(|xs| self.intern_goals(xs))
// Interns a single goal as a one-element slice and returns a ref into it.
2684 pub fn mk_goal(self, goal: Goal<'tcx>) -> &'tcx Goal {
2685 &self.intern_goals(&[goal])[0]
// Convenience lint emitters: build the diagnostic via the corresponding
// `struct_span_lint_*` method and emit it immediately. The `*_note`
// variants attach an extra note before emitting (emit lines elided here).
2688 pub fn lint_hir<S: Into<MultiSpan>>(self,
2689 lint: &'static Lint,
2693 self.struct_span_lint_hir(lint, hir_id, span.into(), msg).emit()
2696 pub fn lint_node<S: Into<MultiSpan>>(self,
2697 lint: &'static Lint,
2701 self.struct_span_lint_node(lint, id, span.into(), msg).emit()
2704 pub fn lint_hir_note<S: Into<MultiSpan>>(self,
2705 lint: &'static Lint,
2710 let mut err = self.struct_span_lint_hir(lint, hir_id, span.into(), msg);
2715 pub fn lint_node_note<S: Into<MultiSpan>>(self,
2716 lint: &'static Lint,
2721 let mut err = self.struct_span_lint_node(lint, id, span.into(), msg);
// Resolves the effective level and source of `lint` at HIR node `id` by
// walking up the parent chain until the lint-level sets yield an answer.
2726 pub fn lint_level_at_node(self, lint: &'static Lint, mut id: NodeId)
2727 -> (lint::Level, lint::LintSource)
2729 // Right now we insert a `with_ignore` node in the dep graph here to
2730 // ignore the fact that `lint_levels` below depends on the entire crate.
2731 // For now this'll prevent false positives of recompiling too much when
2732 // anything changes.
2734 // Once red/green incremental compilation lands we should be able to
2735 // remove this because while the crate changes often the lint level map
2736 // will change rarely.
2737 self.dep_graph.with_ignore(|| {
2738 let sets = self.lint_levels(LOCAL_CRATE);
// Loop body (elided control flow): check this node's level, otherwise
// climb to the parent; reaching the root without an answer is a bug.
2740 let hir_id = self.hir.definitions().node_to_hir_id(id);
2741 if let Some(pair) = sets.level_and_source(lint, hir_id, self.sess) {
2744 let next = self.hir.get_parent_node(id);
2746 bug!("lint traversal reached the root of the crate");
// Builders returning a `DiagnosticBuilder` (caller decides when to emit).
// Each resolves the effective lint level at the node, then constructs the
// diagnostic at that level.
2753 pub fn struct_span_lint_hir<S: Into<MultiSpan>>(self,
2754 lint: &'static Lint,
2758 -> DiagnosticBuilder<'tcx>
// Lint levels are tracked by NodeId, so map the HirId back first.
2760 let node_id = self.hir.hir_to_node_id(hir_id);
2761 let (level, src) = self.lint_level_at_node(lint, node_id);
2762 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
2765 pub fn struct_span_lint_node<S: Into<MultiSpan>>(self,
2766 lint: &'static Lint,
2770 -> DiagnosticBuilder<'tcx>
2772 let (level, src) = self.lint_level_at_node(lint, id);
2773 lint::struct_lint_level(self.sess, lint, level, src, Some(span.into()), msg)
// Span-less variant: same resolution, but no primary span attached.
2776 pub fn struct_lint_node(self, lint: &'static Lint, id: NodeId, msg: &str)
2777 -> DiagnosticBuilder<'tcx>
2779 let (level, src) = self.lint_level_at_node(lint, id);
2780 lint::struct_lint_level(self.sess, lint, level, src, None, msg)
// Per-item-local lookups: each query returns a map keyed by the HirId's
// owner; these helpers then index by the id's local part.
2783 pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
2784 self.in_scope_traits_map(id.owner)
2785 .and_then(|map| map.get(&id.local_id).cloned())
2788 pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
2789 self.named_region_map(id.owner)
2790 .and_then(|map| map.get(&id.local_id).cloned())
// Set membership rather than map lookup; absent owner map means "not
// late-bound" (the unwrapping tail of this chain is elided here).
2793 pub fn is_late_bound(self, id: HirId) -> bool {
2794 self.is_late_bound_map(id.owner)
2795 .map(|set| set.contains(&id.local_id))
2799 pub fn object_lifetime_defaults(self, id: HirId)
2800 -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
2802 self.object_lifetime_defaults_map(id.owner)
2803 .and_then(|map| map.get(&id.local_id).cloned())
// Abstraction over "collect this iterator and intern the result": `T` is
// the collected slice type, `R` the interned result. The associated
// `Output` lets fallible element types surface as `Result<R, E>`.
2807 pub trait InternAs<T: ?Sized, R> {
2809 fn intern_with<F>(self, f: F) -> Self::Output
2810 where F: FnOnce(&T) -> R;
// Blanket impl: any iterator interns by delegating to its element type's
// `InternIteratorElement` strategy (value / reference / Result).
2813 impl<I, T, R, E> InternAs<[T], R> for I
2814 where E: InternIteratorElement<T, R>,
2815 I: Iterator<Item=E> {
2816 type Output = E::Output;
2817 fn intern_with<F>(self, f: F) -> Self::Output
2818 where F: FnOnce(&[T]) -> R {
2819 E::intern_with(self, f)
// Per-element-type collection strategy used by `InternAs`.
2823 pub trait InternIteratorElement<T, R>: Sized {
2825 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output;
// Owned elements: collect into a small stack-preferring vec, then intern.
2828 impl<T, R> InternIteratorElement<T, R> for T {
2830 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2831 f(&iter.collect::<AccumulateVec<[_; 8]>>())
// Borrowed elements: clone each into the buffer before interning.
2835 impl<'a, T, R> InternIteratorElement<T, R> for &'a T
2839 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2840 f(&iter.cloned().collect::<AccumulateVec<[_; 8]>>())
// Fallible elements: `collect::<Result<...>>` short-circuits on the first
// `Err`, so interning only happens when every element succeeded.
2844 impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> {
2845 type Output = Result<R, E>;
2846 fn intern_with<I: Iterator<Item=Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output {
2847 Ok(f(&iter.collect::<Result<AccumulateVec<[_; 8]>, _>>()?))
// Installs provider closures for the local-crate queries that are answered
// directly from tables on the `GlobalCtxt` / session / cstore rather than
// computed. NOTE(review): this definition continues past the end of the
// visible chunk; comments below cover only the visible providers.
2851 pub fn provide(providers: &mut ty::query::Providers) {
2852 // FIXME(#44234) - almost all of these queries have no sub-queries and
2853 // therefore no actual inputs, they're just reading tables calculated in
2854 // resolve! Does this work? Unsure! That's what the issue is about
2855 providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
2856 providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
2857 providers.crate_name = |tcx, id| {
// These providers only make sense for the crate being compiled.
2858 assert_eq!(id, LOCAL_CRATE);
2861 providers.get_lang_items = |tcx, id| {
2862 assert_eq!(id, LOCAL_CRATE);
2863 // FIXME(#42293) Right now we insert a `with_ignore` node in the dep
2864 // graph here to ignore the fact that `get_lang_items` below depends on
2865 // the entire crate. For now this'll prevent false positives of
2866 // recompiling too much when anything changes.
2868 // Once red/green incremental compilation lands we should be able to
2869 // remove this because while the crate changes often the lint level map
2870 // will change rarely.
2871 tcx.dep_graph.with_ignore(|| Lrc::new(middle::lang_items::collect(tcx)))
2873 providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
2874 providers.maybe_unused_trait_import = |tcx, id| {
2875 tcx.maybe_unused_trait_imports.contains(&id)
2877 providers.maybe_unused_extern_crates = |tcx, cnum| {
2878 assert_eq!(cnum, LOCAL_CRATE);
2879 Lrc::new(tcx.maybe_unused_extern_crates.clone())
2882 providers.stability_index = |tcx, cnum| {
2883 assert_eq!(cnum, LOCAL_CRATE);
2884 Lrc::new(stability::Index::new(tcx))
// Stability/deprecation lookups translate the DefIndex to a HirId and
// consult the locally-computed stability index.
2886 providers.lookup_stability = |tcx, id| {
2887 assert_eq!(id.krate, LOCAL_CRATE);
2888 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2889 tcx.stability().local_stability(id)
2891 providers.lookup_deprecation_entry = |tcx, id| {
2892 assert_eq!(id.krate, LOCAL_CRATE);
2893 let id = tcx.hir.definitions().def_index_to_hir_id(id.index);
2894 tcx.stability().local_deprecation_entry(id)
// The following read untracked data from the crate store / session; the
// `_untracked` suffix marks reads outside dep-graph tracking.
2896 providers.extern_mod_stmt_cnum = |tcx, id| {
2897 let id = tcx.hir.as_local_node_id(id).unwrap();
2898 tcx.cstore.extern_mod_stmt_cnum_untracked(id)
2900 providers.all_crate_nums = |tcx, cnum| {
2901 assert_eq!(cnum, LOCAL_CRATE);
2902 Lrc::new(tcx.cstore.crates_untracked())
2904 providers.postorder_cnums = |tcx, cnum| {
2905 assert_eq!(cnum, LOCAL_CRATE);
2906 Lrc::new(tcx.cstore.postorder_cnums_untracked())
2908 providers.output_filenames = |tcx, cnum| {
2909 assert_eq!(cnum, LOCAL_CRATE);
2910 tcx.output_filenames.clone()
2912 providers.features_query = |tcx, cnum| {
2913 assert_eq!(cnum, LOCAL_CRATE);
2914 Lrc::new(tcx.sess.features_untracked().clone())
// Crate-kind flags are read straight off the crate-level attributes.
2916 providers.is_panic_runtime = |tcx, cnum| {
2917 assert_eq!(cnum, LOCAL_CRATE);
2918 attr::contains_name(tcx.hir.krate_attrs(), "panic_runtime")
2920 providers.is_compiler_builtins = |tcx, cnum| {
2921 assert_eq!(cnum, LOCAL_CRATE);
2922 attr::contains_name(tcx.hir.krate_attrs(), "compiler_builtins")