1 //! This calculates the types which have storage that lives across a suspension point in a
2 //! generator from the perspective of typeck. The actual types used at runtime
3 //! are calculated in `rustc_mir::transform::generator` and may be a subset of the
4 //! types computed here.
7 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
9 use rustc_hir::def::{CtorKind, DefKind, Res};
10 use rustc_hir::def_id::DefId;
11 use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
12 use rustc_hir::{Expr, ExprKind, Pat, PatKind};
13 use rustc_middle::middle::region::{self, YieldData};
14 use rustc_middle::ty::{self, Ty};
// HIR visitor that walks a generator body and collects every type whose
// storage may be live across a yield ("the generator interior").
// NOTE(review): the embedded source numbering jumps (21 -> 23), so at least
// one field is elided here — later code reads `self.expr_count`, which is
// presumably declared in the missing span; confirm against the full file.
17 struct InteriorVisitor<'a, 'tcx> {
// Type-checking context of the function containing the generator.
18 fcx: &'a FnCtxt<'a, 'tcx>,
// DefId of the generator closure being analyzed (used to look up upvars).
19 closure_def_id: DefId,
// Interior types found so far, mapped to their insertion index so the
// final list can be sorted back into discovery order (see resolve_interior).
20 types: FxHashMap<ty::GeneratorInteriorTypeCause<'tcx>, usize>,
// Region scope tree for the body; consulted via `yield_in_scope` to decide
// whether a value's scope encloses a yield.
21 region_scope_tree: &'tcx region::ScopeTree,
// Whether this is an `async` construct or a plain generator; used in
// diagnostics and as the `source` of synthesized YieldData.
23 kind: hir::GeneratorKind,
// Span of the most recent type that failed to fully resolve; used to point
// type-inference errors at a better location.
24 prev_unresolved_span: Option<Span>,
// NOTE(review): many source lines are elided throughout this impl (embedded
// numbering jumps, e.g. 27 -> 31, 58 -> 66); the comments below describe only
// the visible fragments of `record`.
27 impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
// `record` (signature partially elided): decides whether `ty` must be kept
// in the generator interior, i.e. whether its value may be live across a
// yield point, and if so stores it in `self.types`.
// Visible parameters: the temporary scope of the value (None means it lives
// for the whole body) and the expression that produced it, if any.
31 scope: Option<region::Scope>,
32 expr: Option<&'tcx Expr<'tcx>>,
36 use rustc_span::DUMMY_SP;
39 "generator_interior: attempting to record type {:?} {:?} {:?} {:?}",
40 ty, scope, expr, source_span
// For a scoped value, ask the scope tree whether a yield occurs inside its
// scope; only then can the value be live across that yield.
43 let live_across_yield = scope
45 self.region_scope_tree.yield_in_scope(s).and_then(|yield_data| {
46 // If we are recording an expression that is the last yield
47 // in the scope, or that has a postorder CFG index larger
48 // than the one of all of the yields, then its value can't
49 // be storage-live (and therefore live) at any of the yields.
51 // See the mega-comment at `yield_in_scope` for a proof.
54 "comparing counts yield: {} self: {}, source_span = {:?}",
55 yield_data.expr_and_pat_count, self.expr_count, source_span
// Keep the yield only if it occurs at-or-after this value in postorder.
58 if yield_data.expr_and_pat_count >= self.expr_count {
// Fallback (scope is None): the value lives for the entire body, so treat
// it as live across a synthesized yield with a dummy span.
66 Some(YieldData { span: DUMMY_SP, expr_and_pat_count: 0, source: self.kind.into() })
// The value may be live across a yield: resolve inference vars and record it.
69 if let Some(yield_data) = live_across_yield {
70 let ty = self.fcx.resolve_vars_if_possible(&ty);
72 "type in expr = {:?}, scope = {:?}, type = {:?}, count = {}, yield_span = {:?}",
73 expr, scope, ty, self.expr_count, yield_data.span
// If the type still contains unresolved inference variables, we cannot
// name the interior type — report a "type annotations needed" style error
// pointing at the yield that forces the type to be kept.
76 if let Some((unresolved_type, unresolved_type_span)) =
77 self.fcx.unresolved_type_vars(&ty)
80 "the type is part of the {} because of this {}",
81 self.kind, yield_data.source
84 // If unresolved type isn't a ty_var then unresolved_type_span is None
87 .unwrap_or_else(|| unresolved_type_span.unwrap_or(source_span));
89 .need_type_info_err_in_generator(self.kind, span, unresolved_type)
90 .span_note(yield_data.span, &*note)
93 // Map the type to the number of types added before it
94 let entries = self.types.len();
95 let scope_span = scope.map(|s| s.span(self.fcx.tcx, self.region_scope_tree));
// Insert the cause keyed by type; `or_insert(entries)` (elided) presumably
// preserves first-seen order — TODO confirm against the full file.
97 .entry(ty::GeneratorInteriorTypeCause {
101 yield_span: Some(yield_data.span),
102 expr: expr.map(|e| e.hir_id),
// Else branch: no yield in scope, so the type is not part of the interior.
// We still resolve it and remember any unresolved-variable span to improve
// a later diagnostic.
108 "no type in expr = {:?}, count = {:?}, span = {:?}",
113 let ty = self.fcx.resolve_vars_if_possible(&ty);
114 if let Some((unresolved_type, unresolved_type_span)) =
115 self.fcx.unresolved_type_vars(&ty)
118 "remained unresolved_type = {:?}, unresolved_type_span: {:?}",
119 unresolved_type, unresolved_type_span
121 self.prev_unresolved_span = unresolved_type_span;
// NOTE(review): a second `types.entry(...)` insertion appears below (original
// lines 124-132); the surrounding control flow is elided, so which branch it
// belongs to cannot be determined from this excerpt — verify in the full file.
124 let entries = self.types.len();
125 let scope_span = scope.map(|s| s.span(self.fcx.tcx, self.region_scope_tree));
127 .entry(ty::GeneratorInteriorTypeCause {
132 expr: expr.map(|e| e.hir_id),
// Entry point of this pass: computes the generator interior for `body_id`,
// builds a `GeneratorWitness` type from the collected types, stores the
// causes in the typeck tables, and unifies the generator's interior type
// variable with the witness.
// NOTE(review): parts of the signature are elided (numbering jumps 142 -> 144
// -> 146); `def_id` (used at original line 156) and `interior` (used at 222)
// are presumably among the missing parameters — confirm in the full file.
141 pub fn resolve_interior<'a, 'tcx>(
142 fcx: &'a FnCtxt<'a, 'tcx>,
144 body_id: hir::BodyId,
146 kind: hir::GeneratorKind,
148 let body = fcx.tcx.hir().body(body_id);
150 let closure_def_id = fcx.tcx.hir().body_owner_def_id(body_id).to_def_id();
// Walk the whole body, collecting interior types into `visitor.types`.
152 let mut visitor = InteriorVisitor {
155 types: FxHashMap::default(),
156 region_scope_tree: fcx.tcx.region_scope_tree(def_id),
159 prev_unresolved_span: None,
161 intravisit::walk_body(&mut visitor, body);
163 // Check that we visited the same amount of expressions and the RegionResolutionVisitor
// This assert guarantees the postorder counts compared in `record` are in
// the same numbering as the region scope tree's yield counts.
164 let region_expr_count = visitor.region_scope_tree.body_expr_count(body_id).unwrap();
165 assert_eq!(region_expr_count, visitor.expr_count);
// The map's values are insertion indices; drain and sort to recover
// deterministic discovery order.
167 let mut types: Vec<_> = visitor.types.drain().collect();
169 // Sort types by insertion order
170 types.sort_by_key(|t| t.1);
172 // The types in the generator interior contain lifetimes local to the generator itself,
173 // which should not be exposed outside of the generator. Therefore, we replace these
174 // lifetimes with existentially-bound lifetimes, which reflect the exact value of the
175 // lifetimes not being known by users.
177 // These lifetimes are used in auto trait impl checking (for example,
178 // if a Sync generator contains an &'α T, we need to check whether &'α T: Sync),
179 // so knowledge of the exact relationships between them isn't particularly important.
181 debug!("types in generator {:?}, span = {:?}", types, body.value.span);
// Deduplicate on the region-erased type: only the first cause for each
// distinct erased type is kept.
184 let mut captured_tys = FxHashSet::default();
185 let type_causes: Vec<_> = types
187 .filter_map(|(mut cause, _)| {
188 // Erase regions and canonicalize late-bound regions to deduplicate as many types as we
190 let erased = fcx.tcx.erase_regions(&cause.ty);
191 if captured_tys.insert(erased) {
192 // Replace all regions inside the generator interior with late bound regions.
193 // Note that each region slot in the types gets a new fresh late bound region,
194 // which means that none of the regions inside relate to any other, even if
195 // typeck had previously found constraints that would cause them to be related.
// NOTE(review): `counter` below is declared in an elided line — presumably a
// fresh BrAnon index incremented per region slot; confirm in the full file.
196 let folded = fcx.tcx.fold_regions(&erased, &mut false, |_, current_depth| {
198 fcx.tcx.mk_region(ty::ReLateBound(current_depth, ty::BrAnon(counter)))
209 // Extract type components to build the witness type.
210 let type_list = fcx.tcx.mk_type_list(type_causes.iter().map(|cause| cause.ty));
211 let witness = fcx.tcx.mk_generator_witness(ty::Binder::bind(type_list));
213 // Store the generator types and spans into the tables for this generator.
214 visitor.fcx.inh.tables.borrow_mut().generator_interior_types = type_causes;
217 "types in generator after region replacement {:?}, span = {:?}",
218 witness, body.value.span
221 // Unify the type variable inside the generator with the new witness
222 match fcx.at(&fcx.misc(body.value.span), fcx.param_env).eq(interior, witness) {
223 Ok(ok) => fcx.register_infer_ok_obligations(ok),
228 // This visitor has to have the same visit_expr calls as RegionResolutionVisitor in
229 // librustc_middle/middle/region.rs since `expr_count` is compared against the results
// NOTE(review): this impl is elided in places and its tail extends beyond the
// visible excerpt; comments describe only the visible fragments.
231 impl<'a, 'tcx> Visitor<'tcx> for InteriorVisitor<'a, 'tcx> {
232 type Map = intravisit::ErasedMap<'tcx>;
// No nested-item visiting: only the generator body itself is walked.
234 fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
235 NestedVisitorMap::None
238 fn visit_pat(&mut self, pat: &'tcx Pat<'tcx>) {
// Visit children first, then count this pattern — matching the postorder
// numbering used by the region scope tree.
239 intravisit::walk_pat(self, pat);
241 self.expr_count += 1;
// A binding pattern introduces a variable whose storage lives for its
// variable scope; record its type against that scope.
243 if let PatKind::Binding(..) = pat.kind {
244 let scope = self.region_scope_tree.var_scope(pat.hir_id.local_id);
245 let ty = self.fcx.tables.borrow().pat_ty(pat);
246 self.record(ty, Some(scope), None, pat.span, false);
250 fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) {
// Special case: direct calls through a plain path to a fn/ctor.
252 ExprKind::Call(callee, args) => match &callee.kind {
253 ExprKind::Path(qpath) => {
254 let res = self.fcx.tables.borrow().qpath_res(qpath, callee.hir_id);
256 // Direct calls never need to keep the callee `ty::FnDef`
257 // ZST in a temporary, so skip its type, just in case it
258 // can significantly complicate the generator type.
260 DefKind::Fn | DefKind::AssocFn | DefKind::Ctor(_, CtorKind::Fn),
263 // NOTE(eddyb) this assumes a path expression has
264 // no nested expressions to keep track of.
// Still bump the count for the callee so the numbering stays in sync with
// RegionResolutionVisitor, which does visit it.
265 self.expr_count += 1;
267 // Record the rest of the call expression normally.
269 self.visit_expr(arg);
272 _ => intravisit::walk_expr(self, expr),
275 _ => intravisit::walk_expr(self, expr),
277 _ => intravisit::walk_expr(self, expr),
// Postorder count for this expression itself (children counted above).
280 self.expr_count += 1;
282 let scope = self.region_scope_tree.temporary_scope(expr.hir_id.local_id);
284 // If there are adjustments, then record the final type --
285 // this is the actual value that is being produced.
286 if let Some(adjusted_ty) = self.fcx.tables.borrow().expr_ty_adjusted_opt(expr) {
287 self.record(adjusted_ty, scope, Some(expr), expr.span, false);
290 // Also record the unadjusted type (which is the only type if
291 // there are no adjustments). The reason for this is that the
292 // unadjusted value is sometimes a "temporary" that would wind
293 // up in a MIR temporary.
295 // As an example, consider an expression like `vec![].push()`.
296 // Here, the `vec![]` would wind up MIR stored into a
297 // temporary variable `t` which we can borrow to invoke
298 // `<Vec<_>>::push(&mut t)`.
300 // Note that an expression can have many adjustments, and we
301 // are just ignoring those intermediate types. This is because
302 // those intermediate values are always linearly "consumed" by
303 // the other adjustments, and hence would never be directly
304 // captured in the MIR.
306 // (Note that this partly relies on the fact that the `Deref`
307 // traits always return references, which means their content
308 // can be reborrowed without needing to spill to a temporary.
309 // If this were not the case, then we could conceivably have
310 // to create intermediate temporaries.)
312 // The type table might not have information for this expression
313 // if it is in a malformed scope. (#66387)
314 if let Some(ty) = self.fcx.tables.borrow().expr_ty_opt(expr) {
315 self.record(ty, scope, Some(expr), expr.span, false);
// Missing type info here is a compiler bug unless an error was already
// emitted; delay the ICE so the error can surface first.
317 self.fcx.tcx.sess.delay_span_bug(expr.span, "no type for node");
// Record every upvar's type as well — captured variables are stored in the
// generator and are live across all yields (note the `true` flag, which the
// other `record` calls pass as `false`).
320 if let Some(upvars) = self.fcx.tcx.upvars(self.closure_def_id) {
321 for (upvar_id, upvar) in upvars.iter() {
322 let upvar_ty = self.fcx.tables.borrow().node_type(*upvar_id);
323 debug!("type of upvar: {:?}", upvar_ty);
324 self.record(upvar_ty, scope, Some(expr), upvar.span, true);